From aa6d1a2de97f6395b7176e6fafee5bba427ab708 Mon Sep 17 00:00:00 2001
From: Max Gekk
Date: Tue, 9 Aug 2022 17:38:29 +0500
Subject: [PATCH 1/4] Output SparkThrowable to SQL golden files in JSON format

---
 .../sql-tests/results/ansi/array.sql.out | 82 +-
 .../sql-tests/results/ansi/cast.sql.out | 238 ++--
 .../sql-tests/results/ansi/date.sql.out | 65 +-
 .../ansi/datetime-parsing-invalid.sql.out | 68 +-
 .../ansi/decimalArithmeticOperations.sql.out | 50 +-
 .../ansi/higher-order-functions.sql.out | 2 +-
 .../sql-tests/results/ansi/interval.sql.out | 520 +++----
 .../sql-tests/results/ansi/literals.sql.out | 105 +--
 .../sql-tests/results/ansi/map.sql.out | 24 +-
 .../results/ansi/string-functions.sql.out | 36 +-
 .../sql-tests/results/ansi/timestamp.sql.out | 61 +-
 .../results/ansi/try_arithmetic.sql.out | 2 +-
 .../ansi/try_datetime_functions.sql.out | 3 +-
 .../results/ansi/try_element_at.sql.out | 2 +-
 .../resources/sql-tests/results/array.sql.out | 8 +-
 .../sql-tests/results/bitwise.sql.out | 4 +-
 .../resources/sql-tests/results/cast.sql.out | 19 +-
 .../ceil-floor-with-scale-param.sql.out | 12 +-
 .../sql-tests/results/change-column.sql.out | 26 +-
 .../sql-tests/results/charvarchar.sql.out | 4 +-
 .../results/columnresolution-negative.sql.out | 28 +-
 .../sql-tests/results/comments.sql.out | 31 +-
 .../resources/sql-tests/results/count.sql.out | 4 +-
 .../sql-tests/results/csv-functions.sql.out | 31 +-
 .../sql-tests/results/cte-legacy.sql.out | 2 +-
 .../sql-tests/results/cte-nested.sql.out | 16 +-
 .../resources/sql-tests/results/cte.sql.out | 24 +-
 .../resources/sql-tests/results/date.sql.out | 60 +-
 .../datetime-formatting-invalid.sql.out | 66 +-
 .../sql-tests/results/datetime-legacy.sql.out | 88 +--
 .../results/datetime-parsing-invalid.sql.out | 24 +-
 .../sql-tests/results/describe-query.sql.out | 24 +-
 .../sql-tests/results/describe.sql.out | 17 +-
 .../sql-tests/results/except-all.sql.out | 4 +-
 .../sql-tests/results/extract.sql.out | 22 +-
 .../sql-tests/results/group-analytics.sql.out | 26 +-
 .../sql-tests/results/group-by-filter.sql.out | 6 +-
 .../results/group-by-ordinal.sql.out | 20 +-
 .../sql-tests/results/group-by.sql.out | 48 +-
 .../sql-tests/results/grouping_set.sql.out | 16 +-
 .../sql-tests/results/having.sql.out | 2 +-
 .../results/higher-order-functions.sql.out | 2 +-
 .../sql-tests/results/ilike-all.sql.out | 7 +-
 .../sql-tests/results/ilike-any.sql.out | 7 +-
 .../sql-tests/results/inline-table.sql.out | 12 +-
 .../sql-tests/results/intersect-all.sql.out | 4 +-
 .../sql-tests/results/interval.sql.out | 465 +++---
 .../sql-tests/results/join-lateral.sql.out | 41 +-
 .../sql-tests/results/json-functions.sql.out | 47 +-
 .../sql-tests/results/like-all.sql.out | 7 +-
 .../sql-tests/results/like-any.sql.out | 7 +-
 .../resources/sql-tests/results/limit.sql.out | 12 +-
 .../sql-tests/results/literals.sql.out | 105 +--
 .../resources/sql-tests/results/map.sql.out | 4 +-
 .../sql-tests/results/misc-functions.sql.out | 28 +-
 .../sql-tests/results/natural-join.sql.out | 2 +-
 .../results/order-by-ordinal.sql.out | 6 +-
 .../sql-tests/results/percentiles.sql.out | 24 +-
 .../resources/sql-tests/results/pivot.sql.out | 18 +-
 .../postgreSQL/aggregates_part1.sql.out | 4 +-
 .../postgreSQL/aggregates_part3.sql.out | 2 +-
 .../results/postgreSQL/boolean.sql.out | 81 +-
 .../results/postgreSQL/create_view.sql.out | 36 +-
 .../sql-tests/results/postgreSQL/date.sql.out | 84 +--
 .../results/postgreSQL/float4.sql.out | 26 +-
 .../results/postgreSQL/float8.sql.out | 22 +-
 .../sql-tests/results/postgreSQL/int4.sql.out | 42 +-
 .../sql-tests/results/postgreSQL/int8.sql.out | 56 +-
 .../results/postgreSQL/interval.sql.out | 70 +-
 .../sql-tests/results/postgreSQL/join.sql.out | 20 +-
 .../results/postgreSQL/limit.sql.out | 4 +-
 .../results/postgreSQL/numeric.sql.out | 11 +-
 .../results/postgreSQL/select_having.sql.out | 11 +-
 .../postgreSQL/select_implicit.sql.out | 12 +-
 .../results/postgreSQL/strings.sql.out | 16 +-
 .../sql-tests/results/postgreSQL/text.sql.out | 12 +-
 .../results/postgreSQL/union.sql.out | 65 +-
 .../results/postgreSQL/window_part2.sql.out | 15 +-
 .../results/postgreSQL/window_part3.sql.out | 44 +-
 .../results/postgreSQL/window_part4.sql.out | 6 +-
 .../sql-tests/results/postgreSQL/with.sql.out | 18 +-
 .../results/query_regex_column.sql.out | 16 +-
 .../sql-tests/results/random.sql.out | 4 +-
 .../results/regexp-functions.sql.out | 14 +-
 .../sql-tests/results/show-tables.sql.out | 24 +-
 .../sql-tests/results/show-views.sql.out | 2 +-
 .../sql-tests/results/show_columns.sql.out | 10 +-
 .../sql-compatibility-functions.sql.out | 2 +-
 .../results/string-functions.sql.out | 16 +-
 .../subquery/in-subquery/in-basic.sql.out | 10 +-
 .../invalid-correlation.sql.out | 19 +-
 .../subq-input-typecheck.sql.out | 34 +-
 .../sql-tests/results/table-aliases.sql.out | 6 +-
 .../results/table-valued-functions.sql.out | 30 +-
 .../results/tablesample-negative.sql.out | 14 +-
 .../sql-tests/results/timestamp-ntz.sql.out | 2 +-
 .../sql-tests/results/timestamp.sql.out | 55 +-
 .../timestampNTZ/timestamp-ansi.sql.out | 48 +-
 .../results/timestampNTZ/timestamp.sql.out | 40 +-
 .../sql-tests/results/timezone.sql.out | 42 +-
 .../sql-tests/results/transform.sql.out | 84 +--
 .../sql-tests/results/try_arithmetic.sql.out | 2 +-
 .../results/try_datetime_functions.sql.out | 3 +-
 .../sql-tests/results/try_element_at.sql.out | 2 +-
 .../native/booleanEquality.sql.out | 48 +-
 .../native/caseWhenCoercion.sql.out | 140 ++--
 .../native/dateTimeOperations.sql.out | 54 +-
 .../native/decimalPrecision.sql.out | 704 +++++++++---------
 .../typeCoercion/native/division.sql.out | 160 ++--
 .../typeCoercion/native/ifCoercion.sql.out | 140 ++--
 .../typeCoercion/native/inConversion.sql.out | 280 +++----
 .../typeCoercion/native/mapZipWith.sql.out | 4 +-
 .../typeCoercion/native/mapconcat.sql.out | 10 +-
 .../native/promoteStrings.sql.out | 94 +--
 .../native/stringCastAndExpressions.sql.out | 15 +-
 .../native/widenSetOperationTypes.sql.out | 140 ++--
 .../native/windowFrameCoercion.sql.out | 8 +-
 .../resources/sql-tests/results/udaf.sql.out | 4 +-
 .../postgreSQL/udf-aggregates_part1.sql.out | 4 +-
 .../postgreSQL/udf-aggregates_part3.sql.out | 2 +-
 .../results/udf/postgreSQL/udf-join.sql.out | 20 +-
 .../udf/postgreSQL/udf-select_having.sql.out | 11 +-
 .../postgreSQL/udf-select_implicit.sql.out | 12 +-
 .../results/udf/udf-except-all.sql.out | 4 +-
 .../sql-tests/results/udf/udf-except.sql.out | 10 +-
 .../results/udf/udf-group-analytics.sql.out | 12 +-
 .../results/udf/udf-group-by.sql.out | 37 +-
 .../results/udf/udf-inline-table.sql.out | 12 +-
 .../results/udf/udf-intersect-all.sql.out | 4 +-
 .../sql-tests/results/udf/udf-pivot.sql.out | 18 +-
 .../sql-tests/results/udf/udf-udaf.sql.out | 4 +-
 .../sql-tests/results/udf/udf-window.sql.out | 22 +-
 .../sql-tests/results/window.sql.out | 48 +-
 .../apache/spark/sql/SQLQueryTestHelper.scala | 35 +-
 134 files changed, 2030 insertions(+), 3850 deletions(-)

diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/array.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/array.sql.out
index 2c5cea7bf8505..0337f7421073d 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/array.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/array.sql.out
@@ -128,7 +128,7 @@ select sort_array(array('b', 'd'), '1')
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'sort_array(array('b', 'd'), '1')' due to data type mismatch: Sort order in second argument requires a boolean literal.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -137,7 +137,7 @@ select sort_array(array('b', 'd'), cast(NULL as boolean))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'sort_array(array('b', 'd'), CAST(NULL AS BOOLEAN))' due to data type mismatch: Sort order in second argument requires a boolean literal.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -165,10 +165,7 @@ select element_at(array(1, 2, 3), 5)
 struct<>
 -- !query output
 org.apache.spark.SparkArrayIndexOutOfBoundsException
-[INVALID_ARRAY_INDEX_IN_ELEMENT_AT] The index 5 is out of bounds. The array has 3 elements. Use `try_element_at` to tolerate accessing element at invalid index and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-select element_at(array(1, 2, 3), 5)
-       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+{"errorClass":"INVALID_ARRAY_INDEX_IN_ELEMENT_AT","messageParameters":["5","3","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":35,"fragment":"element_at(array(1, 2, 3), 5"}]}
 
 
 -- !query
@@ -177,10 +174,7 @@ select element_at(array(1, 2, 3), -5)
 struct<>
 -- !query output
 org.apache.spark.SparkArrayIndexOutOfBoundsException
-[INVALID_ARRAY_INDEX_IN_ELEMENT_AT] The index -5 is out of bounds. The array has 3 elements. Use `try_element_at` to tolerate accessing element at invalid index and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-select element_at(array(1, 2, 3), -5)
-       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+{"errorClass":"INVALID_ARRAY_INDEX_IN_ELEMENT_AT","messageParameters":["-5","3","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":36,"fragment":"element_at(array(1, 2, 3), -5"}]}
 
 
 -- !query
@@ -189,7 +183,7 @@ select element_at(array(1, 2, 3), 0)
 struct<>
 -- !query output
 org.apache.spark.SparkRuntimeException
-[ELEMENT_AT_BY_INDEX_ZERO] The index 0 is invalid. An index shall be either < 0 or > 0 (the first element has index 1).
+{"errorClass":"ELEMENT_AT_BY_INDEX_ZERO","messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -197,11 +191,8 @@ select elt(4, '123', '456')
 -- !query schema
 struct<>
 -- !query output
-org.apache.spark.SparkArrayIndexOutOfBoundsException
-[INVALID_ARRAY_INDEX] The index 4 is out of bounds. The array has 2 elements. Use `try_element_at` and increase the array index by 1(the starting array index is 1 for `try_element_at`) to tolerate accessing element at invalid index and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-select elt(4, '123', '456')
-       ^^^^^^^^^^^^^^^^^^^^
+org.apache.spark.SparkException
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -209,11 +200,8 @@ select elt(0, '123', '456')
 -- !query schema
 struct<>
 -- !query output
-org.apache.spark.SparkArrayIndexOutOfBoundsException
-[INVALID_ARRAY_INDEX] The index 0 is out of bounds. The array has 2 elements. Use `try_element_at` and increase the array index by 1(the starting array index is 1 for `try_element_at`) to tolerate accessing element at invalid index and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-select elt(0, '123', '456')
-       ^^^^^^^^^^^^^^^^^^^^
+org.apache.spark.SparkException
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -221,11 +209,8 @@ select elt(-1, '123', '456')
 -- !query schema
 struct<>
 -- !query output
-org.apache.spark.SparkArrayIndexOutOfBoundsException
-[INVALID_ARRAY_INDEX] The index -1 is out of bounds. The array has 2 elements. Use `try_element_at` and increase the array index by 1(the starting array index is 1 for `try_element_at`) to tolerate accessing element at invalid index and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-select elt(-1, '123', '456')
-       ^^^^^^^^^^^^^^^^^^^^^
+org.apache.spark.SparkException
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -266,10 +251,7 @@ select array(1, 2, 3)[5]
 struct<>
 -- !query output
 org.apache.spark.SparkArrayIndexOutOfBoundsException
-[INVALID_ARRAY_INDEX] The index 5 is out of bounds. The array has 3 elements. Use `try_element_at` and increase the array index by 1(the starting array index is 1 for `try_element_at`) to tolerate accessing element at invalid index and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-select array(1, 2, 3)[5]
-       ^^^^^^^^^^^^^^^^^
+{"errorClass":"INVALID_ARRAY_INDEX","messageParameters":["5","3","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":23,"fragment":"array(1, 2, 3)[5"}]}
 
 
 -- !query
@@ -278,10 +260,7 @@ select array(1, 2, 3)[-1]
 struct<>
 -- !query output
 org.apache.spark.SparkArrayIndexOutOfBoundsException
-[INVALID_ARRAY_INDEX] The index -1 is out of bounds. The array has 3 elements. Use `try_element_at` and increase the array index by 1(the starting array index is 1 for `try_element_at`) to tolerate accessing element at invalid index and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-select array(1, 2, 3)[-1]
-       ^^^^^^^^^^^^^^^^^^
+{"errorClass":"INVALID_ARRAY_INDEX","messageParameters":["-1","3","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":24,"fragment":"array(1, 2, 3)[-1"}]}
 
 
 -- !query
@@ -322,7 +301,7 @@ select array_size(map('a', 1, 'b', 2))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'array_size(map('a', 1, 'b', 2))' due to data type mismatch: argument 1 requires array type, however, 'map('a', 1, 'b', 2)' is of map type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -355,10 +334,7 @@ select element_at(array(1, 2, 3), 5)
 struct<>
 -- !query output
 org.apache.spark.SparkArrayIndexOutOfBoundsException
-[INVALID_ARRAY_INDEX_IN_ELEMENT_AT] The index 5 is out of bounds. The array has 3 elements. Use `try_element_at` to tolerate accessing element at invalid index and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-select element_at(array(1, 2, 3), 5)
-       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+{"errorClass":"INVALID_ARRAY_INDEX_IN_ELEMENT_AT","messageParameters":["5","3","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":35,"fragment":"element_at(array(1, 2, 3), 5"}]}
 
 
 -- !query
@@ -367,10 +343,7 @@ select element_at(array(1, 2, 3), -5)
 struct<>
 -- !query output
 org.apache.spark.SparkArrayIndexOutOfBoundsException
-[INVALID_ARRAY_INDEX_IN_ELEMENT_AT] The index -5 is out of bounds. The array has 3 elements. Use `try_element_at` to tolerate accessing element at invalid index and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-select element_at(array(1, 2, 3), -5)
-       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+{"errorClass":"INVALID_ARRAY_INDEX_IN_ELEMENT_AT","messageParameters":["-5","3","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":36,"fragment":"element_at(array(1, 2, 3), -5"}]}
 
 
 -- !query
@@ -379,7 +352,7 @@ select element_at(array(1, 2, 3), 0)
 struct<>
 -- !query output
 org.apache.spark.SparkRuntimeException
-[ELEMENT_AT_BY_INDEX_ZERO] The index 0 is invalid. An index shall be either < 0 or > 0 (the first element has index 1).
+{"errorClass":"ELEMENT_AT_BY_INDEX_ZERO","messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -387,11 +360,8 @@ select elt(4, '123', '456')
 -- !query schema
 struct<>
 -- !query output
-org.apache.spark.SparkArrayIndexOutOfBoundsException
-[INVALID_ARRAY_INDEX] The index 4 is out of bounds. The array has 2 elements. Use `try_element_at` and increase the array index by 1(the starting array index is 1 for `try_element_at`) to tolerate accessing element at invalid index and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-select elt(4, '123', '456')
-       ^^^^^^^^^^^^^^^^^^^^
+org.apache.spark.SparkException
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -399,11 +369,8 @@ select elt(0, '123', '456')
 -- !query schema
 struct<>
 -- !query output
-org.apache.spark.SparkArrayIndexOutOfBoundsException
-[INVALID_ARRAY_INDEX] The index 0 is out of bounds. The array has 2 elements. Use `try_element_at` and increase the array index by 1(the starting array index is 1 for `try_element_at`) to tolerate accessing element at invalid index and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-select elt(0, '123', '456')
-       ^^^^^^^^^^^^^^^^^^^^
+org.apache.spark.SparkException
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -411,8 +378,5 @@ select elt(-1, '123', '456')
 -- !query schema
 struct<>
 -- !query output
-org.apache.spark.SparkArrayIndexOutOfBoundsException
-[INVALID_ARRAY_INDEX] The index -1 is out of bounds. The array has 2 elements. Use `try_element_at` and increase the array index by 1(the starting array index is 1 for `try_element_at`) to tolerate accessing element at invalid index and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-select elt(-1, '123', '456')
-       ^^^^^^^^^^^^^^^^^^^^^
+org.apache.spark.SparkException
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out
index c4b454b135c9e..469f775932e3c 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out
@@ -5,10 +5,7 @@ SELECT CAST('1.23' AS int)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-[CAST_INVALID_INPUT] The value '1.23' of the type "STRING" cannot be cast to "INT" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-SELECT CAST('1.23' AS int)
-       ^^^^^^^^^^^^^^^^^^^
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'1.23'","\"STRING\"","\"INT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":25,"fragment":"CAST('1.23' AS int"}]}
 
 
 -- !query
@@ -17,10 +14,7 @@ SELECT CAST('1.23' AS long)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-[CAST_INVALID_INPUT] The value '1.23' of the type "STRING" cannot be cast to "BIGINT" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-SELECT CAST('1.23' AS long)
-       ^^^^^^^^^^^^^^^^^^^^
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'1.23'","\"STRING\"","\"BIGINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":26,"fragment":"CAST('1.23' AS long"}]}
 
 
 -- !query
@@ -29,10 +23,7 @@ SELECT CAST('-4.56' AS int)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-[CAST_INVALID_INPUT] The value '-4.56' of the type "STRING" cannot be cast to "INT" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-SELECT CAST('-4.56' AS int)
-       ^^^^^^^^^^^^^^^^^^^^
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'-4.56'","\"STRING\"","\"INT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":26,"fragment":"CAST('-4.56' AS int"}]}
 
 
 -- !query
@@ -41,10 +32,7 @@ SELECT CAST('-4.56' AS long)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-[CAST_INVALID_INPUT] The value '-4.56' of the type "STRING" cannot be cast to "BIGINT" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-SELECT CAST('-4.56' AS long)
-       ^^^^^^^^^^^^^^^^^^^^^
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'-4.56'","\"STRING\"","\"BIGINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":27,"fragment":"CAST('-4.56' AS long"}]}
 
 
 -- !query
@@ -53,10 +41,7 @@ SELECT CAST('abc' AS int)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-[CAST_INVALID_INPUT] The value 'abc' of the type "STRING" cannot be cast to "INT" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-SELECT CAST('abc' AS int)
-       ^^^^^^^^^^^^^^^^^^
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'abc'","\"STRING\"","\"INT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":24,"fragment":"CAST('abc' AS int"}]}
 
 
 -- !query
@@ -65,10 +50,7 @@ SELECT CAST('abc' AS long)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-[CAST_INVALID_INPUT] The value 'abc' of the type "STRING" cannot be cast to "BIGINT" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-SELECT CAST('abc' AS long)
-       ^^^^^^^^^^^^^^^^^^^
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'abc'","\"STRING\"","\"BIGINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":25,"fragment":"CAST('abc' AS long"}]}
 
 
 -- !query
@@ -77,10 +59,7 @@ SELECT CAST('abc' AS float)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-[CAST_INVALID_INPUT] The value 'abc' of the type "STRING" cannot be cast to "FLOAT" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-SELECT CAST('abc' AS float)
-       ^^^^^^^^^^^^^^^^^^^^
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'abc'","\"STRING\"","\"FLOAT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":26,"fragment":"CAST('abc' AS float"}]}
 
 
 -- !query
@@ -89,10 +68,7 @@ SELECT CAST('abc' AS double)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-[CAST_INVALID_INPUT] The value 'abc' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-SELECT CAST('abc' AS double)
-       ^^^^^^^^^^^^^^^^^^^^^
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'abc'","\"STRING\"","\"DOUBLE\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":27,"fragment":"CAST('abc' AS double"}]}
 
 
 -- !query
@@ -101,10 +77,7 @@ SELECT CAST('1234567890123' AS int)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-[CAST_INVALID_INPUT] The value '1234567890123' of the type "STRING" cannot be cast to "INT" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-SELECT CAST('1234567890123' AS int)
-       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'1234567890123'","\"STRING\"","\"INT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":34,"fragment":"CAST('1234567890123' AS int"}]}
 
 
 -- !query
@@ -113,10 +86,7 @@ SELECT CAST('12345678901234567890123' AS long)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-[CAST_INVALID_INPUT] The value '12345678901234567890123' of the type "STRING" cannot be cast to "BIGINT" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-SELECT CAST('12345678901234567890123' AS long)
-       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'12345678901234567890123'","\"STRING\"","\"BIGINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":45,"fragment":"CAST('12345678901234567890123' AS long"}]}
 
 
 -- !query
@@ -125,10 +95,7 @@ SELECT CAST('' AS int)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-[CAST_INVALID_INPUT] The value '' of the type "STRING" cannot be cast to "INT" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-SELECT CAST('' AS int)
-       ^^^^^^^^^^^^^^^
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["''","\"STRING\"","\"INT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":21,"fragment":"CAST('' AS int"}]}
 
 
 -- !query
@@ -137,10 +104,7 @@ SELECT CAST('' AS long)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-[CAST_INVALID_INPUT] The value '' of the type "STRING" cannot be cast to "BIGINT" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-SELECT CAST('' AS long)
-       ^^^^^^^^^^^^^^^^
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["''","\"STRING\"","\"BIGINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":22,"fragment":"CAST('' AS long"}]}
 
 
 -- !query
@@ -149,10 +113,7 @@ SELECT CAST('' AS float)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-[CAST_INVALID_INPUT] The value '' of the type "STRING" cannot be cast to "FLOAT" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-SELECT CAST('' AS float)
-       ^^^^^^^^^^^^^^^^^
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["''","\"STRING\"","\"FLOAT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":23,"fragment":"CAST('' AS float"}]}
 
 
 -- !query
@@ -161,10 +122,7 @@ SELECT CAST('' AS double)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-[CAST_INVALID_INPUT] The value '' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-SELECT CAST('' AS double)
-       ^^^^^^^^^^^^^^^^^^
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["''","\"STRING\"","\"DOUBLE\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":24,"fragment":"CAST('' AS double"}]}
 
 
 -- !query
@@ -189,10 +147,7 @@ SELECT CAST('123.a' AS int)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-[CAST_INVALID_INPUT] The value '123.a' of the type "STRING" cannot be cast to "INT" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-SELECT CAST('123.a' AS int)
-       ^^^^^^^^^^^^^^^^^^^^
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'123.a'","\"STRING\"","\"INT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":26,"fragment":"CAST('123.a' AS int"}]}
 
 
 -- !query
@@ -201,10 +156,7 @@ SELECT CAST('123.a' AS long)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-[CAST_INVALID_INPUT] The value '123.a' of the type "STRING" cannot be cast to "BIGINT" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-SELECT CAST('123.a' AS long)
-       ^^^^^^^^^^^^^^^^^^^^^
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'123.a'","\"STRING\"","\"BIGINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":27,"fragment":"CAST('123.a' AS long"}]}
 
 
 -- !query
@@ -213,10 +165,7 @@ SELECT CAST('123.a' AS float)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-[CAST_INVALID_INPUT] The value '123.a' of the type "STRING" cannot be cast to "FLOAT" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-SELECT CAST('123.a' AS float)
-       ^^^^^^^^^^^^^^^^^^^^^^
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'123.a'","\"STRING\"","\"FLOAT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":28,"fragment":"CAST('123.a' AS float"}]}
 
 
 -- !query
@@ -225,10 +174,7 @@ SELECT CAST('123.a' AS double)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-[CAST_INVALID_INPUT] The value '123.a' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-SELECT CAST('123.a' AS double)
-       ^^^^^^^^^^^^^^^^^^^^^^^
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'123.a'","\"STRING\"","\"DOUBLE\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":29,"fragment":"CAST('123.a' AS double"}]}
 
 
 -- !query
@@ -245,10 +191,7 @@ SELECT CAST('-2147483649' AS int)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-[CAST_INVALID_INPUT] The value '-2147483649' of the type "STRING" cannot be cast to "INT" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-SELECT CAST('-2147483649' AS int)
-       ^^^^^^^^^^^^^^^^^^^^^^^^^^
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'-2147483649'","\"STRING\"","\"INT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":32,"fragment":"CAST('-2147483649' AS int"}]}
 
 
 -- !query
@@ -265,10 +208,7 @@ SELECT CAST('2147483648' AS int)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-[CAST_INVALID_INPUT] The value '2147483648' of the type "STRING" cannot be cast to "INT" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-SELECT CAST('2147483648' AS int)
-       ^^^^^^^^^^^^^^^^^^^^^^^^^
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'2147483648'","\"STRING\"","\"INT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":31,"fragment":"CAST('2147483648' AS int"}]}
 
 
 -- !query
@@ -285,10 +225,7 @@ SELECT CAST('-9223372036854775809' AS long)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-[CAST_INVALID_INPUT] The value '-9223372036854775809' of the type "STRING" cannot be cast to "BIGINT" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-SELECT CAST('-9223372036854775809' AS long)
-       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'-9223372036854775809'","\"STRING\"","\"BIGINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":42,"fragment":"CAST('-9223372036854775809' AS long"}]}
 
 
 -- !query
@@ -305,10 +242,7 @@ SELECT CAST('9223372036854775808' AS long)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-[CAST_INVALID_INPUT] The value '9223372036854775808' of the type "STRING" cannot be cast to "BIGINT" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-SELECT CAST('9223372036854775808' AS long)
-       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'9223372036854775808'","\"STRING\"","\"BIGINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":41,"fragment":"CAST('9223372036854775808' AS long"}]}
 
 
 -- !query
@@ -325,10 +259,7 @@ SELECT HEX(CAST(CAST(123 AS byte) AS binary))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(CAST(123 AS TINYINT) AS BINARY)' due to data type mismatch:
- cannot cast tinyint to binary with ANSI mode on.
- If you have to cast tinyint to binary, you can set spark.sql.ansi.enabled as false.
-; line 1 pos 11
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -337,10 +268,7 @@ SELECT HEX(CAST(CAST(-123 AS byte) AS binary))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(CAST(-123 AS TINYINT) AS BINARY)' due to data type mismatch:
- cannot cast tinyint to binary with ANSI mode on.
- If you have to cast tinyint to binary, you can set spark.sql.ansi.enabled as false.
-; line 1 pos 11
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -349,10 +277,7 @@ SELECT HEX(CAST(123S AS binary))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(123S AS BINARY)' due to data type mismatch:
- cannot cast smallint to binary with ANSI mode on.
- If you have to cast smallint to binary, you can set spark.sql.ansi.enabled as false.
-; line 1 pos 11
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -361,10 +286,7 @@ SELECT HEX(CAST(-123S AS binary))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(-123S AS BINARY)' due to data type mismatch:
- cannot cast smallint to binary with ANSI mode on.
- If you have to cast smallint to binary, you can set spark.sql.ansi.enabled as false.
-; line 1 pos 11
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -373,10 +295,7 @@ SELECT HEX(CAST(123 AS binary))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(123 AS BINARY)' due to data type mismatch:
- cannot cast int to binary with ANSI mode on.
- If you have to cast int to binary, you can set spark.sql.ansi.enabled as false.
-; line 1 pos 11
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -385,10 +304,7 @@ SELECT HEX(CAST(-123 AS binary))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(-123 AS BINARY)' due to data type mismatch:
- cannot cast int to binary with ANSI mode on.
- If you have to cast int to binary, you can set spark.sql.ansi.enabled as false.
-; line 1 pos 11
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -397,10 +313,7 @@ SELECT HEX(CAST(123L AS binary))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(123L AS BINARY)' due to data type mismatch:
- cannot cast bigint to binary with ANSI mode on.
- If you have to cast bigint to binary, you can set spark.sql.ansi.enabled as false.
-; line 1 pos 11
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -409,10 +322,7 @@ SELECT HEX(CAST(-123L AS binary))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(-123L AS BINARY)' due to data type mismatch:
- cannot cast bigint to binary with ANSI mode on.
- If you have to cast bigint to binary, you can set spark.sql.ansi.enabled as false.
-; line 1 pos 11
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -470,12 +380,7 @@ SELECT CAST(interval 3 month 1 hour AS string)
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-Cannot mix year-month and day-time fields: interval 3 month 1 hour(line 1, pos 12)
-
-== SQL ==
-SELECT CAST(interval 3 month 1 hour AS string)
-------------^^^
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -564,10 +469,7 @@ select cast('1中文' as tinyint)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-[CAST_INVALID_INPUT] The value '1中文' of the type "STRING" cannot be cast to "TINYINT" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-select cast('1中文' as tinyint)
-       ^^^^^^^^^^^^^^^^^^^^^^
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'1中文'","\"STRING\"","\"TINYINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":28,"fragment":"cast('1中文' as tinyint"}]}
 
 
 -- !query
@@ -576,10 +478,7 @@ select cast('1中文' as smallint)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-[CAST_INVALID_INPUT] The value '1中文' of the type "STRING" cannot be cast to "SMALLINT" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-select cast('1中文' as smallint)
-       ^^^^^^^^^^^^^^^^^^^^^^^
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'1中文'","\"STRING\"","\"SMALLINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":29,"fragment":"cast('1中文' as smallint"}]}
 
 
 -- !query
@@ -588,10 +487,7 @@ select cast('1中文' as INT)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-[CAST_INVALID_INPUT] The value '1中文' of the type "STRING" cannot be cast to "INT" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-select cast('1中文' as INT)
-       ^^^^^^^^^^^^^^^^^^
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'1中文'","\"STRING\"","\"INT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":24,"fragment":"cast('1中文' as INT"}]}
 
 
 -- !query
@@ -600,10 +496,7 @@ select cast('中文1' as bigint)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-[CAST_INVALID_INPUT] The value '中文1' of the type "STRING" cannot be cast to "BIGINT" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-select cast('中文1' as bigint)
-       ^^^^^^^^^^^^^^^^^^^^^
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'中文1'","\"STRING\"","\"BIGINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":27,"fragment":"cast('中文1' as bigint"}]}
 
 
 -- !query
@@ -612,10 +505,7 @@ select cast('1中文' as bigint)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-[CAST_INVALID_INPUT] The value '1中文' of the type "STRING" cannot be cast to "BIGINT" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-select cast('1中文' as bigint)
-       ^^^^^^^^^^^^^^^^^^^^^
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'1中文'","\"STRING\"","\"BIGINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":27,"fragment":"cast('1中文' as bigint"}]}
 
 
 -- !query
@@ -642,11 +532,7 @@ select cast('\t\n xyz \t\r' as boolean)
 struct<>
 -- !query output
 org.apache.spark.SparkRuntimeException
-[CAST_INVALID_INPUT] The value '	
- xyz 	' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-select cast('\t\n xyz \t\r' as boolean)
-       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'\t\n xyz \t\r'","\"STRING\"","\"BOOLEAN\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":38,"fragment":"cast('\\t\\n xyz \\t\\r' as boolean"}]}
 
 
 -- !query
@@ -663,10 +549,7 @@ select cast('123.45' as decimal(4, 2))
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded, 123.45, 5, 2) cannot be represented as Decimal(4, 2). If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-select cast('123.45' as decimal(4, 2))
-       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":["Decimal(expanded, 123.45, 5, 2)","4","2","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":37,"fragment":"cast('123.45' as decimal(4, 2)"}]}
 
 
 -- !query
@@ -675,10 +558,7 @@ select cast('xyz' as decimal(4, 2))
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-[CAST_INVALID_INPUT] The value 'xyz' of the type "STRING" cannot be cast to "DECIMAL(4,2)" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-select cast('xyz' as decimal(4, 2))
-       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'xyz'","\"STRING\"","\"DECIMAL(4,2)\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":34,"fragment":"cast('xyz' as decimal(4, 2)"}]}
 
 
 -- !query
@@ -695,10 +575,7 @@ select cast('a' as date)
 struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
-[CAST_INVALID_INPUT] The value 'a' of the type "STRING" cannot be cast to "DATE" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-select cast('a' as date)
-       ^^^^^^^^^^^^^^^^^
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'a'","\"STRING\"","\"DATE\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":23,"fragment":"cast('a' as date"}]}
 
 
 -- !query
@@ -715,10 +592,7 @@ select cast('a' as timestamp)
 struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
-[CAST_INVALID_INPUT] The value 'a' of the type "STRING" cannot be cast to "TIMESTAMP" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-select cast('a' as timestamp)
-       ^^^^^^^^^^^^^^^^^^^^^^
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'a'","\"STRING\"","\"TIMESTAMP\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":28,"fragment":"cast('a' as timestamp"}]}
 
 
 -- !query
@@ -735,10 +609,7 @@ select cast('a' as timestamp_ntz)
 struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
-[CAST_INVALID_INPUT] The value 'a' of the type "STRING" cannot be cast to "TIMESTAMP_NTZ" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-select cast('a' as timestamp_ntz)
-       ^^^^^^^^^^^^^^^^^^^^^^^^^^
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'a'","\"STRING\"","\"TIMESTAMP_NTZ\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":32,"fragment":"cast('a' as timestamp_ntz"}]}
 
 
 -- !query
@@ -747,10 +618,7 @@ select cast(cast('inf' as double) as timestamp)
 struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
-[CAST_INVALID_INPUT] The value Infinity of the type "DOUBLE" cannot be cast to "TIMESTAMP" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-select cast(cast('inf' as double) as timestamp)
-       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["Infinity","\"DOUBLE\"","\"TIMESTAMP\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":46,"fragment":"cast(cast('inf' as double) as timestamp"}]}
 
 
 -- !query
@@ -759,10 +627,7 @@ select cast(cast('inf' as float) as timestamp)
 struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
-[CAST_INVALID_INPUT] The value Infinity of the type "DOUBLE" cannot be cast to "TIMESTAMP" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-select cast(cast('inf' as float) as timestamp)
-       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["Infinity","\"DOUBLE\"","\"TIMESTAMP\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":45,"fragment":"cast(cast('inf' as float) as timestamp"}]}
 
 
 -- !query
@@ -803,7 +668,7 @@ select cast(interval '23:59:59' hour to second as smallint)
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-[CAST_OVERFLOW] The value INTERVAL '23:59:59' HOUR TO SECOND of the type "INTERVAL HOUR TO SECOND" cannot be cast to "SMALLINT" due to an overflow. Use `try_cast` to tolerate overflow and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
+{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":["INTERVAL '23:59:59' HOUR TO SECOND","\"INTERVAL HOUR TO SECOND\"","\"SMALLINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[]}
 
 
 -- !query
@@ -828,7 +693,7 @@ select cast(interval '-1000' month as tinyint)
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-[CAST_OVERFLOW] The value INTERVAL '-1000' MONTH of the type "INTERVAL MONTH" cannot be cast to "TINYINT" due to an overflow. Use `try_cast` to tolerate overflow and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
+{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":["INTERVAL '-1000' MONTH","\"INTERVAL MONTH\"","\"TINYINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[]}
 
 
 -- !query
@@ -837,7 +702,7 @@ select cast(interval '1000000' second as smallint)
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-[CAST_OVERFLOW] The value INTERVAL '1000000' SECOND of the type "INTERVAL SECOND" cannot be cast to "SMALLINT" due to an overflow. Use `try_cast` to tolerate overflow and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
+{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":["INTERVAL '1000000' SECOND","\"INTERVAL SECOND\"","\"SMALLINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[]}
 
 
 -- !query
@@ -902,7 +767,7 @@ select cast(2147483647 as interval year)
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-[CAST_OVERFLOW] The value 2147483647 of the type "INT" cannot be cast to "INTERVAL YEAR" due to an overflow. Use `try_cast` to tolerate overflow and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
+{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":["2147483647","\"INT\"","\"INTERVAL YEAR\"","\"spark.sql.ansi.enabled\""],"queryContext":[]}
 
 
 -- !query
@@ -911,7 +776,7 @@ select cast(-9223372036854775808L as interval day)
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-[CAST_OVERFLOW] The value -9223372036854775808L of the type "BIGINT" cannot be cast to "INTERVAL DAY" due to an overflow. Use `try_cast` to tolerate overflow and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
+{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":["-9223372036854775808L","\"BIGINT\"","\"INTERVAL DAY\"","\"spark.sql.ansi.enabled\""],"queryContext":[]}
 
 
 -- !query
@@ -976,7 +841,4 @@ select cast(interval '10.123' second as decimal(1, 0))
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(compact, 10, 18, 6) cannot be represented as Decimal(1, 0). If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-select cast(interval '10.123' second as decimal(1, 0))
-       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":["Decimal(compact, 10, 18, 6)","1","0","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":53,"fragment":"cast(interval '10.123' second as decimal(1, 0)"}]}
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
index a6c8b273ba68f..c746fa1cc1627 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
@@ -21,12 +21,7 @@ select date '2020-01-01中文'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-Cannot parse the DATE value: 2020-01-01中文(line 1, pos 7)
-
-== SQL ==
-select date '2020-01-01中文'
--------^^^
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -61,12 +56,7 @@ select date'015'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-Cannot parse the DATE value: 015(line 1, pos 7)
-
-== SQL ==
-select date'015'
--------^^^
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -75,12 +65,7 @@ select date'2021-4294967297-11'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-Cannot parse the DATE value: 2021-4294967297-11(line 1, pos 7)
-
-== SQL ==
-select date'2021-4294967297-11'
--------^^^
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -137,7 +122,7 @@ select to_date("02-29", "MM-dd")
 struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
-[CANNOT_PARSE_TIMESTAMP] Invalid date 'February 29' as '1970' is not a leap year. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
+{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Invalid date 'February 29' as '1970' is not a leap year","\"spark.sql.ansi.enabled\""],"queryContext":[]}
 
 
 -- !query
@@ -229,10 +214,7 @@ select next_day("xx", "Mon")
 struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
-[CAST_INVALID_INPUT] The value 'xx' of the type "STRING" cannot be cast to "DATE" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) ==
-select next_day("xx", "Mon")
-       ^^^^^^^^^^^^^^^^^^^^^
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'xx'","\"STRING\"","\"DATE\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":27,"fragment":"next_day(\"xx\", \"Mon\""}]}
 
 
 -- !query
@@ -289,7 +271,7 @@ select date_add('2011-11-11', 1L)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1L)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1L' is of bigint type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -298,7 +280,7 @@ select date_add('2011-11-11', 1.0)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1.0BD)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -307,7 +289,7 @@ select date_add('2011-11-11', 1E1)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -324,10 +306,7 @@ select date_add('2011-11-11', '1.2')
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-[CAST_INVALID_INPUT] The value '1.2' of the type "STRING" cannot be cast to "INT" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) == -select date_add('2011-11-11', '1.2') - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'1.2'","\"STRING\"","\"INT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":35,"fragment":"date_add('2011-11-11', '1.2'"}]} -- !query @@ -400,7 +379,7 @@ select date_sub('2011-11-11', 1L) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 1L)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1L' is of bigint type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -409,7 +388,7 @@ select date_sub('2011-11-11', 1.0) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 1.0BD)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -418,7 +397,7 @@ select date_sub('2011-11-11', 1E1) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -435,10 +414,7 @@ select date_sub(date'2011-11-11', '1.2') struct<> -- !query output org.apache.spark.SparkNumberFormatException -[CAST_INVALID_INPUT] The value '1.2' of the type "STRING" cannot be cast to "INT" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. 
-== SQL(line 1, position 8) == -select date_sub(date'2011-11-11', '1.2') - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'1.2'","\"STRING\"","\"INT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":39,"fragment":"date_sub(date'2011-11-11', '1.2'"}]} -- !query @@ -511,7 +487,7 @@ select date '2011-11-11' + 1E1 struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'date_add(DATE '2011-11-11', 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -600,7 +576,7 @@ select date'2011-11-11' + '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'date_add(DATE '2011-11-11', CAST('1' AS DATE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DATE)' is of date type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -609,7 +585,7 @@ select '1' + date'2011-11-11' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'date_add(CAST('1' AS DATE), DATE '2011-11-11')' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'DATE '2011-11-11'' is of date type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -647,8 +623,7 @@ select to_date('26/October/2015', 'dd/MMMMM/yyyy') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'dd/MMMMM/yyyy'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -657,8 +632,7 @@ select from_json('{"d":"26/October/2015"}', 'd Date', map('dateFormat', 'dd/MMMM struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'dd/MMMMM/yyyy'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -667,8 +641,7 @@ select from_csv('26/October/2015', 'd Date', map('dateFormat', 'dd/MMMMM/yyyy')) struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 
1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'dd/MMMMM/yyyy'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out index eb93ac342ed49..e554d681cdd2a 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out @@ -14,8 +14,7 @@ select to_timestamp('1', 'yy') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to parse '1' in the new parser. You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0, or set to "CORRECTED" and treat it as an invalid datetime string. +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":["'1'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -24,7 +23,7 @@ select to_timestamp('-12', 'yy') struct<> -- !query output org.apache.spark.SparkDateTimeException -[CANNOT_PARSE_TIMESTAMP] Text '-12' could not be parsed at index 0. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text '-12' could not be parsed at index 0","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -33,8 +32,7 @@ select to_timestamp('123', 'yy') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to parse '123' in the new parser. You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0, or set to "CORRECTED" and treat it as an invalid datetime string. +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":["'123'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -43,8 +41,7 @@ select to_timestamp('1', 'yyy') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to parse '1' in the new parser. You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0, or set to "CORRECTED" and treat it as an invalid datetime string. 
+{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":["'1'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -53,8 +50,7 @@ select to_timestamp('1234567', 'yyyyyyy') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'yyyyyyy' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'yyyyyyy'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -63,7 +59,7 @@ select to_timestamp('366', 'D') struct<> -- !query output org.apache.spark.SparkDateTimeException -[CANNOT_PARSE_TIMESTAMP] Invalid date 'DayOfYear 366' as '1970' is not a leap year. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Invalid date 'DayOfYear 366' as '1970' is not a leap year","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -72,8 +68,7 @@ select to_timestamp('9', 'DD') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to parse '9' in the new parser. You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0, or set to "CORRECTED" and treat it as an invalid datetime string. +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":["'9'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -82,8 +77,7 @@ select to_timestamp('9', 'DDD') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to parse '9' in the new parser. You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0, or set to "CORRECTED" and treat it as an invalid datetime string. +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":["'9'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -92,8 +86,7 @@ select to_timestamp('99', 'DDD') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to parse '99' in the new parser. You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0, or set to "CORRECTED" and treat it as an invalid datetime string. 
+{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":["'99'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -102,7 +95,7 @@ select to_timestamp('30-365', 'dd-DDD') struct<> -- !query output org.apache.spark.SparkDateTimeException -[CANNOT_PARSE_TIMESTAMP] Conflict found: Field DayOfMonth 30 differs from DayOfMonth 31 derived from 1970-12-31. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Conflict found: Field DayOfMonth 30 differs from DayOfMonth 31 derived from 1970-12-31","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -111,7 +104,7 @@ select to_timestamp('11-365', 'MM-DDD') struct<> -- !query output org.apache.spark.SparkDateTimeException -[CANNOT_PARSE_TIMESTAMP] Conflict found: Field MonthOfYear 11 differs from MonthOfYear 12 derived from 1970-12-31. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Conflict found: Field MonthOfYear 11 differs from MonthOfYear 12 derived from 1970-12-31","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -120,7 +113,7 @@ select to_timestamp('2019-366', 'yyyy-DDD') struct<> -- !query output org.apache.spark.SparkDateTimeException -[CANNOT_PARSE_TIMESTAMP] Text '2019-366' could not be parsed: Invalid date 'DayOfYear 366' as '2019' is not a leap year. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text '2019-366' could not be parsed: Invalid date 'DayOfYear 366' as '2019' is not a leap year","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -129,7 +122,7 @@ select to_timestamp('12-30-365', 'MM-dd-DDD') struct<> -- !query output org.apache.spark.SparkDateTimeException -[CANNOT_PARSE_TIMESTAMP] Conflict found: Field DayOfMonth 30 differs from DayOfMonth 31 derived from 1970-12-31. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Conflict found: Field DayOfMonth 30 differs from DayOfMonth 31 derived from 1970-12-31","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -138,7 +131,7 @@ select to_timestamp('2020-01-365', 'yyyy-dd-DDD') struct<> -- !query output org.apache.spark.SparkDateTimeException -[CANNOT_PARSE_TIMESTAMP] Text '2020-01-365' could not be parsed: Conflict found: Field DayOfMonth 30 differs from DayOfMonth 1 derived from 2020-12-30. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text '2020-01-365' could not be parsed: Conflict found: Field DayOfMonth 30 differs from DayOfMonth 1 derived from 2020-12-30","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -147,7 +140,7 @@ select to_timestamp('2020-10-350', 'yyyy-MM-DDD') struct<> -- !query output org.apache.spark.SparkDateTimeException -[CANNOT_PARSE_TIMESTAMP] Text '2020-10-350' could not be parsed: Conflict found: Field MonthOfYear 12 differs from MonthOfYear 10 derived from 2020-12-15. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. 
+{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text '2020-10-350' could not be parsed: Conflict found: Field MonthOfYear 12 differs from MonthOfYear 10 derived from 2020-12-15","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -156,7 +149,7 @@ select to_timestamp('2020-11-31-366', 'yyyy-MM-dd-DDD') struct<> -- !query output org.apache.spark.SparkDateTimeException -[CANNOT_PARSE_TIMESTAMP] Text '2020-11-31-366' could not be parsed: Invalid date 'NOVEMBER 31'. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text '2020-11-31-366' could not be parsed: Invalid date 'NOVEMBER 31'","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -165,8 +158,7 @@ select from_csv('2018-366', 'date Date', map('dateFormat', 'yyyy-DDD')) struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to parse '2018-366' in the new parser. You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0, or set to "CORRECTED" and treat it as an invalid datetime string. +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":["'2018-366'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -175,7 +167,7 @@ select to_date("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS") struct<> -- !query output org.apache.spark.SparkDateTimeException -[CANNOT_PARSE_TIMESTAMP] Text '2020-01-27T20:06:11.847' could not be parsed at index 10. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text '2020-01-27T20:06:11.847' could not be parsed at index 10","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -184,7 +176,7 @@ select to_date("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS") struct<> -- !query output org.apache.spark.SparkDateTimeException -[CANNOT_PARSE_TIMESTAMP] Text 'Unparseable' could not be parsed at index 0. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text 'Unparseable' could not be parsed at index 0","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -193,7 +185,7 @@ select to_timestamp("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS") struct<> -- !query output org.apache.spark.SparkDateTimeException -[CANNOT_PARSE_TIMESTAMP] Text '2020-01-27T20:06:11.847' could not be parsed at index 10. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text '2020-01-27T20:06:11.847' could not be parsed at index 10","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -202,7 +194,7 @@ select to_timestamp("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS") struct<> -- !query output org.apache.spark.SparkDateTimeException -[CANNOT_PARSE_TIMESTAMP] Text 'Unparseable' could not be parsed at index 0. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. 
+{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text 'Unparseable' could not be parsed at index 0","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -211,7 +203,7 @@ select unix_timestamp("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS") struct<> -- !query output org.apache.spark.SparkDateTimeException -[CANNOT_PARSE_TIMESTAMP] Text '2020-01-27T20:06:11.847' could not be parsed at index 10. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text '2020-01-27T20:06:11.847' could not be parsed at index 10","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -220,7 +212,7 @@ select unix_timestamp("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS") struct<> -- !query output org.apache.spark.SparkDateTimeException -[CANNOT_PARSE_TIMESTAMP] Text 'Unparseable' could not be parsed at index 0. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text 'Unparseable' could not be parsed at index 0","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -229,7 +221,7 @@ select to_unix_timestamp("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS") struct<> -- !query output org.apache.spark.SparkDateTimeException -[CANNOT_PARSE_TIMESTAMP] Text '2020-01-27T20:06:11.847' could not be parsed at index 10. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text '2020-01-27T20:06:11.847' could not be parsed at index 10","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -238,7 +230,7 @@ select to_unix_timestamp("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS") struct<> -- !query output org.apache.spark.SparkDateTimeException -[CANNOT_PARSE_TIMESTAMP] Text 'Unparseable' could not be parsed at index 0. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text 'Unparseable' could not be parsed at index 0","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -247,10 +239,7 @@ select cast("Unparseable" as timestamp) struct<> -- !query output org.apache.spark.SparkDateTimeException -[CAST_INVALID_INPUT] The value 'Unparseable' of the type "STRING" cannot be cast to "TIMESTAMP" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -select cast("Unparseable" as timestamp) - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'Unparseable'","\"STRING\"","\"TIMESTAMP\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":38,"fragment":"cast(\"Unparseable\" as timestamp"}]} -- !query @@ -259,7 +248,4 @@ select cast("Unparseable" as date) struct<> -- !query output org.apache.spark.SparkDateTimeException -[CAST_INVALID_INPUT] The value 'Unparseable' of the type "STRING" cannot be cast to "DATE" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. 
-== SQL(line 1, position 8) == -select cast("Unparseable" as date) - ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'Unparseable'","\"STRING\"","\"DATE\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":33,"fragment":"cast(\"Unparseable\" as date"}]} diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/decimalArithmeticOperations.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/decimalArithmeticOperations.sql.out index ab46a74f9c9cf..3dabee1ab640d 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/decimalArithmeticOperations.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/decimalArithmeticOperations.sql.out @@ -73,10 +73,7 @@ select (5e36BD + 0.1) + 5e36BD struct<> -- !query output org.apache.spark.SparkArithmeticException -[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded, 10000000000000000000000000000000000000.1, 39, 1) cannot be represented as Decimal(38, 1). If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -select (5e36BD + 0.1) + 5e36BD - ^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":["Decimal(expanded, 10000000000000000000000000000000000000.1, 39, 1)","38","1","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":29,"fragment":"(5e36BD + 0.1) + 5e36B"}]} -- !query @@ -85,10 +82,7 @@ select (-4e36BD - 0.1) - 7e36BD struct<> -- !query output org.apache.spark.SparkArithmeticException -[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded, -11000000000000000000000000000000000000.1, 39, 1) cannot be represented as Decimal(38, 1). If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -select (-4e36BD - 0.1) - 7e36BD - ^^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":["Decimal(expanded, -11000000000000000000000000000000000000.1, 39, 1)","38","1","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":30,"fragment":"(-4e36BD - 0.1) - 7e36B"}]} -- !query @@ -97,10 +91,7 @@ select 12345678901234567890.0 * 12345678901234567890.0 struct<> -- !query output org.apache.spark.SparkArithmeticException -[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded, 152415787532388367501905199875019052100, 39, 0) cannot be represented as Decimal(38, 2). If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -select 12345678901234567890.0 * 12345678901234567890.0 - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":["Decimal(expanded, 152415787532388367501905199875019052100, 39, 0)","38","2","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":53,"fragment":"12345678901234567890.0 * 12345678901234567890."}]} -- !query @@ -109,10 +100,7 @@ select 1e35BD / 0.1 struct<> -- !query output org.apache.spark.SparkArithmeticException -[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded, 1000000000000000000000000000000000000.00000000000000000000000000000000000000, 75, 38) cannot be represented as Decimal(38, 6). If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. 
-== SQL(line 1, position 8) == -select 1e35BD / 0.1 - ^^^^^^^^^^^^ +{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":["Decimal(expanded, 1000000000000000000000000000000000000.00000000000000000000000000000000000000, 75, 38)","38","6","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":18,"fragment":"1e35BD / 0."}]} -- !query @@ -145,10 +133,7 @@ select 1.0123456789012345678901234567890123456e36BD / 0.1 struct<> -- !query output org.apache.spark.SparkArithmeticException -[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded, 10123456789012345678901234567890123456.00000000000000000000000000000000000000, 76, 38) cannot be represented as Decimal(38, 6). If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -select 1.0123456789012345678901234567890123456e36BD / 0.1 - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":["Decimal(expanded, 10123456789012345678901234567890123456.00000000000000000000000000000000000000, 76, 38)","38","6","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":56,"fragment":"1.0123456789012345678901234567890123456e36BD / 0."}]} -- !query @@ -157,10 +142,7 @@ select 1.0123456789012345678901234567890123456e35BD / 1.0 struct<> -- !query output org.apache.spark.SparkArithmeticException -[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded, 101234567890123456789012345678901234.56000000000000000000000000000000000000, 74, 38) cannot be represented as Decimal(38, 6). If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -select 1.0123456789012345678901234567890123456e35BD / 1.0 - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":["Decimal(expanded, 101234567890123456789012345678901234.56000000000000000000000000000000000000, 74, 38)","38","6","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":56,"fragment":"1.0123456789012345678901234567890123456e35BD / 1."}]} -- !query @@ -169,10 +151,7 @@ select 1.0123456789012345678901234567890123456e34BD / 1.0 struct<> -- !query output org.apache.spark.SparkArithmeticException -[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded, 10123456789012345678901234567890123.45600000000000000000000000000000000000, 73, 38) cannot be represented as Decimal(38, 6). If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -select 1.0123456789012345678901234567890123456e34BD / 1.0 - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":["Decimal(expanded, 10123456789012345678901234567890123.45600000000000000000000000000000000000, 73, 38)","38","6","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":56,"fragment":"1.0123456789012345678901234567890123456e34BD / 1."}]} -- !query @@ -181,10 +160,7 @@ select 1.0123456789012345678901234567890123456e33BD / 1.0 struct<> -- !query output org.apache.spark.SparkArithmeticException -[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded, 1012345678901234567890123456789012.34560000000000000000000000000000000000, 72, 38) cannot be represented as Decimal(38, 6). 
If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -select 1.0123456789012345678901234567890123456e33BD / 1.0 - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":["Decimal(expanded, 1012345678901234567890123456789012.34560000000000000000000000000000000000, 72, 38)","38","6","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":56,"fragment":"1.0123456789012345678901234567890123456e33BD / 1."}]} -- !query @@ -193,10 +169,7 @@ select 1.0123456789012345678901234567890123456e32BD / 1.0 struct<> -- !query output org.apache.spark.SparkArithmeticException -[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded, 101234567890123456789012345678901.23456000000000000000000000000000000000, 71, 38) cannot be represented as Decimal(38, 6). If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -select 1.0123456789012345678901234567890123456e32BD / 1.0 - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":["Decimal(expanded, 101234567890123456789012345678901.23456000000000000000000000000000000000, 71, 38)","38","6","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":56,"fragment":"1.0123456789012345678901234567890123456e32BD / 1."}]} -- !query @@ -213,10 +186,7 @@ select 1.0123456789012345678901234567890123456e31BD / 0.1 struct<> -- !query output org.apache.spark.SparkArithmeticException -[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded, 101234567890123456789012345678901.23456000000000000000000000000000000000, 71, 38) cannot be represented as Decimal(38, 6). If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -select 1.0123456789012345678901234567890123456e31BD / 0.1 - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":["Decimal(expanded, 101234567890123456789012345678901.23456000000000000000000000000000000000, 71, 38)","38","6","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":56,"fragment":"1.0123456789012345678901234567890123456e31BD / 0."}]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/higher-order-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/higher-order-functions.sql.out index c6bbb4fb7179a..2d1d9aedc6d17 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/higher-order-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/higher-order-functions.sql.out @@ -17,7 +17,7 @@ select upper(x -> x) as v struct<> -- !query output org.apache.spark.sql.AnalysisException -A lambda function should only be used in a higher order function. 
However, its class is org.apache.spark.sql.catalyst.expressions.Upper, which is not a higher order function.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out index 19e8fbc8c9fe2..d4da6adab44a2 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out @@ -13,12 +13,7 @@ select interval 4 month 2 weeks 3 microseconds * 1.5 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot mix year-month and day-time fields: interval 4 month 2 weeks 3 microseconds(line 1, pos 7) - -== SQL == -select interval 4 month 2 weeks 3 microseconds * 1.5 --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -119,10 +114,7 @@ select interval 2 second * 'a' struct<> -- !query output org.apache.spark.SparkNumberFormatException -[CAST_INVALID_INPUT] The value 'a' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -select interval 2 second * 'a' - ^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'a'","\"STRING\"","\"DOUBLE\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":29,"fragment":"interval 2 second * 'a"}]} -- !query @@ -131,10 +123,7 @@ select interval 2 second / 'a' struct<> -- !query output org.apache.spark.SparkNumberFormatException -[CAST_INVALID_INPUT] The value 'a' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -select interval 2 second / 'a' - ^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'a'","\"STRING\"","\"DOUBLE\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":29,"fragment":"interval 2 second / 'a"}]} -- !query @@ -143,10 +132,7 @@ select interval 2 year * 'a' struct<> -- !query output org.apache.spark.SparkNumberFormatException -[CAST_INVALID_INPUT] The value 'a' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -select interval 2 year * 'a' - ^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'a'","\"STRING\"","\"DOUBLE\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":27,"fragment":"interval 2 year * 'a"}]} -- !query @@ -155,10 +141,7 @@ select interval 2 year / 'a' struct<> -- !query output org.apache.spark.SparkNumberFormatException -[CAST_INVALID_INPUT] The value 'a' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. 
Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -select interval 2 year / 'a' - ^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'a'","\"STRING\"","\"DOUBLE\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":27,"fragment":"interval 2 year / 'a"}]} -- !query @@ -183,10 +166,7 @@ select 'a' * interval 2 second struct<> -- !query output org.apache.spark.SparkNumberFormatException -[CAST_INVALID_INPUT] The value 'a' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -select 'a' * interval 2 second - ^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'a'","\"STRING\"","\"DOUBLE\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":29,"fragment":"'a' * interval 2 secon"}]} -- !query @@ -195,10 +175,7 @@ select 'a' * interval 2 year struct<> -- !query output org.apache.spark.SparkNumberFormatException -[CAST_INVALID_INPUT] The value 'a' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -select 'a' * interval 2 year - ^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'a'","\"STRING\"","\"DOUBLE\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":27,"fragment":"'a' * interval 2 yea"}]} -- !query @@ -207,7 +184,7 @@ select '2' / interval 2 second struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '('2' / INTERVAL '02' SECOND)' due to data type mismatch: differing types in '('2' / INTERVAL '02' SECOND)' (string and interval second).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -216,7 +193,7 @@ select '2' / interval 2 year struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '('2' / INTERVAL '2' YEAR)' due to data type mismatch: differing types in '('2' / INTERVAL '2' YEAR)' (string and interval year).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -225,10 +202,7 @@ select interval '2 seconds' / 0 struct<> -- !query output org.apache.spark.SparkArithmeticException -[INTERVAL_DIVIDED_BY_ZERO] Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. -== SQL(line 1, position 8) == -select interval '2 seconds' / 0 - ^^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"INTERVAL_DIVIDED_BY_ZERO","sqlState":"22012","messageParameters":[],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":30,"fragment":"interval '2 seconds' / "}]} -- !query @@ -261,10 +235,7 @@ select interval '2' year / 0 struct<> -- !query output org.apache.spark.SparkArithmeticException -[INTERVAL_DIVIDED_BY_ZERO] Division by zero. 
Use `try_divide` to tolerate divisor being 0 and return NULL instead. -== SQL(line 1, position 8) == -select interval '2' year / 0 - ^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"INTERVAL_DIVIDED_BY_ZERO","sqlState":"22012","messageParameters":[],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":27,"fragment":"interval '2' year / "}]} -- !query @@ -297,7 +268,7 @@ select 2 / interval '2' year struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(2 / INTERVAL '2' YEAR)' due to data type mismatch: differing types in '(2 / INTERVAL '2' YEAR)' (int and interval year).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -306,7 +277,7 @@ select 2 / interval '2' hour struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(2 / INTERVAL '02' HOUR)' due to data type mismatch: differing types in '(2 / INTERVAL '02' HOUR)' (int and interval hour).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -315,7 +286,7 @@ select null / interval '2' year struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(NULL / INTERVAL '2' YEAR)' due to data type mismatch: differing types in '(NULL / INTERVAL '2' YEAR)' (void and interval year).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -324,7 +295,7 @@ select null / interval '2' hour struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(NULL / INTERVAL '02' HOUR)' due to data type mismatch: differing types in '(NULL / INTERVAL '02' HOUR)' (void and interval hour).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -333,12 +304,7 @@ select -interval '-1 month 1 day -1 second' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot mix year-month and day-time fields: interval '-1 month 1 day -1 second'(line 1, pos 8) - -== SQL == -select -interval '-1 month 1 day -1 second' ---------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -363,12 +329,7 @@ select -interval -1 month 1 day -1 second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot mix year-month and day-time fields: interval -1 month 1 day -1 second(line 1, pos 8) - -== SQL == -select -interval -1 month 1 day -1 second ---------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -393,12 +354,7 @@ select +interval '-1 month 1 day -1 second' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot mix year-month and day-time fields: interval '-1 month 1 day -1 second'(line 1, pos 8) - -== SQL == -select +interval '-1 month 1 day -1 second' ---------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -423,12 +379,7 @@ select +interval -1 month 1 day -1 second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot mix year-month and day-time fields: interval -1 month 1 day -1 second(line 1, pos 8) - -== SQL == -select +interval -1 month 1 day -1 second ---------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -661,10 +612,7 @@ select make_interval(0, 0, 0, 0, 0, 0, 1234567890123456789) struct<> -- !query output org.apache.spark.SparkArithmeticException -[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(expanded, 1234567890123456789, 20, 0) cannot be represented as Decimal(18, 6). 
If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -select make_interval(0, 0, 0, 0, 0, 0, 1234567890123456789) - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":["Decimal(expanded, 1234567890123456789, 20, 0)","18","6","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":58,"fragment":"make_interval(0, 0, 0, 0, 0, 0, 1234567890123456789"}]} -- !query @@ -844,12 +792,7 @@ select interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 millisec struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot mix year-month and day-time fields: interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 millisecond 9 microsecond(line 1, pos 7) - -== SQL == -select interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 millisecond 9 microsecond --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -882,12 +825,7 @@ select interval '30' year '25' month '-100' day '40' hour '80' minute '299.88998 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot mix year-month and day-time fields: interval '30' year '25' month '-100' day '40' hour '80' minute '299.889987299' second(line 1, pos 7) - -== SQL == -select interval '30' year '25' month '-100' day '40' hour '80' minute '299.889987299' second --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1016,12 +954,7 @@ select interval '20 15:40:32.99899999' day to hour struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Interval string does not match day-time format of `[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval day to hour: 20 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16) - -== SQL == -select interval '20 15:40:32.99899999' day to hour -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1030,12 +963,7 @@ select interval '20 15:40:32.99899999' day to minute struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Interval string does not match day-time format of `[+|-]d h:m`, `INTERVAL [+|-]'[+|-]d h:m' DAY TO MINUTE` when cast to interval day to minute: 20 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16) - -== SQL == -select interval '20 15:40:32.99899999' day to minute -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1044,12 +972,7 @@ select interval '15:40:32.99899999' hour to minute struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Interval string does not match day-time format of `[+|-]h:m`, `INTERVAL [+|-]'[+|-]h:m' HOUR TO MINUTE` when cast to interval hour to minute: 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16) - -== SQL == -select interval '15:40:32.99899999' hour to minute -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1058,12 +981,7 @@ select interval '15:40.99899999' hour to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Interval string does not match 
day-time format of `[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 15:40.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16) - -== SQL == -select interval '15:40.99899999' hour to second -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1072,12 +990,7 @@ select interval '15:40' hour to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Interval string does not match day-time format of `[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 15:40, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16) - -== SQL == -select interval '15:40' hour to second -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1086,12 +999,7 @@ select interval '20 40:32.99899999' minute to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Interval string does not match day-time format of `[+|-]m:s.n`, `INTERVAL [+|-]'[+|-]m:s.n' MINUTE TO SECOND` when cast to interval minute to second: 20 40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16) - -== SQL == -select interval '20 40:32.99899999' minute to second -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1100,12 +1008,7 @@ select interval 10 nanoseconds struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Error parsing ' 10 nanoseconds' to interval, invalid unit 'nanoseconds'(line 1, pos 16) - -== SQL == -select interval 10 nanoseconds -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1194,12 +1097,7 @@ select interval struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -at least one time unit should be given for interval literal(line 1, pos 7) - -== SQL == -select interval --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1208,12 +1106,7 @@ select interval 1 fake_unit struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Error parsing ' 1 fake_unit' to interval, invalid unit 'fake_unit'(line 1, pos 16) - -== SQL == -select interval 1 fake_unit -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1222,12 +1115,7 @@ select interval 1 year to month struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -The value of from-to unit must be a string(line 1, pos 16) - -== SQL == -select interval 1 year to month -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1236,12 +1124,7 @@ select interval '1' year to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Intervals FROM year TO second are not supported.(line 1, pos 16) - -== SQL == -select interval '1' year to second -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1250,12 +1133,7 @@ select interval '10-9' year to month '2-1' year to month struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Can only have a single from-to unit in the interval literal syntax(line 1, pos 37) - -== SQL == -select interval '10-9' year to month 
'2-1' year to month --------------------------------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1264,12 +1142,7 @@ select interval '10-9' year to month '12:11:10' hour to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Can only have a single from-to unit in the interval literal syntax(line 1, pos 37) - -== SQL == -select interval '10-9' year to month '12:11:10' hour to second --------------------------------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1278,12 +1151,7 @@ select interval '1 15:11' day to minute '12:11:10' hour to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Can only have a single from-to unit in the interval literal syntax(line 1, pos 40) - -== SQL == -select interval '1 15:11' day to minute '12:11:10' hour to second -----------------------------------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1292,12 +1160,7 @@ select interval 1 year '2-1' year to month struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Can only have a single from-to unit in the interval literal syntax(line 1, pos 23) - -== SQL == -select interval 1 year '2-1' year to month ------------------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1306,12 +1169,7 @@ select interval 1 year '12:11:10' hour to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Can only have a single from-to unit in the interval literal syntax(line 1, pos 23) - -== SQL == -select interval 1 year '12:11:10' hour to second ------------------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1320,12 +1178,7 @@ select interval '10-9' year to month '1' year struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Can only have a single from-to unit in the interval literal syntax(line 1, pos 37) - -== SQL == -select interval '10-9' year to month '1' year --------------------------------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1334,12 +1187,7 @@ select interval '12:11:10' hour to second '1' year struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Can only have a single from-to unit in the interval literal syntax(line 1, pos 42) - -== SQL == -select interval '12:11:10' hour to second '1' year -------------------------------------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1348,7 +1196,7 @@ select interval (-30) day struct<> -- !query output org.apache.spark.sql.AnalysisException -Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1357,7 +1205,7 @@ select interval (a + 1) day struct<> -- !query output org.apache.spark.sql.AnalysisException -Undefined function: interval. 
This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1366,12 +1214,7 @@ select interval 30 day day day struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -[PARSE_SYNTAX_ERROR] Syntax error at or near 'day': extra input 'day'(line 1, pos 27) - -== SQL == -select interval 30 day day day ----------------------------^^^ +{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'day'",": extra input 'day'"],"queryContext":[]} -- !query @@ -1380,7 +1223,7 @@ select interval (-30) days struct<> -- !query output org.apache.spark.sql.AnalysisException -Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1389,7 +1232,7 @@ select interval (a + 1) days struct<> -- !query output org.apache.spark.sql.AnalysisException -Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1398,12 +1241,7 @@ select interval 30 days days days struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -[PARSE_SYNTAX_ERROR] Syntax error at or near 'days': extra input 'days'(line 1, pos 29) - -== SQL == -select interval 30 days days days ------------------------------^^^ +{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'days'",": extra input 'days'"],"queryContext":[]} -- !query @@ -1420,12 +1258,7 @@ SELECT INTERVAL '178956970-8' YEAR TO MONTH struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Error parsing interval year-month string: integer overflow(line 1, pos 16) - -== SQL == -SELECT INTERVAL '178956970-8' YEAR TO MONTH -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1470,7 +1303,7 @@ select struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(INTERVAL '2' YEAR + '3-3 year to month')' due to data type mismatch: differing types in '(INTERVAL '2' YEAR + '3-3 year to month')' (interval year and string).; line 2 pos 2 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1495,7 +1328,7 @@ select interval '2' year + '3-3' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(INTERVAL '2' YEAR + '3-3')' due to data type mismatch: differing types in '(INTERVAL '2' YEAR + '3-3')' (interval year and string).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1504,7 +1337,7 @@ select interval '2' year - '4' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(INTERVAL '2' YEAR - '4')' due to data type mismatch: differing types in '(INTERVAL '2' YEAR - '4')' (interval year and string).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1513,10 +1346,7 @@ select '4 11:11' - interval '4 22:12' day to minute struct<> -- !query output org.apache.spark.SparkDateTimeException -[CAST_INVALID_INPUT] The value '4 11:11' of the type "STRING" cannot be cast to "TIMESTAMP" because it is malformed. 
Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -select '4 11:11' - interval '4 22:12' day to minute - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'4 11:11'","\"STRING\"","\"TIMESTAMP\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":50,"fragment":"'4 11:11' - interval '4 22:12' day to minut"}]} -- !query @@ -1525,10 +1355,7 @@ select '4 12:12:12' + interval '4 22:12' day to minute struct<> -- !query output org.apache.spark.SparkDateTimeException -[CAST_INVALID_INPUT] The value '4 12:12:12' of the type "STRING" cannot be cast to "TIMESTAMP" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -select '4 12:12:12' + interval '4 22:12' day to minute - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'4 12:12:12'","\"STRING\"","\"TIMESTAMP\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":53,"fragment":"'4 12:12:12' + interval '4 22:12' day to minut"}]} -- !query @@ -1545,7 +1372,7 @@ select interval '2' year + str from interval_view struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(INTERVAL '2' YEAR + interval_view.str)' due to data type mismatch: differing types in '(INTERVAL '2' YEAR + interval_view.str)' (interval year and string).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1554,7 +1381,7 @@ select interval '2' year - str from interval_view struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(INTERVAL '2' YEAR - interval_view.str)' due to data type mismatch: differing types in '(INTERVAL '2' YEAR - interval_view.str)' (interval year and string).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1563,10 +1390,7 @@ select str - interval '4 22:12' day to minute from interval_view struct<> -- !query output org.apache.spark.SparkDateTimeException -[CAST_INVALID_INPUT] The value '1' of the type "STRING" cannot be cast to "TIMESTAMP" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -select str - interval '4 22:12' day to minute from interval_view - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'1'","\"STRING\"","\"TIMESTAMP\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":44,"fragment":"str - interval '4 22:12' day to minut"}]} -- !query @@ -1575,10 +1399,7 @@ select str + interval '4 22:12' day to minute from interval_view struct<> -- !query output org.apache.spark.SparkDateTimeException -[CAST_INVALID_INPUT] The value '1' of the type "STRING" cannot be cast to "TIMESTAMP" because it is malformed. 
Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -select str + interval '4 22:12' day to minute from interval_view - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'1'","\"STRING\"","\"TIMESTAMP\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":44,"fragment":"str + interval '4 22:12' day to minut"}]} -- !query @@ -1587,7 +1408,7 @@ select interval '2-2' year to month + interval '3' day struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'INTERVAL '2-2' YEAR TO MONTH + INTERVAL '3' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2-2' YEAR TO MONTH' is of interval year to month type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1596,7 +1417,7 @@ select interval '3' day + interval '2-2' year to month struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'INTERVAL '2-2' YEAR TO MONTH + INTERVAL '3' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2-2' YEAR TO MONTH' is of interval year to month type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1605,7 +1426,7 @@ select interval '2-2' year to month - interval '3' day struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'INTERVAL '2-2' YEAR TO MONTH + (- INTERVAL '3' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2-2' YEAR TO MONTH' is of interval year to month type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1614,7 +1435,7 @@ select interval '3' day - interval '2-2' year to month struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(INTERVAL '3' DAY - INTERVAL '2-2' YEAR TO MONTH)' due to data type mismatch: differing types in '(INTERVAL '3' DAY - INTERVAL '2-2' YEAR TO MONTH)' (interval day and interval year to month).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1623,7 +1444,7 @@ select 1 - interval '2' second struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '1 + (- INTERVAL '02' SECOND)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, '1' is of int type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1632,7 +1453,7 @@ select 1 + interval '2' month struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(1 + INTERVAL '2' MONTH)' due to data type mismatch: differing types in '(1 + INTERVAL '2' MONTH)' (int and interval month).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1641,7 +1462,7 @@ select interval '2' second + 1 struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '1 + INTERVAL '02' SECOND' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, '1' is of int type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- 
!query @@ -1650,7 +1471,7 @@ select interval '2' month - 1 struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(INTERVAL '2' MONTH - 1)' due to data type mismatch: differing types in '(INTERVAL '2' MONTH - 1)' (interval month and int).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1707,12 +1528,7 @@ select interval '-\t2-2\t' year to month struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Interval string does not match year-month format of `[+|-]y-m`, `INTERVAL [+|-]'[+|-]y-m' YEAR TO MONTH` when cast to interval year to month: - 2-2 (line 1, pos 16) - -== SQL == -select interval '-\t2-2\t' year to month -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1729,13 +1545,7 @@ select interval '\n-\t10\t 12:34:46.789\t' day to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Interval string does not match day-time format of `[+|-]d h:m:s.n`, `INTERVAL [+|-]'[+|-]d h:m:s.n' DAY TO SECOND` when cast to interval day to second: -- 10 12:34:46.789 , set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16) - -== SQL == -select interval '\n-\t10\t 12:34:46.789\t' day to second -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1744,12 +1554,7 @@ select interval '中文 interval 1 day' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the INTERVAL value: 中文 interval 1 day(line 1, pos 7) - -== SQL == -select interval '中文 interval 1 day' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1758,12 +1563,7 @@ select interval 'interval中文 1 day' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the INTERVAL value: interval中文 1 day(line 1, pos 7) - -== SQL == -select interval 'interval中文 1 day' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1772,12 +1572,7 @@ select interval 'interval 1中文day' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the INTERVAL value: interval 1中文day(line 1, pos 7) - -== SQL == -select interval 'interval 1中文day' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1785,8 +1580,8 @@ select -(a) from values (interval '-2147483648 months', interval '2147483647 mon -- !query schema struct<> -- !query output -org.apache.spark.SparkArithmeticException -[INTERVAL_ARITHMETIC_OVERFLOW] integer overflow. +org.apache.spark.SparkException +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1794,8 +1589,8 @@ select a - b from values (interval '-2147483648 months', interval '2147483647 mo -- !query schema struct<> -- !query output -org.apache.spark.SparkArithmeticException -[INTERVAL_ARITHMETIC_OVERFLOW] integer overflow. Use 'try_subtract' to tolerate overflow and return NULL instead. +org.apache.spark.SparkException +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1803,8 +1598,8 @@ select b + interval '1 month' from values (interval '-2147483648 months', interv -- !query schema struct<> -- !query output -org.apache.spark.SparkArithmeticException -[INTERVAL_ARITHMETIC_OVERFLOW] integer overflow. Use 'try_add' to tolerate overflow and return NULL instead.
+org.apache.spark.SparkException +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1812,8 +1607,8 @@ select a * 1.1 from values (interval '-2147483648 months', interval '2147483647 -- !query schema struct<> -- !query output -java.lang.ArithmeticException -Overflow +org.apache.spark.SparkException +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1821,8 +1616,8 @@ select a / 0.5 from values (interval '-2147483648 months', interval '2147483647 -- !query schema struct<> -- !query output -java.lang.ArithmeticException -Overflow +org.apache.spark.SparkException +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1883,12 +1678,7 @@ select interval '+' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the INTERVAL value: +(line 1, pos 7) - -== SQL == -select interval '+' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1897,12 +1687,7 @@ select interval '+.' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the INTERVAL value: +.(line 1, pos 7) - -== SQL == -select interval '+.' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1911,12 +1696,7 @@ select interval '1' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the INTERVAL value: 1(line 1, pos 7) - -== SQL == -select interval '1' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1925,12 +1705,7 @@ select interval '1.2' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the INTERVAL value: 1.2(line 1, pos 7) - -== SQL == -select interval '1.2' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1939,12 +1714,7 @@ select interval '- 2' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the INTERVAL value: - 2(line 1, pos 7) - -== SQL == -select interval '- 2' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1953,12 +1723,7 @@ select interval '1 day -' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the INTERVAL value: 1 day -(line 1, pos 7) - -== SQL == -select interval '1 day -' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1967,12 +1732,7 @@ select interval '1 day 1' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the INTERVAL value: 1 day 1(line 1, pos 7) - -== SQL == -select interval '1 day 1' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1981,12 +1741,7 @@ select interval '1 day 2' day struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Can only use numbers in the interval value part for multiple unit value pairs interval form, but got invalid value: 1 day 2(line 1, pos 16) - -== SQL == -select interval '1 day 2' day -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1995,12 +1750,7 @@ select interval 'interval 1' day struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Can only use numbers in the interval value part for multiple unit value pairs interval form, but got invalid value: interval 1(line 1, pos 16) - -== SQL == -select interval 'interval 1' day -----------------^^^ 
+{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2033,10 +1783,7 @@ SELECT (INTERVAL '-178956970-8' YEAR TO MONTH) / -1 struct<> -- !query output org.apache.spark.SparkArithmeticException -[INTERVAL_ARITHMETIC_OVERFLOW] Interval value overflows after being divided by -1. Use 'try_divide' to tolerate overflow and return NULL instead. -== SQL(line 1, position 8) == -SELECT (INTERVAL '-178956970-8' YEAR TO MONTH) / -1 - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["Interval value overflows after being divided by -1"," Use 'try_divide' to tolerate overflow and return NULL instead."],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":50,"fragment":"(INTERVAL '-178956970-8' YEAR TO MONTH) / -"}]} -- !query @@ -2045,10 +1792,7 @@ SELECT (INTERVAL '-178956970-8' YEAR TO MONTH) / -1L struct<> -- !query output org.apache.spark.SparkArithmeticException -[INTERVAL_ARITHMETIC_OVERFLOW] Interval value overflows after being divided by -1. Use 'try_divide' to tolerate overflow and return NULL instead. -== SQL(line 1, position 8) == -SELECT (INTERVAL '-178956970-8' YEAR TO MONTH) / -1L - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["Interval value overflows after being divided by -1"," Use 'try_divide' to tolerate overflow and return NULL instead."],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":51,"fragment":"(INTERVAL '-178956970-8' YEAR TO MONTH) / -1"}]} -- !query @@ -2091,10 +1835,7 @@ SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1 struct<> -- !query output org.apache.spark.SparkArithmeticException -[INTERVAL_ARITHMETIC_OVERFLOW] Interval value overflows after being divided by -1. Use 'try_divide' to tolerate overflow and return NULL instead. -== SQL(line 1, position 8) == -SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1 - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["Interval value overflows after being divided by -1"," Use 'try_divide' to tolerate overflow and return NULL instead."],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":64,"fragment":"(INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -"}]} -- !query @@ -2103,10 +1844,7 @@ SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1L struct<> -- !query output org.apache.spark.SparkArithmeticException -[INTERVAL_ARITHMETIC_OVERFLOW] Interval value overflows after being divided by -1. Use 'try_divide' to tolerate overflow and return NULL instead. 
-== SQL(line 1, position 8) == -SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1L - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["Interval value overflows after being divided by -1"," Use 'try_divide' to tolerate overflow and return NULL instead."],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":65,"fragment":"(INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1"}]} -- !query @@ -2229,12 +1967,7 @@ SELECT INTERVAL '106751992 04' DAY TO HOUR struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -requirement failed: day 106751992 outside range [0, 106751991](line 1, pos 16) - -== SQL == -SELECT INTERVAL '106751992 04' DAY TO HOUR -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2243,12 +1976,7 @@ SELECT INTERVAL '-106751992 04' DAY TO HOUR struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -requirement failed: day 106751992 outside range [0, 106751991](line 1, pos 16) - -== SQL == -SELECT INTERVAL '-106751992 04' DAY TO HOUR -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2257,12 +1985,7 @@ SELECT INTERVAL '2562047789:00' HOUR TO MINUTE struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -requirement failed: hour 2562047789 outside range [0, 2562047788](line 1, pos 16) - -== SQL == -SELECT INTERVAL '2562047789:00' HOUR TO MINUTE -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2271,12 +1994,7 @@ SELECT INTERVAL '-2562047789:00' HOUR TO MINUTE struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -requirement failed: hour 2562047789 outside range [0, 2562047788](line 1, pos 16) - -== SQL == -SELECT INTERVAL '-2562047789:00' HOUR TO MINUTE -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2285,12 +2003,7 @@ SELECT INTERVAL '153722867281:54.775808' MINUTE TO SECOND struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -requirement failed: minute 153722867281 outside range [0, 153722867280](line 1, pos 16) - -== SQL == -SELECT INTERVAL '153722867281:54.775808' MINUTE TO SECOND -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2299,12 +2012,7 @@ SELECT INTERVAL '-153722867281:54.775808' MINUTE TO SECOND struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -requirement failed: minute 153722867281 outside range [0, 153722867280](line 1, pos 16) - -== SQL == -SELECT INTERVAL '-153722867281:54.775808' MINUTE TO SECOND -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2441,7 +2149,7 @@ SELECT INTERVAL 1 MONTH > INTERVAL 20 DAYS struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(INTERVAL '1' MONTH > INTERVAL '20' DAY)' due to data type mismatch: differing types in '(INTERVAL '1' MONTH > INTERVAL '20' DAY)' (interval month and interval day).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2450,7 +2158,7 @@ SELECT INTERVAL '1' DAY < '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(INTERVAL '1' DAY < '1')' due to data type mismatch: differing types in '(INTERVAL '1' DAY < '1')' (interval day and string).; 
line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2459,7 +2167,7 @@ SELECT INTERVAL '1' DAY = '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(INTERVAL '1' DAY = '1')' due to data type mismatch: differing types in '(INTERVAL '1' DAY = '1')' (interval day and string).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2468,7 +2176,7 @@ SELECT INTERVAL '1' DAY > '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(INTERVAL '1' DAY > '1')' due to data type mismatch: differing types in '(INTERVAL '1' DAY > '1')' (interval day and string).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2477,7 +2185,7 @@ SELECT '1' < INTERVAL '1' DAY struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '('1' < INTERVAL '1' DAY)' due to data type mismatch: differing types in '('1' < INTERVAL '1' DAY)' (string and interval day).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2486,7 +2194,7 @@ SELECT '1' = INTERVAL '1' DAY struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '('1' = INTERVAL '1' DAY)' due to data type mismatch: differing types in '('1' = INTERVAL '1' DAY)' (string and interval day).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2495,7 +2203,7 @@ SELECT '1' > INTERVAL '1' DAY struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '('1' > INTERVAL '1' DAY)' due to data type mismatch: differing types in '('1' > INTERVAL '1' DAY)' (string and interval day).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2504,7 +2212,7 @@ SELECT INTERVAL '1' YEAR < '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(INTERVAL '1' YEAR < '1')' due to data type mismatch: differing types in '(INTERVAL '1' YEAR < '1')' (interval year and string).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2513,7 +2221,7 @@ SELECT INTERVAL '1' YEAR = '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(INTERVAL '1' YEAR = '1')' due to data type mismatch: differing types in '(INTERVAL '1' YEAR = '1')' (interval year and string).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2522,7 +2230,7 @@ SELECT INTERVAL '1' YEAR > '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(INTERVAL '1' YEAR > '1')' due to data type mismatch: differing types in '(INTERVAL '1' YEAR > '1')' (interval year and string).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2531,7 +2239,7 @@ SELECT '1' < INTERVAL '1' YEAR struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '('1' < INTERVAL '1' YEAR)' due to data type mismatch: differing types in '('1' < INTERVAL '1' YEAR)' (string and interval year).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2540,7 +2248,7 @@ SELECT '1' = INTERVAL '1' YEAR struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '('1' = INTERVAL '1' YEAR)' due to data type mismatch: differing types in '('1' = INTERVAL '1' YEAR)' (string and interval year).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} 
-- !query @@ -2549,7 +2257,7 @@ SELECT '1' > INTERVAL '1' YEAR struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '('1' > INTERVAL '1' YEAR)' due to data type mismatch: differing types in '('1' > INTERVAL '1' YEAR)' (string and interval year).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2574,7 +2282,7 @@ SELECT array(INTERVAL 1 MONTH, INTERVAL 20 DAYS) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'array(INTERVAL '1' MONTH, INTERVAL '20' DAY)' due to data type mismatch: input to function array should all be the same type, but it's [interval month, interval day]; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2599,7 +2307,7 @@ SELECT coalesce(INTERVAL 1 MONTH, INTERVAL 20 DAYS) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'coalesce(INTERVAL '1' MONTH, INTERVAL '20' DAY)' due to data type mismatch: input to function coalesce should all be the same type, but it's [interval month, interval day]; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2656,7 +2364,7 @@ SELECT div(INTERVAL '1' MONTH, INTERVAL '-1' DAY) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(INTERVAL '1' MONTH div INTERVAL '-1' DAY)' due to data type mismatch: differing types in '(INTERVAL '1' MONTH div INTERVAL '-1' DAY)' (interval month and interval day).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/literals.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/literals.sql.out index 6edd513ea2ffa..b40cf817fb328 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/literals.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/literals.sql.out @@ -37,12 +37,7 @@ select 128Y struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Numeric literal 128 does not fit in range [-128, 127] for type tinyint(line 1, pos 7) - -== SQL == -select 128Y --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -67,12 +62,7 @@ select 32768S struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Numeric literal 32768 does not fit in range [-32768, 32767] for type smallint(line 1, pos 7) - -== SQL == -select 32768S --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -97,12 +87,7 @@ select 9223372036854775808L struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Numeric literal 9223372036854775808 does not fit in range [-9223372036854775808, 9223372036854775807] for type bigint(line 1, pos 7) - -== SQL == -select 9223372036854775808L --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -143,10 +128,7 @@ select 1234567890123456789012345678901234567890 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -decimal can only support precision up to 38 -== SQL == -select 1234567890123456789012345678901234567890 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -155,10 +137,7 @@ select 1234567890123456789012345678901234567890.0 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -decimal can only support precision up to 38 -== SQL == -select 1234567890123456789012345678901234567890.0 
+{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -183,12 +162,7 @@ select -3.4028235E39f struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Numeric literal -3.4028235E39 does not fit in range [-3.4028234663852886E+38, 3.4028234663852886E+38] for type float(line 1, pos 7) - -== SQL == -select -3.4028235E39f --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -213,12 +187,7 @@ select .e3 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -[PARSE_SYNTAX_ERROR] Syntax error at or near '.'(line 1, pos 7) - -== SQL == -select .e3 --------^^^ +{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'.'",""],"queryContext":[]} -- !query @@ -227,12 +196,7 @@ select 1E309, -1E309 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Numeric literal 1E309 does not fit in range [-1.7976931348623157E+308, 1.7976931348623157E+308] for type double(line 1, pos 7) - -== SQL == -select 1E309, -1E309 --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -331,12 +295,7 @@ select date 'mar 11 2016' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the DATE value: mar 11 2016(line 1, pos 7) - -== SQL == -select date 'mar 11 2016' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -353,12 +312,7 @@ select timestamp '2016-33-11 20:54:00.000' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the TIMESTAMP value: 2016-33-11 20:54:00.000(line 1, pos 7) - -== SQL == -select timestamp '2016-33-11 20:54:00.000' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -367,12 +321,7 @@ select GEO '(10,-6)' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Literals of type 'GEO' are currently not supported.(line 1, pos 7) - -== SQL == -select GEO '(10,-6)' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -389,12 +338,7 @@ select 1.20E-38BD struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -decimal can only support precision up to 38(line 1, pos 7) - -== SQL == -select 1.20E-38BD --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -411,12 +355,7 @@ select X'XuZ' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -contains illegal character for hexBinary: 0XuZ(line 1, pos 7) - -== SQL == -select X'XuZ' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -433,7 +372,7 @@ select +date '1999-01-01' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(+ DATE '1999-01-01')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'DATE '1999-01-01'' is of date type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -442,7 +381,7 @@ select +timestamp '1999-01-01' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(+ TIMESTAMP '1999-01-01 00:00:00')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'TIMESTAMP '1999-01-01 00:00:00'' is of timestamp type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} 
-- !query @@ -459,7 +398,7 @@ select +map(1, 2) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(+ map(1, 2))' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'map(1, 2)' is of map<int,int> type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -468,7 +407,7 @@ select +array(1,2) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(+ array(1, 2))' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'array(1, 2)' is of array<int> type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -477,7 +416,7 @@ select +named_struct('a', 1, 'b', 'spark') struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(+ named_struct('a', 1, 'b', 'spark'))' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'named_struct('a', 1, 'b', 'spark')' is of struct<a:int,b:string> type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -486,7 +425,7 @@ select +X'1' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(+ X'01')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'X'01'' is of binary type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -495,7 +434,7 @@ select -date '1999-01-01' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(- DATE '1999-01-01')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'DATE '1999-01-01'' is of date type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -504,7 +443,7 @@ select -timestamp '1999-01-01' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(- TIMESTAMP '1999-01-01 00:00:00')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'TIMESTAMP '1999-01-01 00:00:00'' is of timestamp type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -513,4 +452,4 @@ select -x'2379ACFe' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(- X'2379ACFE')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'X'2379ACFE'' is of binary type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/map.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/map.sql.out index c9d4186a27b42..0b2d09734dab1 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/map.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/map.sql.out @@ -5,10 +5,7 @@ select element_at(map(1, 'a', 2, 'b'), 5) struct<> -- !query output org.apache.spark.SparkNoSuchElementException -[MAP_KEY_DOES_NOT_EXIST] Key 5 does not exist. Use `try_element_at` to tolerate non-existent key and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) == -select element_at(map(1, 'a', 2, 'b'), 5) - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"MAP_KEY_DOES_NOT_EXIST","messageParameters":["5","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":40,"fragment":"element_at(map(1, 'a', 2, 'b'), 5"}]} -- !query @@ -17,10 +14,7 @@ select map(1, 'a', 2, 'b')[5] struct<> -- !query output org.apache.spark.SparkNoSuchElementException -[MAP_KEY_DOES_NOT_EXIST] Key 5 does not exist. Use `try_element_at` to tolerate non-existent key and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -select map(1, 'a', 2, 'b')[5] - ^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"MAP_KEY_DOES_NOT_EXIST","messageParameters":["5","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":28,"fragment":"map(1, 'a', 2, 'b')[5"}]} -- !query @@ -77,7 +71,7 @@ select map_contains_key(map('1', 'a', '2', 'b'), 1) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'map_contains_key(map('1', 'a', '2', 'b'), 1)' due to data type mismatch: Input to function map_contains_key should have been map followed by a value with same key type, but it's [map<string,string>, int].; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -86,7 +80,7 @@ select map_contains_key(map(1, 'a', 2, 'b'), '1') struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'map_contains_key(map(1, 'a', 2, 'b'), '1')' due to data type mismatch: Input to function map_contains_key should have been map followed by a value with same key type, but it's [map<int,string>, string].; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -111,10 +105,7 @@ select element_at(map(1, 'a', 2, 'b'), 5) struct<> -- !query output org.apache.spark.SparkNoSuchElementException -[MAP_KEY_DOES_NOT_EXIST] Key 5 does not exist. Use `try_element_at` to tolerate non-existent key and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -select element_at(map(1, 'a', 2, 'b'), 5) - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"MAP_KEY_DOES_NOT_EXIST","messageParameters":["5","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":40,"fragment":"element_at(map(1, 'a', 2, 'b'), 5"}]} -- !query @@ -123,7 +114,4 @@ select element_at(map('a', 1, 'b', 2), 'c') struct<> -- !query output org.apache.spark.SparkNoSuchElementException -[MAP_KEY_DOES_NOT_EXIST] Key 'c' does not exist. Use `try_element_at` to tolerate non-existent key and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 1, position 8) == -select element_at(map('a', 1, 'b', 2), 'c') - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"MAP_KEY_DOES_NOT_EXIST","messageParameters":["'c'","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":42,"fragment":"element_at(map('a', 1, 'b', 2), 'c'"}]} diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out index ffa600f66fd10..c4f09a811da7e 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out @@ -5,7 +5,7 @@ select concat_ws() struct<> -- !query output org.apache.spark.sql.AnalysisException -requirement failed: concat_ws requires at least one argument.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -14,7 +14,7 @@ select format_string() struct<> -- !query output org.apache.spark.sql.AnalysisException -requirement failed: format_string() should take at least 1 argument; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -79,10 +79,7 @@ select left("abcd", -2), left("abcd", 0), left("abcd", 'a') struct<> -- !query output org.apache.spark.SparkNumberFormatException -[CAST_INVALID_INPUT] The value 'a' of the type "STRING" cannot be cast to "INT" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 43) == -...t("abcd", -2), left("abcd", 0), left("abcd", 'a') - ^^^^^^^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'a'","\"STRING\"","\"INT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":42,"stopIndex":58,"fragment":"left(\"abcd\", 'a'"}]} -- !query @@ -107,10 +104,7 @@ select right("abcd", -2), right("abcd", 0), right("abcd", 'a') struct<> -- !query output org.apache.spark.SparkNumberFormatException -[CAST_INVALID_INPUT] The value 'a' of the type "STRING" cannot be cast to "INT" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 45) == -...("abcd", -2), right("abcd", 0), right("abcd", 'a') - ^^^^^^^^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'a'","\"STRING\"","\"INT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":44,"stopIndex":61,"fragment":"right(\"abcd\", 'a'"}]} -- !query @@ -175,7 +169,7 @@ SELECT split_part('11.12.13', '.', 0) struct<> -- !query output org.apache.spark.SparkRuntimeException -[ELEMENT_AT_BY_INDEX_ZERO] The index 0 is invalid. An index shall be either < 0 or > 0 (the first element has index 1). +{"errorClass":"ELEMENT_AT_BY_INDEX_ZERO","messageParameters":[],"queryContext":[]} -- !query @@ -416,10 +410,7 @@ SELECT lpad('hi', 'invalid_length') struct<> -- !query output org.apache.spark.SparkNumberFormatException -[CAST_INVALID_INPUT] The value 'invalid_length' of the type "STRING" cannot be cast to "INT" because it is malformed. Correct the value as per the syntax, or change its target type. 
Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -SELECT lpad('hi', 'invalid_length') - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'invalid_length'","\"STRING\"","\"INT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":34,"fragment":"lpad('hi', 'invalid_length'"}]} -- !query @@ -428,10 +419,7 @@ SELECT rpad('hi', 'invalid_length') struct<> -- !query output org.apache.spark.SparkNumberFormatException -[CAST_INVALID_INPUT] The value 'invalid_length' of the type "STRING" cannot be cast to "INT" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -SELECT rpad('hi', 'invalid_length') - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'invalid_length'","\"STRING\"","\"INT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":34,"fragment":"rpad('hi', 'invalid_length'"}]} -- !query @@ -664,7 +652,7 @@ select decode() struct<> -- !query output org.apache.spark.sql.AnalysisException -Invalid number of arguments for function decode. Expected: 2; Found: 0; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -673,7 +661,7 @@ select decode(encode('abc', 'utf-8')) struct<> -- !query output org.apache.spark.sql.AnalysisException -Invalid number of arguments for function decode. Expected: 2; Found: 1; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1138,7 +1126,7 @@ select to_binary(null, cast(null as int)) struct<> -- !query output org.apache.spark.sql.AnalysisException -The 'format' parameter of function 'to_binary' needs to be a string literal.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1147,7 +1135,7 @@ select to_binary('abc', 1) struct<> -- !query output org.apache.spark.sql.AnalysisException -The 'format' parameter of function 'to_binary' needs to be a string literal.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1156,7 +1144,7 @@ select to_binary('abc', 'invalidFormat') struct<> -- !query output org.apache.spark.sql.AnalysisException -Invalid value for the 'format' parameter of function 'to_binary': invalidformat. The value has to be a case-insensitive string literal of 'hex', 'utf-8', or 'base64'. 
+{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out index 5ec7e51f1f2ad..d697772cbf571 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out @@ -13,12 +13,7 @@ select timestamp '2019-01-01中文' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the TIMESTAMP value: 2019-01-01中文(line 1, pos 7) - -== SQL == -select timestamp '2019-01-01中文' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -27,12 +22,7 @@ select timestamp'4294967297' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the TIMESTAMP value: 4294967297(line 1, pos 7) - -== SQL == -select timestamp'4294967297' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -41,12 +31,7 @@ select timestamp'2021-01-01T12:30:4294967297.123456' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the TIMESTAMP value: 2021-01-01T12:30:4294967297.123456(line 1, pos 7) - -== SQL == -select timestamp'2021-01-01T12:30:4294967297.123456' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -95,7 +80,7 @@ SELECT make_timestamp(2021, 07, 11, 6, 30, 60.007) struct<> -- !query output org.apache.spark.SparkDateTimeException -[INVALID_FRACTION_OF_SECOND] The fraction of sec must be zero. Valid range is [0, 60]. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +{"errorClass":"INVALID_FRACTION_OF_SECOND","sqlState":"22023","messageParameters":["\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -337,7 +322,7 @@ select to_timestamp('2019-10-06 10:11:12.', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]') struct<> -- !query output org.apache.spark.SparkDateTimeException -[CANNOT_PARSE_TIMESTAMP] Text '2019-10-06 10:11:12.' could not be parsed at index 20. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text '2019-10-06 10:11:12.' could not be parsed at index 20","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -402,7 +387,7 @@ select to_timestamp('2019-10-06 10:11:12.1234567PST', 'yyyy-MM-dd HH:mm:ss.SSSSS struct<> -- !query output org.apache.spark.SparkDateTimeException -[CANNOT_PARSE_TIMESTAMP] Text '2019-10-06 10:11:12.1234567PST' could not be parsed, unparsed text found at index 26. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text '2019-10-06 10:11:12.1234567PST' could not be parsed, unparsed text found at index 26","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -419,7 +404,7 @@ select to_timestamp('223456 2019-10-06 10:11:12.123456PST', 'SSSSSS yyyy-MM-dd H struct<> -- !query output org.apache.spark.SparkDateTimeException -[CANNOT_PARSE_TIMESTAMP] Text '223456 2019-10-06 10:11:12.123456PST' could not be parsed at index 27. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. 
+{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text '223456 2019-10-06 10:11:12.123456PST' could not be parsed at index 27","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -484,7 +469,7 @@ select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyyyy-MM-dd'S'HH:mm") struct<> -- !query output org.apache.spark.SparkDateTimeException -[CANNOT_PARSE_TIMESTAMP] Text '12.1232019-10-06S10:11' could not be parsed at index 7. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text '12.1232019-10-06S10:11' could not be parsed at index 7","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -493,7 +478,7 @@ select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyy-MM-dd'S'HH:mm") struct<> -- !query output org.apache.spark.SparkDateTimeException -[CANNOT_PARSE_TIMESTAMP] Text '12.1232019-10-06S10:11' could not be parsed at index 9. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text '12.1232019-10-06S10:11' could not be parsed at index 9","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -566,7 +551,7 @@ select to_timestamp("02-29", "MM-dd") struct<> -- !query output org.apache.spark.SparkDateTimeException -[CANNOT_PARSE_TIMESTAMP] Invalid date 'February 29' as '1970' is not a leap year. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Invalid date 'February 29' as '1970' is not a leap year","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -671,7 +656,7 @@ select timestamp'2011-11-11 11:11:11' + '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' + CAST('1' AS TIMESTAMP))' due to data type mismatch: '(TIMESTAMP '2011-11-11 11:11:11' + CAST('1' AS TIMESTAMP))' requires (numeric or interval day to second or interval year to month or interval) type, not timestamp; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -680,7 +665,7 @@ select '1' + timestamp'2011-11-11 11:11:11' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS TIMESTAMP) + TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: '(CAST('1' AS TIMESTAMP) + TIMESTAMP '2011-11-11 11:11:11')' requires (numeric or interval day to second or interval year to month or interval) type, not timestamp; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -689,7 +674,7 @@ select timestamp'2011-11-11 11:11:11' + null struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' + NULL)' due to data type mismatch: differing types in '(TIMESTAMP '2011-11-11 11:11:11' + NULL)' (timestamp and void).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -698,7 +683,7 @@ select null + timestamp'2011-11-11 11:11:11' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(NULL + TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(NULL + TIMESTAMP '2011-11-11 11:11:11')' (void and timestamp).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -730,8 +715,7 @@ select to_timestamp('2019-10-06 A', 'yyyy-MM-dd 
GGGGG') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'yyyy-MM-dd GGGGG' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'yyyy-MM-dd GGGGG'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -740,8 +724,7 @@ select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEEE') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'dd MM yyyy EEEEEE' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'dd MM yyyy EEEEEE'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -750,8 +733,7 @@ select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'dd MM yyyy EEEEE'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -760,8 +742,7 @@ select unix_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'dd MM yyyy EEEEE'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -770,8 +751,7 @@ select from_json('{"t":"26/October/2015"}', 't Timestamp', map('timestampFormat' struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 
1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'dd/MMMMM/yyyy'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -780,8 +760,7 @@ select from_csv('26/October/2015', 't Timestamp', map('timestampFormat', 'dd/MMM struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'dd/MMMMM/yyyy'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/try_arithmetic.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/try_arithmetic.sql.out index 8622b97a20502..7396b252e142e 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/try_arithmetic.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/try_arithmetic.sql.out @@ -141,7 +141,7 @@ SELECT try_add(interval 2 year, interval 2 second) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'INTERVAL '2' YEAR + INTERVAL '02' SECOND' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2' YEAR' is of interval year type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/try_datetime_functions.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/try_datetime_functions.sql.out index a2326ee08145b..2a7e37edccaf8 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/try_datetime_functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/try_datetime_functions.sql.out @@ -45,5 +45,4 @@ select try_to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEEE') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'dd MM yyyy EEEEEE' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 
2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'dd MM yyyy EEEEEE'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/try_element_at.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/try_element_at.sql.out index defac2ea5d62c..21b1f73b85f6c 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/try_element_at.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/try_element_at.sql.out @@ -5,7 +5,7 @@ SELECT try_element_at(array(1, 2, 3), 0) struct<> -- !query output org.apache.spark.SparkRuntimeException -[ELEMENT_AT_BY_INDEX_ZERO] The index 0 is invalid. An index shall be either < 0 or > 0 (the first element has index 1). +{"errorClass":"ELEMENT_AT_BY_INDEX_ZERO","messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/array.sql.out b/sql/core/src/test/resources/sql-tests/results/array.sql.out index 25bc001d3d681..c8ca2c6aa6e36 100644 --- a/sql/core/src/test/resources/sql-tests/results/array.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/array.sql.out @@ -128,7 +128,7 @@ select sort_array(array('b', 'd'), '1') struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'sort_array(array('b', 'd'), '1')' due to data type mismatch: Sort order in second argument requires a boolean literal.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -137,7 +137,7 @@ select sort_array(array('b', 'd'), cast(NULL as boolean)) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'sort_array(array('b', 'd'), CAST(NULL AS BOOLEAN))' due to data type mismatch: Sort order in second argument requires a boolean literal.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -181,7 +181,7 @@ select element_at(array(1, 2, 3), 0) struct<> -- !query output org.apache.spark.SparkRuntimeException -[ELEMENT_AT_BY_INDEX_ZERO] The index 0 is invalid. An index shall be either < 0 or > 0 (the first element has index 1). 
+{"errorClass":"ELEMENT_AT_BY_INDEX_ZERO","messageParameters":[],"queryContext":[]} -- !query @@ -294,4 +294,4 @@ select array_size(map('a', 1, 'b', 2)) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'array_size(map('a', 1, 'b', 2))' due to data type mismatch: argument 1 requires array type, however, 'map('a', 1, 'b', 2)' is of map type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/bitwise.sql.out b/sql/core/src/test/resources/sql-tests/results/bitwise.sql.out index 84d2e9e50ba4f..b045e307ce22a 100644 --- a/sql/core/src/test/resources/sql-tests/results/bitwise.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/bitwise.sql.out @@ -149,7 +149,7 @@ select bit_count("bit count") struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'bit_count('bit count')' due to data type mismatch: argument 1 requires (integral or boolean) type, however, ''bit count'' is of string type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -158,7 +158,7 @@ select bit_count('a') struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'bit_count('a')' due to data type mismatch: argument 1 requires (integral or boolean) type, however, ''a'' is of string type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/cast.sql.out b/sql/core/src/test/resources/sql-tests/results/cast.sql.out index 2b976914bfe98..8e6a5c0404d10 100644 --- a/sql/core/src/test/resources/sql-tests/results/cast.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/cast.sql.out @@ -350,12 +350,7 @@ SELECT CAST(interval 3 month 1 hour AS string) struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot mix year-month and day-time fields: interval 3 month 1 hour(line 1, pos 12) - -== SQL == -SELECT CAST(interval 3 month 1 hour AS string) -------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -631,7 +626,7 @@ select cast(interval '23:59:59' hour to second as smallint) struct<> -- !query output org.apache.spark.SparkArithmeticException -[CAST_OVERFLOW] The value INTERVAL '23:59:59' HOUR TO SECOND of the type "INTERVAL HOUR TO SECOND" cannot be cast to "SMALLINT" due to an overflow. Use `try_cast` to tolerate overflow and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":["INTERVAL '23:59:59' HOUR TO SECOND","\"INTERVAL HOUR TO SECOND\"","\"SMALLINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -656,7 +651,7 @@ select cast(interval '-1000' month as tinyint) struct<> -- !query output org.apache.spark.SparkArithmeticException -[CAST_OVERFLOW] The value INTERVAL '-1000' MONTH of the type "INTERVAL MONTH" cannot be cast to "TINYINT" due to an overflow. Use `try_cast` to tolerate overflow and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. 
+{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":["INTERVAL '-1000' MONTH","\"INTERVAL MONTH\"","\"TINYINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -665,7 +660,7 @@ select cast(interval '1000000' second as smallint) struct<> -- !query output org.apache.spark.SparkArithmeticException -[CAST_OVERFLOW] The value INTERVAL '1000000' SECOND of the type "INTERVAL SECOND" cannot be cast to "SMALLINT" due to an overflow. Use `try_cast` to tolerate overflow and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":["INTERVAL '1000000' SECOND","\"INTERVAL SECOND\"","\"SMALLINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -730,7 +725,7 @@ select cast(2147483647 as interval year) struct<> -- !query output org.apache.spark.SparkArithmeticException -[CAST_OVERFLOW] The value 2147483647 of the type "INT" cannot be cast to "INTERVAL YEAR" due to an overflow. Use `try_cast` to tolerate overflow and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":["2147483647","\"INT\"","\"INTERVAL YEAR\"","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -739,7 +734,7 @@ select cast(-9223372036854775808L as interval day) struct<> -- !query output org.apache.spark.SparkArithmeticException -[CAST_OVERFLOW] The value -9223372036854775808L of the type "BIGINT" cannot be cast to "INTERVAL DAY" due to an overflow. Use `try_cast` to tolerate overflow and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":["-9223372036854775808L","\"BIGINT\"","\"INTERVAL DAY\"","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -804,4 +799,4 @@ select cast(interval '10.123' second as decimal(1, 0)) struct<> -- !query output org.apache.spark.SparkArithmeticException -[CANNOT_CHANGE_DECIMAL_PRECISION] Decimal(compact, 10, 18, 6) cannot be represented as Decimal(1, 0). If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":["Decimal(compact, 10, 18, 6)","1","0","\"spark.sql.ansi.enabled\""],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/ceil-floor-with-scale-param.sql.out b/sql/core/src/test/resources/sql-tests/results/ceil-floor-with-scale-param.sql.out index 38efdac409231..cd76b53163a99 100644 --- a/sql/core/src/test/resources/sql-tests/results/ceil-floor-with-scale-param.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ceil-floor-with-scale-param.sql.out @@ -93,7 +93,7 @@ SELECT CEIL(2.5, null) struct<> -- !query output org.apache.spark.sql.AnalysisException -The 'scale' parameter of function 'ceil' needs to be a int literal.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -102,7 +102,7 @@ SELECT CEIL(2.5, 'a') struct<> -- !query output org.apache.spark.sql.AnalysisException -The 'scale' parameter of function 'ceil' needs to be a int literal.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -111,7 +111,7 @@ SELECT CEIL(2.5, 0, 0) struct<> -- !query output org.apache.spark.sql.AnalysisException -Invalid number of arguments for function ceil. 
Expected: 2; Found: 3; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -208,7 +208,7 @@ SELECT FLOOR(2.5, null) struct<> -- !query output org.apache.spark.sql.AnalysisException -The 'scale' parameter of function 'floor' needs to be a int literal.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -217,7 +217,7 @@ SELECT FLOOR(2.5, 'a') struct<> -- !query output org.apache.spark.sql.AnalysisException -The 'scale' parameter of function 'floor' needs to be a int literal.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -226,4 +226,4 @@ SELECT FLOOR(2.5, 0, 0) struct<> -- !query output org.apache.spark.sql.AnalysisException -Invalid number of arguments for function floor. Expected: 2; Found: 3; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/change-column.sql.out b/sql/core/src/test/resources/sql-tests/results/change-column.sql.out index ed5871d38553d..3b3b4f0fa8e0f 100644 --- a/sql/core/src/test/resources/sql-tests/results/change-column.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/change-column.sql.out @@ -23,12 +23,7 @@ ALTER TABLE test_change CHANGE a struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Operation not allowed: ALTER TABLE table CHANGE COLUMN requires a TYPE, a SET/DROP, a COMMENT, or a FIRST/AFTER(line 1, pos 0) - -== SQL == -ALTER TABLE test_change CHANGE a -^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -47,7 +42,7 @@ ALTER TABLE test_change RENAME COLUMN a TO a1 struct<> -- !query output org.apache.spark.sql.AnalysisException -RENAME COLUMN is only supported with v2 tables. +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -66,7 +61,7 @@ ALTER TABLE test_change CHANGE a TYPE STRING struct<> -- !query output org.apache.spark.sql.AnalysisException -ALTER TABLE CHANGE COLUMN is not supported for changing column 'a' with type 'IntegerType' to 'a' with type 'StringType' +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -85,7 +80,7 @@ ALTER TABLE test_change CHANGE a AFTER b struct<> -- !query output org.apache.spark.sql.AnalysisException -ALTER COLUMN ... FIRST | ALTER is only supported with v2 tables. +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -94,7 +89,7 @@ ALTER TABLE test_change CHANGE b FIRST struct<> -- !query output org.apache.spark.sql.AnalysisException -ALTER COLUMN ... FIRST | ALTER is only supported with v2 tables. +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -173,12 +168,7 @@ ALTER TABLE test_change CHANGE invalid_col TYPE INT struct<> -- !query output org.apache.spark.sql.AnalysisException -Missing field invalid_col in table spark_catalog.default.test_change with schema: -root - |-- a: integer (nullable = true) - |-- b: string (nullable = true) - |-- c: integer (nullable = true) -; line 1 pos 0 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -223,7 +213,7 @@ ALTER TABLE temp_view CHANGE a TYPE INT struct<> -- !query output org.apache.spark.sql.AnalysisException -temp_view is a temp view. 'ALTER TABLE ... 
CHANGE COLUMN' expects a table.; line 1 pos 12 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -240,7 +230,7 @@ ALTER TABLE global_temp.global_temp_view CHANGE a TYPE INT struct<> -- !query output org.apache.spark.sql.AnalysisException -global_temp.global_temp_view is a temp view. 'ALTER TABLE ... CHANGE COLUMN' expects a table.; line 1 pos 12 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/charvarchar.sql.out b/sql/core/src/test/resources/sql-tests/results/charvarchar.sql.out index 1f79817787a43..b92f7b38ff234 100644 --- a/sql/core/src/test/resources/sql-tests/results/charvarchar.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/charvarchar.sql.out @@ -259,7 +259,7 @@ alter table char_tbl1 change column c type char(6) struct<> -- !query output org.apache.spark.sql.AnalysisException -ALTER TABLE CHANGE COLUMN is not supported for changing column 'c' with type 'CharType(5)' to 'c' with type 'CharType(6)' +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -575,7 +575,7 @@ alter table char_part partition (v2='ke') rename to partition (v2='nt') struct<> -- !query output org.apache.spark.sql.AnalysisException -Partition spec is invalid. The spec (v2) must match the partition spec (v2, c2) defined in table '`spark_catalog`.`default`.`char_part`' +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/columnresolution-negative.sql.out b/sql/core/src/test/resources/sql-tests/results/columnresolution-negative.sql.out index 9d0b9e444681c..387f661d005a8 100644 --- a/sql/core/src/test/resources/sql-tests/results/columnresolution-negative.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/columnresolution-negative.sql.out @@ -69,7 +69,7 @@ SELECT i1 FROM t1, mydb1.t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -Reference 'i1' is ambiguous, could be: spark_catalog.mydb1.t1.i1, spark_catalog.mydb1.t1.i1.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -78,7 +78,7 @@ SELECT t1.i1 FROM t1, mydb1.t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -Reference 't1.i1' is ambiguous, could be: spark_catalog.mydb1.t1.i1, spark_catalog.mydb1.t1.i1.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -87,7 +87,7 @@ SELECT mydb1.t1.i1 FROM t1, mydb1.t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -Reference 'mydb1.t1.i1' is ambiguous, could be: spark_catalog.mydb1.t1.i1, spark_catalog.mydb1.t1.i1.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -96,7 +96,7 @@ SELECT i1 FROM t1, mydb2.t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -Reference 'i1' is ambiguous, could be: spark_catalog.mydb1.t1.i1, spark_catalog.mydb2.t1.i1.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -105,7 +105,7 @@ SELECT t1.i1 FROM t1, mydb2.t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -Reference 't1.i1' is ambiguous, could be: spark_catalog.mydb1.t1.i1, spark_catalog.mydb2.t1.i1.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -122,7 +122,7 @@ SELECT i1 FROM t1, mydb1.t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -Reference 'i1' is ambiguous, could be: spark_catalog.mydb2.t1.i1, 
spark_catalog.mydb1.t1.i1.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -131,7 +131,7 @@ SELECT t1.i1 FROM t1, mydb1.t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -Reference 't1.i1' is ambiguous, could be: spark_catalog.mydb2.t1.i1, spark_catalog.mydb1.t1.i1.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -140,7 +140,7 @@ SELECT i1 FROM t1, mydb2.t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -Reference 'i1' is ambiguous, could be: spark_catalog.mydb2.t1.i1, spark_catalog.mydb2.t1.i1.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -149,7 +149,7 @@ SELECT t1.i1 FROM t1, mydb2.t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -Reference 't1.i1' is ambiguous, could be: spark_catalog.mydb2.t1.i1, spark_catalog.mydb2.t1.i1.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -158,7 +158,7 @@ SELECT db1.t1.i1 FROM t1, mydb2.t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -[UNRESOLVED_COLUMN] A column or function parameter with name `db1`.`t1`.`i1` cannot be resolved. Did you mean one of the following? [`spark_catalog`.`mydb2`.`t1`.`i1`, `spark_catalog`.`mydb2`.`t1`.`i1`]; line 1 pos 7 +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`db1`.`t1`.`i1`","`spark_catalog`.`mydb2`.`t1`.`i1`, `spark_catalog`.`mydb2`.`t1`.`i1`"],"queryContext":[]} -- !query @@ -183,7 +183,7 @@ SELECT mydb1.t1 FROM t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -[UNRESOLVED_COLUMN] A column or function parameter with name `mydb1`.`t1` cannot be resolved. Did you mean one of the following? [`spark_catalog`.`mydb1`.`t1`.`i1`]; line 1 pos 7 +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`mydb1`.`t1`","`spark_catalog`.`mydb1`.`t1`.`i1`"],"queryContext":[]} -- !query @@ -192,7 +192,7 @@ SELECT t1.x.y.* FROM t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 't1.x.y.*' given input columns 'i1'; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -201,7 +201,7 @@ SELECT t1 FROM mydb1.t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -[UNRESOLVED_COLUMN] A column or function parameter with name `t1` cannot be resolved. Did you mean one of the following? [`spark_catalog`.`mydb1`.`t1`.`i1`]; line 1 pos 7 +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`t1`","`spark_catalog`.`mydb1`.`t1`.`i1`"],"queryContext":[]} -- !query @@ -218,7 +218,7 @@ SELECT mydb1.t1.i1 FROM t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -[UNRESOLVED_COLUMN] A column or function parameter with name `mydb1`.`t1`.`i1` cannot be resolved. Did you mean one of the following? 
[`spark_catalog`.`mydb2`.`t1`.`i1`]; line 1 pos 7 +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`mydb1`.`t1`.`i1`","`spark_catalog`.`mydb2`.`t1`.`i1`"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/comments.sql.out b/sql/core/src/test/resources/sql-tests/results/comments.sql.out index f05d188740fae..a9cdb9161687e 100644 --- a/sql/core/src/test/resources/sql-tests/results/comments.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/comments.sql.out @@ -132,20 +132,7 @@ select 1 as a struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Unclosed bracketed comment(line 3, pos 0) - -== SQL == -/*abc*/ -select 1 as a -/* -^^^ - -2 as b -/*abc*/ -, 3 as c - -/**/ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -163,18 +150,4 @@ select 4 as d struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Unclosed bracketed comment(line 3, pos 0) - -== SQL == -/*abc*/ -select 1 as a -/* -^^^ - -2 as b -/*abc*/ -, 3 as c - -/**/ -select 4 as d +{"errorClass":null,"messageParameters":[],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/count.sql.out b/sql/core/src/test/resources/sql-tests/results/count.sql.out index ab9b543a9f80b..93b7a5eeffe42 100644 --- a/sql/core/src/test/resources/sql-tests/results/count.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/count.sql.out @@ -146,7 +146,7 @@ SELECT count() FROM testData struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'count()' due to data type mismatch: count requires at least one argument. If you have to call the function count without arguments, set the legacy configuration `spark.sql.legacy.allowParameterlessCount` as true; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -179,4 +179,4 @@ SELECT count(testData.*) FROM testData struct<> -- !query output org.apache.spark.sql.AnalysisException -count(testData.*) is not allowed. Please use count(*) or expand the columns manually, e.g. 
count(col1, col2) +{"errorClass":null,"messageParameters":[],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/csv-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/csv-functions.sql.out index 301e5cc78df4b..44d91d6d32580 100644 --- a/sql/core/src/test/resources/sql-tests/results/csv-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/csv-functions.sql.out @@ -21,7 +21,7 @@ select from_csv('1', 1) struct<> -- !query output org.apache.spark.sql.AnalysisException -The expression '1' is not a valid schema string.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -30,20 +30,7 @@ select from_csv('1', 'a InvalidType') struct<> -- !query output org.apache.spark.sql.AnalysisException -Cannot parse the data type: -[PARSE_SYNTAX_ERROR] Syntax error at or near 'InvalidType': extra input 'InvalidType'(line 1, pos 2) - -== SQL == -a InvalidType ---^^^ - -Failed fallback parsing: -DataType invalidtype is not supported.(line 1, pos 2) - -== SQL == -a InvalidType ---^^^ -; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -52,7 +39,7 @@ select from_csv('1', 'a INT', named_struct('mode', 'PERMISSIVE')) struct<> -- !query output org.apache.spark.sql.AnalysisException -Must use a map() function for options; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -61,7 +48,7 @@ select from_csv('1', 'a INT', map('mode', 1)) struct<> -- !query output org.apache.spark.sql.AnalysisException -A type of keys and values in map() must be string, but got map; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -70,7 +57,7 @@ select from_csv() struct<> -- !query output org.apache.spark.sql.AnalysisException -Invalid number of arguments for function from_csv. 
Expected: one of 2 and 3; Found: 0; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -95,7 +82,7 @@ select schema_of_csv(null) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'schema_of_csv(NULL)' due to data type mismatch: The input csv should be a foldable string expression and not null; however, got NULL.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -112,7 +99,7 @@ SELECT schema_of_csv(csvField) FROM csvTable struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'schema_of_csv(csvtable.csvField)' due to data type mismatch: The input csv should be a foldable string expression and not null; however, got csvtable.csvField.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -145,7 +132,7 @@ select to_csv(named_struct('a', 1, 'b', 2), named_struct('mode', 'PERMISSIVE')) struct<> -- !query output org.apache.spark.sql.AnalysisException -Must use a map() function for options; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -154,4 +141,4 @@ select to_csv(named_struct('a', 1, 'b', 2), map('mode', 1)) struct<> -- !query output org.apache.spark.sql.AnalysisException -A type of keys and values in map() must be string, but got map; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/cte-legacy.sql.out b/sql/core/src/test/resources/sql-tests/results/cte-legacy.sql.out index 34f11d9da53f6..ddcc30e6597eb 100644 --- a/sql/core/src/test/resources/sql-tests/results/cte-legacy.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/cte-legacy.sql.out @@ -232,4 +232,4 @@ SELECT * FROM t2 struct<> -- !query output org.apache.spark.sql.AnalysisException -Table or view not found: t1; line 5 pos 20 +{"errorClass":null,"messageParameters":[],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/cte-nested.sql.out b/sql/core/src/test/resources/sql-tests/results/cte-nested.sql.out index 13b4d10304e3a..7fb01026703cd 100644 --- a/sql/core/src/test/resources/sql-tests/results/cte-nested.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/cte-nested.sql.out @@ -45,7 +45,7 @@ SELECT * FROM t2 struct<> -- !query output org.apache.spark.sql.AnalysisException -Name t is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228. +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -82,7 +82,7 @@ SELECT * FROM t2 struct<> -- !query output org.apache.spark.sql.AnalysisException -Name t is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228. +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -136,7 +136,7 @@ SELECT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -Name t is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228. 
+{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -151,7 +151,7 @@ SELECT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -Name t is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228. +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -167,7 +167,7 @@ SELECT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -Name t is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228. +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -181,7 +181,7 @@ WHERE c IN ( struct<> -- !query output org.apache.spark.sql.AnalysisException -Name t is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228. +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -210,7 +210,7 @@ SELECT * FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -Name aBc is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228. +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -223,7 +223,7 @@ SELECT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -Name aBc is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228. 
+{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/cte.sql.out b/sql/core/src/test/resources/sql-tests/results/cte.sql.out index 21faff0d32187..5b09d8267fffa 100644 --- a/sql/core/src/test/resources/sql-tests/results/cte.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/cte.sql.out @@ -21,7 +21,7 @@ WITH s AS (SELECT 1 FROM s) SELECT * FROM s struct<> -- !query output org.apache.spark.sql.AnalysisException -Table or view not found: s; line 1 pos 25 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -31,7 +31,7 @@ SELECT * FROM r struct<> -- !query output org.apache.spark.sql.AnalysisException -Table or view not found: r; line 1 pos 33 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -50,7 +50,7 @@ WITH s1 AS (SELECT 1 FROM s2), s2 AS (SELECT 1 FROM s1) SELECT * FROM s1, s2 struct<> -- !query output org.apache.spark.sql.AnalysisException -Table or view not found: s2; line 1 pos 26 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -129,13 +129,7 @@ SELECT * FROM t struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -[PARSE_SYNTAX_ERROR] Syntax error at or near ')'(line 1, pos 7) - -== SQL == -WITH t() AS (SELECT 1) --------^^^ -SELECT * FROM t +{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["')'",""],"queryContext":[]} -- !query @@ -147,15 +141,7 @@ SELECT * FROM t struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -CTE definition can't have duplicate names: 't'.(line 1, pos 0) - -== SQL == -WITH -^^^ - t(x) AS (SELECT 1), - t(x) AS (SELECT 2) -SELECT * FROM t +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/date.sql.out b/sql/core/src/test/resources/sql-tests/results/date.sql.out index 7eb12a635cf45..3e29f8fafa81c 100644 --- a/sql/core/src/test/resources/sql-tests/results/date.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/date.sql.out @@ -21,12 +21,7 @@ select date '2020-01-01中文' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the DATE value: 2020-01-01中文(line 1, pos 7) - -== SQL == -select date '2020-01-01中文' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -59,12 +54,7 @@ select date'015' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the DATE value: 015(line 1, pos 7) - -== SQL == -select date'015' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -73,12 +63,7 @@ select date'2021-4294967297-11' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the DATE value: 2021-4294967297-11(line 1, pos 7) - -== SQL == -select date'2021-4294967297-11' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -281,7 +266,7 @@ select date_add('2011-11-11', 1L) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1L)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1L' is of bigint type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -290,7 +275,7 @@ select date_add('2011-11-11', 1.0) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 
'date_add(CAST('2011-11-11' AS DATE), 1.0BD)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -299,7 +284,7 @@ select date_add('2011-11-11', 1E1) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -316,7 +301,7 @@ select date_add('2011-11-11', '1.2') struct<> -- !query output org.apache.spark.sql.AnalysisException -[SECOND_FUNCTION_ARGUMENT_NOT_INTEGER] The second argument of date_add function needs to be an integer. +{"errorClass":"SECOND_FUNCTION_ARGUMENT_NOT_INTEGER","sqlState":"22023","messageParameters":["date_add"],"queryContext":[]} -- !query @@ -389,7 +374,7 @@ select date_sub('2011-11-11', 1L) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 1L)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1L' is of bigint type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -398,7 +383,7 @@ select date_sub('2011-11-11', 1.0) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 1.0BD)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -407,7 +392,7 @@ select date_sub('2011-11-11', 1E1) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -424,7 +409,7 @@ select date_sub(date'2011-11-11', '1.2') struct<> -- !query output org.apache.spark.sql.AnalysisException -[SECOND_FUNCTION_ARGUMENT_NOT_INTEGER] The second argument of date_sub function needs to be an integer. 
+{"errorClass":"SECOND_FUNCTION_ARGUMENT_NOT_INTEGER","sqlState":"22023","messageParameters":["date_sub"],"queryContext":[]} -- !query @@ -465,7 +450,7 @@ select date_add('2011-11-11', int_str) from date_view struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'date_add(CAST('2011-11-11' AS DATE), date_view.int_str)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'date_view.int_str' is of string type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -474,7 +459,7 @@ select date_sub('2011-11-11', int_str) from date_view struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), date_view.int_str)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'date_view.int_str' is of string type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -499,7 +484,7 @@ select date '2011-11-11' + 1E1 struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'date_add(DATE '2011-11-11', 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -540,7 +525,7 @@ select date '2001-10-01' - '2001-09-28' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'date_sub(DATE '2001-10-01', CAST('2001-09-28' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('2001-09-28' AS DOUBLE)' is of double type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -581,7 +566,7 @@ select date '2001-09-28' - date_str from date_view struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'date_sub(DATE '2001-09-28', CAST(date_view.date_str AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(date_view.date_str AS DOUBLE)' is of double type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -590,7 +575,7 @@ select date'2011-11-11' + '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'date_add(DATE '2011-11-11', CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -599,7 +584,7 @@ select '1' + date'2011-11-11' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'date_add(DATE '2011-11-11', CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -637,8 +622,7 @@ select to_date('26/October/2015', 'dd/MMMMM/yyyy') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 
2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'dd/MMMMM/yyyy'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -647,8 +631,7 @@ select from_json('{"d":"26/October/2015"}', 'd Date', map('dateFormat', 'dd/MMMM struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'dd/MMMMM/yyyy'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -657,8 +640,7 @@ select from_csv('26/October/2015', 'd Date', map('dateFormat', 'dd/MMMMM/yyyy')) struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'dd/MMMMM/yyyy'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/datetime-formatting-invalid.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime-formatting-invalid.sql.out index d565dfd7bae2f..6827bc1546dc2 100644 --- a/sql/core/src/test/resources/sql-tests/results/datetime-formatting-invalid.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/datetime-formatting-invalid.sql.out @@ -5,8 +5,7 @@ select date_format('2018-11-17 13:33:33.333', 'GGGGG') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'GGGGG' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'GGGGG'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -15,8 +14,7 @@ select date_format('2018-11-17 13:33:33.333', 'yyyyyyy') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'yyyyyyy' pattern in the DateTimeFormatter. 
1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'yyyyyyy'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -43,8 +41,7 @@ select date_format('2018-11-17 13:33:33.333', 'MMMMM') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'MMMMM' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'MMMMM'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -53,8 +50,7 @@ select date_format('2018-11-17 13:33:33.333', 'LLLLL') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'LLLLL' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'LLLLL'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -63,8 +59,7 @@ select date_format('2018-11-17 13:33:33.333', 'EEEEE') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'EEEEE' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'EEEEE'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -73,8 +68,7 @@ select date_format('2018-11-17 13:33:33.333', 'FF') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'FF' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 
2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'FF'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -83,8 +77,7 @@ select date_format('2018-11-17 13:33:33.333', 'ddd') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'ddd' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'ddd'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -93,8 +86,7 @@ select date_format('2018-11-17 13:33:33.333', 'DDDD') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'DDDD' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'DDDD'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -103,8 +95,7 @@ select date_format('2018-11-17 13:33:33.333', 'HHH') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'HHH' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'HHH'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -113,8 +104,7 @@ select date_format('2018-11-17 13:33:33.333', 'hhh') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'hhh' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 
2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'hhh'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -123,8 +113,7 @@ select date_format('2018-11-17 13:33:33.333', 'kkk') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'kkk' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'kkk'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -133,8 +122,7 @@ select date_format('2018-11-17 13:33:33.333', 'KKK') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'KKK' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'KKK'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -143,8 +131,7 @@ select date_format('2018-11-17 13:33:33.333', 'mmm') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'mmm' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'mmm'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -153,8 +140,7 @@ select date_format('2018-11-17 13:33:33.333', 'sss') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'sss' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 
2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'sss'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -163,8 +149,7 @@ select date_format('2018-11-17 13:33:33.333', 'SSSSSSSSSS') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'SSSSSSSSSS' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'SSSSSSSSSS'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -173,8 +158,7 @@ select date_format('2018-11-17 13:33:33.333', 'aa') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'aa' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'aa'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -192,8 +176,7 @@ select date_format('2018-11-17 13:33:33.333', 'zzzzz') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'zzzzz' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'zzzzz'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -211,8 +194,7 @@ select date_format('2018-11-17 13:33:33.333', 'ZZZZZZ') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'ZZZZZZ' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 
2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'ZZZZZZ'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -275,8 +257,7 @@ select date_format('2018-11-17 13:33:33.333', 'Y') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'Y' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'Y'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -285,8 +266,7 @@ select date_format('2018-11-17 13:33:33.333', 'w') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'w' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'w'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -295,8 +275,7 @@ select date_format('2018-11-17 13:33:33.333', 'W') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'W' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'W'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -305,8 +284,7 @@ select date_format('2018-11-17 13:33:33.333', 'u') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'u' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 
2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'u'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out index 5ff20b35d5efa..e5dd1ddbef80e 100644 --- a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out @@ -21,12 +21,7 @@ select date '2020-01-01中文' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the DATE value: 2020-01-01中文(line 1, pos 7) - -== SQL == -select date '2020-01-01中文' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -59,12 +54,7 @@ select date'015' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the DATE value: 015(line 1, pos 7) - -== SQL == -select date'015' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -73,12 +63,7 @@ select date'2021-4294967297-11' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the DATE value: 2021-4294967297-11(line 1, pos 7) - -== SQL == -select date'2021-4294967297-11' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -281,7 +266,7 @@ select date_add('2011-11-11', 1L) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1L)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1L' is of bigint type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -290,7 +275,7 @@ select date_add('2011-11-11', 1.0) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1.0BD)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -299,7 +284,7 @@ select date_add('2011-11-11', 1E1) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -316,7 +301,7 @@ select date_add('2011-11-11', '1.2') struct<> -- !query output org.apache.spark.sql.AnalysisException -[SECOND_FUNCTION_ARGUMENT_NOT_INTEGER] The second argument of date_add function needs to be an integer. 
+{"errorClass":"SECOND_FUNCTION_ARGUMENT_NOT_INTEGER","sqlState":"22023","messageParameters":["date_add"],"queryContext":[]} -- !query @@ -389,7 +374,7 @@ select date_sub('2011-11-11', 1L) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 1L)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1L' is of bigint type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -398,7 +383,7 @@ select date_sub('2011-11-11', 1.0) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 1.0BD)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -407,7 +392,7 @@ select date_sub('2011-11-11', 1E1) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -424,7 +409,7 @@ select date_sub(date'2011-11-11', '1.2') struct<> -- !query output org.apache.spark.sql.AnalysisException -[SECOND_FUNCTION_ARGUMENT_NOT_INTEGER] The second argument of date_sub function needs to be an integer. +{"errorClass":"SECOND_FUNCTION_ARGUMENT_NOT_INTEGER","sqlState":"22023","messageParameters":["date_sub"],"queryContext":[]} -- !query @@ -465,7 +450,7 @@ select date_add('2011-11-11', int_str) from date_view struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'date_add(CAST('2011-11-11' AS DATE), date_view.int_str)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'date_view.int_str' is of string type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -474,7 +459,7 @@ select date_sub('2011-11-11', int_str) from date_view struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), date_view.int_str)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'date_view.int_str' is of string type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -499,7 +484,7 @@ select date '2011-11-11' + 1E1 struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'date_add(DATE '2011-11-11', 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -540,7 +525,7 @@ select date '2001-10-01' - '2001-09-28' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'date_sub(DATE '2001-10-01', CAST('2001-09-28' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('2001-09-28' AS DOUBLE)' is of double type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -581,7 +566,7 @@ select date '2001-09-28' - date_str from date_view struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'date_sub(DATE '2001-09-28', CAST(date_view.date_str AS DOUBLE))' due to data type 
mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(date_view.date_str AS DOUBLE)' is of double type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -590,7 +575,7 @@ select date'2011-11-11' + '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'date_add(DATE '2011-11-11', CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -599,7 +584,7 @@ select '1' + date'2011-11-11' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'date_add(DATE '2011-11-11', CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -829,12 +814,7 @@ select timestamp '2019-01-01中文' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the TIMESTAMP value: 2019-01-01中文(line 1, pos 7) - -== SQL == -select timestamp '2019-01-01中文' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -843,12 +823,7 @@ select timestamp'4294967297' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the TIMESTAMP value: 4294967297(line 1, pos 7) - -== SQL == -select timestamp'4294967297' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -857,12 +832,7 @@ select timestamp'2021-01-01T12:30:4294967297.123456' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the TIMESTAMP value: 2021-01-01T12:30:4294967297.123456(line 1, pos 7) - -== SQL == -select timestamp'2021-01-01T12:30:4294967297.123456' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1421,7 +1391,7 @@ select timestamp'2011-11-11 11:11:11' - '2011-11-11 11:11:10' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' - '2011-11-11 11:11:10')' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, ''2011-11-11 11:11:10'' is of string type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1430,7 +1400,7 @@ select '2011-11-11 11:11:11' - timestamp'2011-11-11 11:11:10' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '('2011-11-11 11:11:11' - TIMESTAMP '2011-11-11 11:11:10')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, ''2011-11-11 11:11:11'' is of string type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1463,7 +1433,7 @@ select str - timestamp'2011-11-11 11:11:11' from ts_view struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(ts_view.str - TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'ts_view.str' is of string type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1472,7 +1442,7 @@ select timestamp'2011-11-11 11:11:11' - str from ts_view struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot 
resolve '(TIMESTAMP '2011-11-11 11:11:11' - ts_view.str)' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'ts_view.str' is of string type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1481,7 +1451,7 @@ select timestamp'2011-11-11 11:11:11' + '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' + CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(TIMESTAMP '2011-11-11 11:11:11' + CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1490,7 +1460,7 @@ select '1' + timestamp'2011-11-11 11:11:11' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS DOUBLE) + TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) + TIMESTAMP '2011-11-11 11:11:11')' (double and timestamp).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1499,7 +1469,7 @@ select timestamp'2011-11-11 11:11:11' + null struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' + NULL)' due to data type mismatch: differing types in '(TIMESTAMP '2011-11-11 11:11:11' + NULL)' (timestamp and void).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1508,7 +1478,7 @@ select null + timestamp'2011-11-11 11:11:11' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(NULL + TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(NULL + TIMESTAMP '2011-11-11 11:11:11')' (void and timestamp).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out index 6de89f9eda4b6..14012127fb28e 100644 --- a/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out @@ -14,8 +14,7 @@ select to_timestamp('1', 'yy') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to parse '1' in the new parser. You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0, or set to "CORRECTED" and treat it as an invalid datetime string. +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":["'1'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -32,8 +31,7 @@ select to_timestamp('123', 'yy') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to parse '123' in the new parser. You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0, or set to "CORRECTED" and treat it as an invalid datetime string. 
+{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":["'123'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -42,8 +40,7 @@ select to_timestamp('1', 'yyy') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to parse '1' in the new parser. You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0, or set to "CORRECTED" and treat it as an invalid datetime string. +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":["'1'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -52,8 +49,7 @@ select to_timestamp('1234567', 'yyyyyyy') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'yyyyyyy' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'yyyyyyy'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -70,8 +66,7 @@ select to_timestamp('9', 'DD') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to parse '9' in the new parser. You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0, or set to "CORRECTED" and treat it as an invalid datetime string. +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":["'9'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -80,8 +75,7 @@ select to_timestamp('9', 'DDD') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to parse '9' in the new parser. You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0, or set to "CORRECTED" and treat it as an invalid datetime string. +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":["'9'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -90,8 +84,7 @@ select to_timestamp('99', 'DDD') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to parse '99' in the new parser. You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0, or set to "CORRECTED" and treat it as an invalid datetime string. 
+{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":["'99'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -156,8 +149,7 @@ select from_csv('2018-366', 'date Date', map('dateFormat', 'yyyy-DDD')) struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to parse '2018-366' in the new parser. You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0, or set to "CORRECTED" and treat it as an invalid datetime string. +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":["'2018-366'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/describe-query.sql.out b/sql/core/src/test/resources/sql-tests/results/describe-query.sql.out index 60b352f04a69b..9b46264fd57c5 100644 --- a/sql/core/src/test/resources/sql-tests/results/describe-query.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/describe-query.sql.out @@ -108,12 +108,7 @@ DESCRIBE INSERT INTO desc_temp1 values (1, 'val1') struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -[PARSE_SYNTAX_ERROR] Syntax error at or near 'desc_temp1'(line 1, pos 21) - -== SQL == -DESCRIBE INSERT INTO desc_temp1 values (1, 'val1') ----------------------^^^ +{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'desc_temp1'",""],"queryContext":[]} -- !query @@ -122,12 +117,7 @@ DESCRIBE INSERT INTO desc_temp1 SELECT * FROM desc_temp2 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -[PARSE_SYNTAX_ERROR] Syntax error at or near 'desc_temp1'(line 1, pos 21) - -== SQL == -DESCRIBE INSERT INTO desc_temp1 SELECT * FROM desc_temp2 ----------------------^^^ +{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'desc_temp1'",""],"queryContext":[]} -- !query @@ -139,15 +129,7 @@ DESCRIBE struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -[PARSE_SYNTAX_ERROR] Syntax error at or near 'insert'(line 3, pos 5) - -== SQL == -DESCRIBE - FROM desc_temp1 a - insert into desc_temp1 select * ------^^^ - insert into desc_temp2 select * +{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'insert'",""],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/describe.sql.out b/sql/core/src/test/resources/sql-tests/results/describe.sql.out index b0c4d9caf830d..94f4902ba8d13 100644 --- a/sql/core/src/test/resources/sql-tests/results/describe.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/describe.sql.out @@ -362,9 +362,7 @@ DESC t PARTITION (c='Us', d=2) struct<> -- !query output org.apache.spark.sql.catalyst.analysis.NoSuchPartitionException -Partition not found in table 't' database 'default': -c -> Us -d -> 2 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -373,7 +371,7 @@ DESC t PARTITION (c='Us') struct<> -- !query output org.apache.spark.sql.AnalysisException -Partition spec is invalid. 
The spec (c) must match the partition spec (c, d) defined in table '`spark_catalog`.`default`.`t`' +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -382,12 +380,7 @@ DESC t PARTITION (c='Us', d) struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -[INVALID_SQL_SYNTAX] Invalid SQL syntax: PARTITION specification is incomplete: `d`(line 1, pos 0) - -== SQL == -DESC t PARTITION (c='Us', d) -^^^ +{"errorClass":"INVALID_SQL_SYNTAX","sqlState":"42000","messageParameters":["PARTITION specification is incomplete: `d`"],"queryContext":[]} -- !query @@ -463,7 +456,7 @@ DESC temp_v PARTITION (c='Us', d=1) struct<> -- !query output org.apache.spark.sql.AnalysisException -[FORBIDDEN_OPERATION] The operation DESC PARTITION is not allowed on the TEMPORARY VIEW: `temp_v` +{"errorClass":"FORBIDDEN_OPERATION","messageParameters":["DESC PARTITION","TEMPORARY VIEW","`temp_v`"],"queryContext":[]} -- !query @@ -542,7 +535,7 @@ DESC v PARTITION (c='Us', d=1) struct<> -- !query output org.apache.spark.sql.AnalysisException -[FORBIDDEN_OPERATION] The operation DESC PARTITION is not allowed on the VIEW: `v` +{"errorClass":"FORBIDDEN_OPERATION","messageParameters":["DESC PARTITION","VIEW","`v`"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/except-all.sql.out b/sql/core/src/test/resources/sql-tests/results/except-all.sql.out index c510ad1d8314d..665a5c1787670 100644 --- a/sql/core/src/test/resources/sql-tests/results/except-all.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/except-all.sql.out @@ -138,7 +138,7 @@ SELECT array(1) struct<> -- !query output org.apache.spark.sql.AnalysisException -ExceptAll can only be performed on tables with the compatible column types. The first column of the second table is array type which is not compatible with int at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -210,7 +210,7 @@ SELECT k, v FROM tab4 struct<> -- !query output org.apache.spark.sql.AnalysisException -ExceptAll can only be performed on tables with the same number of columns, but the first table has 1 columns and the second table has 2 columns +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/extract.sql.out b/sql/core/src/test/resources/sql-tests/results/extract.sql.out index 890c31f81dbd4..f1e942e63af4a 100644 --- a/sql/core/src/test/resources/sql-tests/results/extract.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/extract.sql.out @@ -317,7 +317,7 @@ select extract(not_supported from c) from t struct<> -- !query output org.apache.spark.sql.AnalysisException -Literals of type 'not_supported' are currently not supported for the string type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -326,7 +326,7 @@ select extract(not_supported from i) from t struct<> -- !query output org.apache.spark.sql.AnalysisException -Literals of type 'not_supported' are currently not supported for the interval year to month type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -335,7 +335,7 @@ select extract(not_supported from j) from t struct<> -- !query output org.apache.spark.sql.AnalysisException -Literals of type 'not_supported' are currently not supported for the interval day to second type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -648,7 +648,7 @@ 
select date_part('not_supported', c) from t struct<> -- !query output org.apache.spark.sql.AnalysisException -Literals of type 'not_supported' are currently not supported for the string type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -657,7 +657,7 @@ select date_part(c, c) from t struct<> -- !query output org.apache.spark.sql.AnalysisException -The 'field' parameter of function 'date_part' needs to be a string literal.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -674,7 +674,7 @@ select date_part(i, i) from t struct<> -- !query output org.apache.spark.sql.AnalysisException -The 'field' parameter of function 'date_part' needs to be a string literal.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -883,7 +883,7 @@ select extract(DAY from interval '2-1' YEAR TO MONTH) struct<> -- !query output org.apache.spark.sql.AnalysisException -Literals of type 'DAY' are currently not supported for the interval year to month type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -892,7 +892,7 @@ select date_part('DAY', interval '2-1' YEAR TO MONTH) struct<> -- !query output org.apache.spark.sql.AnalysisException -Literals of type 'DAY' are currently not supported for the interval year to month type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -901,7 +901,7 @@ select date_part('not_supported', interval '2-1' YEAR TO MONTH) struct<> -- !query output org.apache.spark.sql.AnalysisException -Literals of type 'not_supported' are currently not supported for the interval year to month type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1014,7 +1014,7 @@ select extract(MONTH from interval '123 12:34:56.789123123' DAY TO SECOND) struct<> -- !query output org.apache.spark.sql.AnalysisException -Literals of type 'MONTH' are currently not supported for the interval day to second type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1023,4 +1023,4 @@ select date_part('not_supported', interval '123 12:34:56.789123123' DAY TO SECON struct<> -- !query output org.apache.spark.sql.AnalysisException -Literals of type 'not_supported' are currently not supported for the interval day to second type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/group-analytics.sql.out b/sql/core/src/test/resources/sql-tests/results/group-analytics.sql.out index 4645bb1dd4799..f8db5a4ec8187 100644 --- a/sql/core/src/test/resources/sql-tests/results/group-analytics.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/group-analytics.sql.out @@ -131,12 +131,7 @@ SELECT course, year, SUM(earnings) FROM courseSales GROUP BY ROLLUP(course, year struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Empty set in ROLLUP grouping sets is not supported.(line 1, pos 61) - -== SQL == -SELECT course, year, SUM(earnings) FROM courseSales GROUP BY ROLLUP(course, year, (course, year), ()) ORDER BY course, year --------------------------------------------------------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -193,12 +188,7 @@ SELECT course, year, SUM(earnings) FROM courseSales GROUP BY CUBE(course, year, struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Empty set in CUBE 
grouping sets is not supported.(line 1, pos 61) - -== SQL == -SELECT course, year, SUM(earnings) FROM courseSales GROUP BY CUBE(course, year, (course, year), ()) ORDER BY course, year --------------------------------------------------------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -451,7 +441,7 @@ SELECT course, year, GROUPING(course) FROM courseSales GROUP BY course, year struct<> -- !query output org.apache.spark.sql.AnalysisException -grouping() can only be used with GroupingSets/Cube/Rollup +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -460,7 +450,7 @@ SELECT course, year, GROUPING_ID(course, year) FROM courseSales GROUP BY course, struct<> -- !query output org.apache.spark.sql.AnalysisException -grouping_id() can only be used with GroupingSets/Cube/Rollup +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -496,7 +486,7 @@ SELECT course, year FROM courseSales GROUP BY course, year HAVING GROUPING(cours struct<> -- !query output org.apache.spark.sql.AnalysisException -[UNSUPPORTED_GROUPING_EXPRESSION] grouping()/grouping_id() can only be used with GroupingSets/Cube/Rollup +{"errorClass":"UNSUPPORTED_GROUPING_EXPRESSION","messageParameters":[],"queryContext":[]} -- !query @@ -505,7 +495,7 @@ SELECT course, year FROM courseSales GROUP BY course, year HAVING GROUPING_ID(co struct<> -- !query output org.apache.spark.sql.AnalysisException -[UNSUPPORTED_GROUPING_EXPRESSION] grouping()/grouping_id() can only be used with GroupingSets/Cube/Rollup +{"errorClass":"UNSUPPORTED_GROUPING_EXPRESSION","messageParameters":[],"queryContext":[]} -- !query @@ -560,7 +550,7 @@ SELECT course, year FROM courseSales GROUP BY course, year ORDER BY GROUPING(cou struct<> -- !query output org.apache.spark.sql.AnalysisException -[UNSUPPORTED_GROUPING_EXPRESSION] grouping()/grouping_id() can only be used with GroupingSets/Cube/Rollup +{"errorClass":"UNSUPPORTED_GROUPING_EXPRESSION","messageParameters":[],"queryContext":[]} -- !query @@ -569,7 +559,7 @@ SELECT course, year FROM courseSales GROUP BY course, year ORDER BY GROUPING_ID( struct<> -- !query output org.apache.spark.sql.AnalysisException -[UNSUPPORTED_GROUPING_EXPRESSION] grouping()/grouping_id() can only be used with GroupingSets/Cube/Rollup +{"errorClass":"UNSUPPORTED_GROUPING_EXPRESSION","messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/group-by-filter.sql.out b/sql/core/src/test/resources/sql-tests/results/group-by-filter.sql.out index cd47c70ea8591..032b4ded6b3a3 100644 --- a/sql/core/src/test/resources/sql-tests/results/group-by-filter.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/group-by-filter.sql.out @@ -48,7 +48,7 @@ SELECT a, COUNT(b) FILTER (WHERE a >= 2) FROM testData struct<> -- !query output org.apache.spark.sql.AnalysisException -grouping expressions sequence is empty, and 'testdata.a' is not an aggregate function. Wrap '(count(testdata.b) FILTER (WHERE (testdata.a >= 2)) AS `count(b) FILTER (WHERE (a >= 2))`)' in windowing function(s) or wrap 'testdata.a' in first() (or first_value) if you don't care which value you get. +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -228,7 +228,7 @@ SELECT a, COUNT(b) FILTER (WHERE a != 2) FROM testData GROUP BY b struct<> -- !query output org.apache.spark.sql.AnalysisException -expression 'testdata.a' is neither present in the group by, nor is it an aggregate function. 
Add to group by or wrap in first() (or first_value) if you don't care which value you get. +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -708,7 +708,7 @@ SELECT a + 2, COUNT(b) FILTER (WHERE b IN (1, 2)) FROM testData GROUP BY a + 1 struct<> -- !query output org.apache.spark.sql.AnalysisException -expression 'testdata.a' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get. +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/group-by-ordinal.sql.out b/sql/core/src/test/resources/sql-tests/results/group-by-ordinal.sql.out index 0e4ec436b3b7c..b1898aa4681b2 100644 --- a/sql/core/src/test/resources/sql-tests/results/group-by-ordinal.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/group-by-ordinal.sql.out @@ -92,7 +92,7 @@ select a, b from data group by -1 struct<> -- !query output org.apache.spark.sql.AnalysisException -GROUP BY position -1 is not in select list (valid range is [1, 2]); line 1 pos 31 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -101,7 +101,7 @@ select a, b from data group by 0 struct<> -- !query output org.apache.spark.sql.AnalysisException -GROUP BY position 0 is not in select list (valid range is [1, 2]); line 1 pos 31 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -110,7 +110,7 @@ select a, b from data group by 3 struct<> -- !query output org.apache.spark.sql.AnalysisException -GROUP BY position 3 is not in select list (valid range is [1, 2]); line 1 pos 31 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -119,7 +119,7 @@ select a, b, sum(b) from data group by 3 struct<> -- !query output org.apache.spark.sql.AnalysisException -GROUP BY 3 refers to an expression that is or contains an aggregate function. Aggregate functions are not allowed in GROUP BY, but got sum(data.b) AS `sum(b)`; line 1 pos 39 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -128,7 +128,7 @@ select a, b, sum(b) + 2 from data group by 3 struct<> -- !query output org.apache.spark.sql.AnalysisException -GROUP BY 3 refers to an expression that is or contains an aggregate function. Aggregate functions are not allowed in GROUP BY, but got (sum(data.b) + CAST(2 AS BIGINT)) AS `(sum(b) + 2)`; line 1 pos 43 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -152,7 +152,7 @@ select * from data group by a, b, 1 struct<> -- !query output org.apache.spark.sql.AnalysisException -Star (*) is not allowed in select list when GROUP BY ordinal position is used +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -349,7 +349,7 @@ select a, b, count(1) from data group by a, -1 struct<> -- !query output org.apache.spark.sql.AnalysisException -GROUP BY position -1 is not in select list (valid range is [1, 3]); line 1 pos 44 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -358,7 +358,7 @@ select a, b, count(1) from data group by a, 3 struct<> -- !query output org.apache.spark.sql.AnalysisException -GROUP BY 3 refers to an expression that is or contains an aggregate function. 
Aggregate functions are not allowed in GROUP BY, but got count(1) AS `count(1)`; line 1 pos 44 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -367,7 +367,7 @@ select a, b, count(1) from data group by cube(-1, 2) struct<> -- !query output org.apache.spark.sql.AnalysisException -GROUP BY position -1 is not in select list (valid range is [1, 3]); line 1 pos 46 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -376,7 +376,7 @@ select a, b, count(1) from data group by cube(1, 3) struct<> -- !query output org.apache.spark.sql.AnalysisException -GROUP BY 3 refers to an expression that is or contains an aggregate function. Aggregate functions are not allowed in GROUP BY, but got count(1) AS `count(1)`; line 1 pos 49 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/group-by.sql.out b/sql/core/src/test/resources/sql-tests/results/group-by.sql.out index 0ce704e01bad6..b86d0b7cb082f 100644 --- a/sql/core/src/test/resources/sql-tests/results/group-by.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/group-by.sql.out @@ -15,7 +15,7 @@ SELECT a, COUNT(b) FROM testData struct<> -- !query output org.apache.spark.sql.AnalysisException -grouping expressions sequence is empty, and 'testdata.a' is not an aggregate function. Wrap '(count(testdata.b) AS `count(b)`)' in windowing function(s) or wrap 'testdata.a' in first() (or first_value) if you don't care which value you get. +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -43,7 +43,7 @@ SELECT a, COUNT(b) FROM testData GROUP BY b struct<> -- !query output org.apache.spark.sql.AnalysisException -expression 'testdata.a' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get. +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -107,7 +107,7 @@ SELECT a + 2, COUNT(b) FROM testData GROUP BY a + 1 struct<> -- !query output org.apache.spark.sql.AnalysisException -expression 'testdata.a' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get. +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -164,7 +164,7 @@ SELECT a AS k, COUNT(non_existing) FROM testData GROUP BY k struct<> -- !query output org.apache.spark.sql.AnalysisException -[UNRESOLVED_COLUMN] A column or function parameter with name `non_existing` cannot be resolved. Did you mean one of the following? [`testdata`.`a`, `testdata`.`b`]; line 1 pos 21 +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`non_existing`","`testdata`.`a`, `testdata`.`b`"],"queryContext":[]} -- !query @@ -173,7 +173,7 @@ SELECT COUNT(b) AS k FROM testData GROUP BY k struct<> -- !query output org.apache.spark.sql.AnalysisException -aggregate functions are not allowed in GROUP BY, but found count(testdata.b) +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -191,7 +191,7 @@ SELECT k AS a, COUNT(v) FROM testDataHasSameNameWithAlias GROUP BY a struct<> -- !query output org.apache.spark.sql.AnalysisException -expression 'testdatahassamenamewithalias.k' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get. 
+{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -208,7 +208,7 @@ SELECT a AS k, COUNT(b) FROM testData GROUP BY k struct<> -- !query output org.apache.spark.sql.AnalysisException -[UNRESOLVED_COLUMN] A column or function parameter with name `k` cannot be resolved. Did you mean one of the following? [`testdata`.`a`, `testdata`.`b`]; line 1 pos 47 +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`k`","`testdata`.`a`, `testdata`.`b`"],"queryContext":[]} -- !query @@ -280,7 +280,7 @@ SELECT id FROM range(10) HAVING id > 0 struct<> -- !query output org.apache.spark.sql.AnalysisException -grouping expressions sequence is empty, and 'id' is not an aggregate function. Wrap '()' in windowing function(s) or wrap 'id' in first() (or first_value) if you don't care which value you get. +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -314,10 +314,7 @@ SELECT 1 FROM range(10) HAVING MAX(id) > 0 struct<> -- !query output org.apache.spark.sql.AnalysisException - -Aggregate/Window/Generate expressions are not valid in where clause of the query. -Expression in where clause: [(max(id) > CAST(0 AS BIGINT))] -Invalid expressions: [max(id)] +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -447,7 +444,7 @@ SELECT every(1) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'every(1)' due to data type mismatch: argument 1 requires boolean type, however, '1' is of int type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -456,7 +453,7 @@ SELECT some(1S) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'some(1S)' due to data type mismatch: argument 1 requires boolean type, however, '1S' is of smallint type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -465,7 +462,7 @@ SELECT any(1L) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'any(1L)' due to data type mismatch: argument 1 requires boolean type, however, '1L' is of bigint type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -474,7 +471,7 @@ SELECT every("true") struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'every('true')' due to data type mismatch: argument 1 requires boolean type, however, ''true'' is of string type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -483,7 +480,7 @@ SELECT bool_and(1.0) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'bool_and(1.0BD)' due to data type mismatch: argument 1 requires boolean type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -492,7 +489,7 @@ SELECT bool_or(1.0D) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'bool_or(1.0D)' due to data type mismatch: argument 1 requires boolean type, however, '1.0D' is of double type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -612,10 +609,7 @@ SELECT count(*) FROM test_agg WHERE count(*) > 1L struct<> -- !query output org.apache.spark.sql.AnalysisException - -Aggregate/Window/Generate expressions are not valid in where clause of the query. 
-Expression in where clause: [(count(1) > 1L)]
-Invalid expressions: [count(1)]
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -624,10 +618,7 @@ SELECT count(*) FROM test_agg WHERE count(*) + 1L > 1L
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-
-Aggregate/Window/Generate expressions are not valid in where clause of the query.
-Expression in where clause: [((count(1) + 1L) > 1L)]
-Invalid expressions: [count(1)]
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -636,10 +627,7 @@ SELECT count(*) FROM test_agg WHERE k = 1 or k = 2 or count(*) + 1L > 1L or max(
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-
-Aggregate/Window/Generate expressions are not valid in where clause of the query.
-Expression in where clause: [(((test_agg.k = 1) OR (test_agg.k = 2)) OR (((count(1) + 1L) > 1L) OR (max(test_agg.k) > 1)))]
-Invalid expressions: [count(1), max(test_agg.k)]
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/grouping_set.sql.out b/sql/core/src/test/resources/sql-tests/results/grouping_set.sql.out
index bb6371a97f76a..20c93b1d04306 100644
--- a/sql/core/src/test/resources/sql-tests/results/grouping_set.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/grouping_set.sql.out
@@ -134,12 +134,7 @@ SELECT a, b, c, count(d) FROM grouping GROUP BY WITH ROLLUP
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-[PARSE_SYNTAX_ERROR] Syntax error at or near 'ROLLUP': extra input 'ROLLUP'(line 1, pos 53)
-
-== SQL ==
-SELECT a, b, c, count(d) FROM grouping GROUP BY WITH ROLLUP
------------------------------------------------------^^^
+{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'ROLLUP'",": extra input 'ROLLUP'"],"queryContext":[]}


 -- !query
@@ -148,12 +143,7 @@ SELECT a, b, c, count(d) FROM grouping GROUP BY WITH CUBE
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-[PARSE_SYNTAX_ERROR] Syntax error at or near 'CUBE': extra input 'CUBE'(line 1, pos 53)
-
-== SQL ==
-SELECT a, b, c, count(d) FROM grouping GROUP BY WITH CUBE
------------------------------------------------------^^^
+{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'CUBE'",": extra input 'CUBE'"],"queryContext":[]}


 -- !query
@@ -162,7 +152,7 @@ SELECT c1 FROM (values (1,2), (3,2)) t(c1, c2) GROUP BY GROUPING SETS (())
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-expression 't.c1' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get.
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/having.sql.out b/sql/core/src/test/resources/sql-tests/results/having.sql.out
index e9e24562d1ba4..592571f36aaff 100644
--- a/sql/core/src/test/resources/sql-tests/results/having.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/having.sql.out
@@ -35,7 +35,7 @@ SELECT count(k) FROM hav GROUP BY v HAVING v = array(1)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(hav.v = array(1))' due to data type mismatch: differing types in '(hav.v = array(1))' (int and array<int>).; line 1 pos 43
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/higher-order-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/higher-order-functions.sql.out
index c6bbb4fb7179a..2d1d9aedc6d17 100644
--- a/sql/core/src/test/resources/sql-tests/results/higher-order-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/higher-order-functions.sql.out
@@ -17,7 +17,7 @@ select upper(x -> x) as v
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-A lambda function should only be used in a higher order function. However, its class is org.apache.spark.sql.catalyst.expressions.Upper, which is not a higher order function.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/ilike-all.sql.out b/sql/core/src/test/resources/sql-tests/results/ilike-all.sql.out
index 54dd03d32ea50..41b6b5536a2d6 100644
--- a/sql/core/src/test/resources/sql-tests/results/ilike-all.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ilike-all.sql.out
@@ -129,9 +129,4 @@ SELECT company FROM ilike_any_table WHERE company ILIKE ALL ()
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-Expected something between '(' and ')'.(line 1, pos 50)
-
-== SQL ==
-SELECT company FROM ilike_any_table WHERE company ILIKE ALL ()
---------------------------------------------------^^^
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
diff --git a/sql/core/src/test/resources/sql-tests/results/ilike-any.sql.out b/sql/core/src/test/resources/sql-tests/results/ilike-any.sql.out
index 91a2f40386457..bdd73e7cd3b3d 100644
--- a/sql/core/src/test/resources/sql-tests/results/ilike-any.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ilike-any.sql.out
@@ -135,9 +135,4 @@ SELECT company FROM ilike_any_table WHERE company ILIKE ANY ()
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-Expected something between '(' and ')'.(line 1, pos 50)
-
-== SQL ==
-SELECT company FROM ilike_any_table WHERE company ILIKE ANY ()
---------------------------------------------------^^^
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
diff --git a/sql/core/src/test/resources/sql-tests/results/inline-table.sql.out b/sql/core/src/test/resources/sql-tests/results/inline-table.sql.out
index f2eee23a52cdd..576330dfca965 100644
--- a/sql/core/src/test/resources/sql-tests/results/inline-table.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/inline-table.sql.out
@@ -110,7 +110,7 @@ select * from values ("one", rand(5)), ("two", 3.0D) as data(a, b)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot evaluate expression rand(5) in inline table definition; line 1 pos 29
+{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -119,7 +119,7 @@ select * from values ("one", 2.0), ("two") as data(a, b) struct<> -- !query output org.apache.spark.sql.AnalysisException -expected 2 columns but found 1 columns in row 1; line 1 pos 14 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -128,7 +128,7 @@ select * from values ("one", array(0, 1)), ("two", struct(1, 2)) as data(a, b) struct<> -- !query output org.apache.spark.sql.AnalysisException -incompatible types found in column b for inline table; line 1 pos 14 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -137,7 +137,7 @@ select * from values ("one"), ("two") as data(a, b) struct<> -- !query output org.apache.spark.sql.AnalysisException -expected 2 columns but found 1 columns in row 0; line 1 pos 14 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -146,7 +146,7 @@ select * from values ("one", random_not_exist_func(1)), ("two", 2) as data(a, b) struct<> -- !query output org.apache.spark.sql.AnalysisException -Undefined function: random_not_exist_func. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.random_not_exist_func.; line 1 pos 29 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -155,7 +155,7 @@ select * from values ("one", count(1)), ("two", 2) as data(a, b) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot evaluate expression count(1) in inline table definition; line 1 pos 29 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/intersect-all.sql.out b/sql/core/src/test/resources/sql-tests/results/intersect-all.sql.out index 062c3761d2513..69ba7f5ad344a 100644 --- a/sql/core/src/test/resources/sql-tests/results/intersect-all.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/intersect-all.sql.out @@ -95,7 +95,7 @@ SELECT array(1), 2 struct<> -- !query output org.apache.spark.sql.AnalysisException -IntersectAll can only be performed on tables with the compatible column types. 
The first column of the second table is array type which is not compatible with int at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -106,7 +106,7 @@ SELECT k, v FROM tab2 struct<> -- !query output org.apache.spark.sql.AnalysisException -IntersectAll can only be performed on tables with the same number of columns, but the first table has 1 columns and the second table has 2 columns +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/interval.sql.out index 8234031021ae9..1ccd8de9c3023 100644 --- a/sql/core/src/test/resources/sql-tests/results/interval.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/interval.sql.out @@ -13,12 +13,7 @@ select interval 4 month 2 weeks 3 microseconds * 1.5 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot mix year-month and day-time fields: interval 4 month 2 weeks 3 microseconds(line 1, pos 7) - -== SQL == -select interval 4 month 2 weeks 3 microseconds * 1.5 --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -183,7 +178,7 @@ select '2' / interval 2 second struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2' AS DOUBLE) / INTERVAL '02' SECOND)' due to data type mismatch: differing types in '(CAST('2' AS DOUBLE) / INTERVAL '02' SECOND)' (double and interval second).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -192,7 +187,7 @@ select '2' / interval 2 year struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2' AS DOUBLE) / INTERVAL '2' YEAR)' due to data type mismatch: differing types in '(CAST('2' AS DOUBLE) / INTERVAL '2' YEAR)' (double and interval year).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -201,10 +196,7 @@ select interval '2 seconds' / 0 struct<> -- !query output org.apache.spark.SparkArithmeticException -[INTERVAL_DIVIDED_BY_ZERO] Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. -== SQL(line 1, position 8) == -select interval '2 seconds' / 0 - ^^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"INTERVAL_DIVIDED_BY_ZERO","sqlState":"22012","messageParameters":[],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":30,"fragment":"interval '2 seconds' / "}]} -- !query @@ -237,10 +229,7 @@ select interval '2' year / 0 struct<> -- !query output org.apache.spark.SparkArithmeticException -[INTERVAL_DIVIDED_BY_ZERO] Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. 
-== SQL(line 1, position 8) ==
-select interval '2' year / 0
-       ^^^^^^^^^^^^^^^^^^^^^
+{"errorClass":"INTERVAL_DIVIDED_BY_ZERO","sqlState":"22012","messageParameters":[],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":27,"fragment":"interval '2' year / 0"}]}


 -- !query
@@ -273,7 +262,7 @@ select 2 / interval '2' year
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(2 / INTERVAL '2' YEAR)' due to data type mismatch: differing types in '(2 / INTERVAL '2' YEAR)' (int and interval year).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -282,7 +271,7 @@ select 2 / interval '2' hour
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(2 / INTERVAL '02' HOUR)' due to data type mismatch: differing types in '(2 / INTERVAL '02' HOUR)' (int and interval hour).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -291,7 +280,7 @@ select null / interval '2' year
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(NULL / INTERVAL '2' YEAR)' due to data type mismatch: differing types in '(NULL / INTERVAL '2' YEAR)' (void and interval year).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -300,7 +289,7 @@ select null / interval '2' hour
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(NULL / INTERVAL '02' HOUR)' due to data type mismatch: differing types in '(NULL / INTERVAL '02' HOUR)' (void and interval hour).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -309,12 +298,7 @@ select -interval '-1 month 1 day -1 second'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-Cannot mix year-month and day-time fields: interval '-1 month 1 day -1 second'(line 1, pos 8)
-
-== SQL ==
-select -interval '-1 month 1 day -1 second'
---------^^^
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -339,12 +323,7 @@ select -interval -1 month 1 day -1 second
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-Cannot mix year-month and day-time fields: interval -1 month 1 day -1 second(line 1, pos 8)
-
-== SQL ==
-select -interval -1 month 1 day -1 second
---------^^^
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -369,12 +348,7 @@ select +interval '-1 month 1 day -1 second'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-Cannot mix year-month and day-time fields: interval '-1 month 1 day -1 second'(line 1, pos 8)
-
-== SQL ==
-select +interval '-1 month 1 day -1 second'
---------^^^
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -399,12 +373,7 @@ select +interval -1 month 1 day -1 second
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-Cannot mix year-month and day-time fields: interval -1 month 1 day -1 second(line 1, pos 8)
-
-== SQL ==
-select +interval -1 month 1 day -1 second
---------^^^
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -816,12 +785,7 @@ select interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 millisec
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-Cannot mix year-month and day-time fields: interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 millisecond 9 microsecond(line 1, pos 7)
-
-== SQL ==
-select interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 millisecond 9 microsecond
--------^^^
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -854,12 +818,7 @@ select interval '30' year '25' month '-100' day '40' hour '80' minute '299.88998
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-Cannot mix year-month and day-time fields: interval '30' year '25' month '-100' day '40' hour '80' minute '299.889987299' second(line 1, pos 7)
-
-== SQL ==
-select interval '30' year '25' month '-100' day '40' hour '80' minute '299.889987299' second
--------^^^
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -988,12 +947,7 @@ select interval '20 15:40:32.99899999' day to hour
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-Interval string does not match day-time format of `[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval day to hour: 20 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)
-
-== SQL ==
-select interval '20 15:40:32.99899999' day to hour
-----------------^^^
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1002,12 +956,7 @@ select interval '20 15:40:32.99899999' day to minute
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-Interval string does not match day-time format of `[+|-]d h:m`, `INTERVAL [+|-]'[+|-]d h:m' DAY TO MINUTE` when cast to interval day to minute: 20 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)
-
-== SQL ==
-select interval '20 15:40:32.99899999' day to minute
-----------------^^^
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1016,12 +965,7 @@ select interval '15:40:32.99899999' hour to minute
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-Interval string does not match day-time format of `[+|-]h:m`, `INTERVAL [+|-]'[+|-]h:m' HOUR TO MINUTE` when cast to interval hour to minute: 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)
-
-== SQL ==
-select interval '15:40:32.99899999' hour to minute
-----------------^^^
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1030,12 +974,7 @@ select interval '15:40.99899999' hour to second
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-Interval string does not match day-time format of `[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 15:40.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)
-
-== SQL ==
-select interval '15:40.99899999' hour to second
-----------------^^^
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1044,12 +983,7 @@ select interval '15:40' hour to second
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-Interval string does not match day-time format of `[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 15:40, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)
-
-== SQL ==
-select interval '15:40' hour to second
-----------------^^^
+{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1058,12 +992,7 @@ select interval '20 40:32.99899999' minute to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Interval string does not match day-time format of `[+|-]m:s.n`, `INTERVAL [+|-]'[+|-]m:s.n' MINUTE TO SECOND` when cast to interval minute to second: 20 40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16) - -== SQL == -select interval '20 40:32.99899999' minute to second -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1072,12 +1001,7 @@ select interval 10 nanoseconds struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Error parsing ' 10 nanoseconds' to interval, invalid unit 'nanoseconds'(line 1, pos 16) - -== SQL == -select interval 10 nanoseconds -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1166,12 +1090,7 @@ select interval struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -at least one time unit should be given for interval literal(line 1, pos 7) - -== SQL == -select interval --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1180,12 +1099,7 @@ select interval 1 fake_unit struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Error parsing ' 1 fake_unit' to interval, invalid unit 'fake_unit'(line 1, pos 16) - -== SQL == -select interval 1 fake_unit -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1194,12 +1108,7 @@ select interval 1 year to month struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -The value of from-to unit must be a string(line 1, pos 16) - -== SQL == -select interval 1 year to month -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1208,12 +1117,7 @@ select interval '1' year to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Intervals FROM year TO second are not supported.(line 1, pos 16) - -== SQL == -select interval '1' year to second -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1222,12 +1126,7 @@ select interval '10-9' year to month '2-1' year to month struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Can only have a single from-to unit in the interval literal syntax(line 1, pos 37) - -== SQL == -select interval '10-9' year to month '2-1' year to month --------------------------------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1236,12 +1135,7 @@ select interval '10-9' year to month '12:11:10' hour to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Can only have a single from-to unit in the interval literal syntax(line 1, pos 37) - -== SQL == -select interval '10-9' year to month '12:11:10' hour to second --------------------------------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1250,12 +1144,7 @@ select interval '1 15:11' day to minute '12:11:10' hour to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Can only have a single from-to unit in the interval literal syntax(line 1, pos 40) - -== SQL == -select interval '1 15:11' day to minute 
'12:11:10' hour to second -----------------------------------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1264,12 +1153,7 @@ select interval 1 year '2-1' year to month struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Can only have a single from-to unit in the interval literal syntax(line 1, pos 23) - -== SQL == -select interval 1 year '2-1' year to month ------------------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1278,12 +1162,7 @@ select interval 1 year '12:11:10' hour to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Can only have a single from-to unit in the interval literal syntax(line 1, pos 23) - -== SQL == -select interval 1 year '12:11:10' hour to second ------------------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1292,12 +1171,7 @@ select interval '10-9' year to month '1' year struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Can only have a single from-to unit in the interval literal syntax(line 1, pos 37) - -== SQL == -select interval '10-9' year to month '1' year --------------------------------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1306,12 +1180,7 @@ select interval '12:11:10' hour to second '1' year struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Can only have a single from-to unit in the interval literal syntax(line 1, pos 42) - -== SQL == -select interval '12:11:10' hour to second '1' year -------------------------------------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1320,7 +1189,7 @@ select interval (-30) day struct<> -- !query output org.apache.spark.sql.AnalysisException -Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1329,7 +1198,7 @@ select interval (a + 1) day struct<> -- !query output org.apache.spark.sql.AnalysisException -Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1338,12 +1207,7 @@ select interval 30 day day day struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -[PARSE_SYNTAX_ERROR] Syntax error at or near 'day': extra input 'day'(line 1, pos 27) - -== SQL == -select interval 30 day day day ----------------------------^^^ +{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'day'",": extra input 'day'"],"queryContext":[]} -- !query @@ -1352,7 +1216,7 @@ select interval (-30) days struct<> -- !query output org.apache.spark.sql.AnalysisException -Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1361,7 +1225,7 @@ select interval (a + 1) days struct<> -- !query output org.apache.spark.sql.AnalysisException -Undefined function: interval. 
This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1370,12 +1234,7 @@ select interval 30 days days days struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -[PARSE_SYNTAX_ERROR] Syntax error at or near 'days': extra input 'days'(line 1, pos 29) - -== SQL == -select interval 30 days days days ------------------------------^^^ +{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'days'",": extra input 'days'"],"queryContext":[]} -- !query @@ -1392,12 +1251,7 @@ SELECT INTERVAL '178956970-8' YEAR TO MONTH struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Error parsing interval year-month string: integer overflow(line 1, pos 16) - -== SQL == -SELECT INTERVAL '178956970-8' YEAR TO MONTH -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1442,7 +1296,7 @@ select struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(INTERVAL '2' YEAR + CAST('3-3 year to month' AS DOUBLE))' due to data type mismatch: differing types in '(INTERVAL '2' YEAR + CAST('3-3 year to month' AS DOUBLE))' (interval year and double).; line 2 pos 2 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1467,7 +1321,7 @@ select interval '2' year + '3-3' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(INTERVAL '2' YEAR + CAST('3-3' AS DOUBLE))' due to data type mismatch: differing types in '(INTERVAL '2' YEAR + CAST('3-3' AS DOUBLE))' (interval year and double).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1476,7 +1330,7 @@ select interval '2' year - '4' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(INTERVAL '2' YEAR - CAST('4' AS DOUBLE))' due to data type mismatch: differing types in '(INTERVAL '2' YEAR - CAST('4' AS DOUBLE))' (interval year and double).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1509,7 +1363,7 @@ select interval '2' year + str from interval_view struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(INTERVAL '2' YEAR + CAST(interval_view.str AS DOUBLE))' due to data type mismatch: differing types in '(INTERVAL '2' YEAR + CAST(interval_view.str AS DOUBLE))' (interval year and double).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1518,7 +1372,7 @@ select interval '2' year - str from interval_view struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(INTERVAL '2' YEAR - CAST(interval_view.str AS DOUBLE))' due to data type mismatch: differing types in '(INTERVAL '2' YEAR - CAST(interval_view.str AS DOUBLE))' (interval year and double).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1543,7 +1397,7 @@ select interval '2-2' year to month + interval '3' day struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'INTERVAL '2-2' YEAR TO MONTH + INTERVAL '3' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2-2' YEAR TO MONTH' is of interval year to month type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1552,7 +1406,7 @@ select 
interval '3' day + interval '2-2' year to month struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'INTERVAL '2-2' YEAR TO MONTH + INTERVAL '3' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2-2' YEAR TO MONTH' is of interval year to month type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1561,7 +1415,7 @@ select interval '2-2' year to month - interval '3' day struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'INTERVAL '2-2' YEAR TO MONTH + (- INTERVAL '3' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2-2' YEAR TO MONTH' is of interval year to month type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1570,7 +1424,7 @@ select interval '3' day - interval '2-2' year to month struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(INTERVAL '3' DAY - INTERVAL '2-2' YEAR TO MONTH)' due to data type mismatch: differing types in '(INTERVAL '3' DAY - INTERVAL '2-2' YEAR TO MONTH)' (interval day and interval year to month).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1579,7 +1433,7 @@ select 1 - interval '2' second struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '1 + (- INTERVAL '02' SECOND)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, '1' is of int type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1588,7 +1442,7 @@ select 1 + interval '2' month struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(1 + INTERVAL '2' MONTH)' due to data type mismatch: differing types in '(1 + INTERVAL '2' MONTH)' (int and interval month).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1597,7 +1451,7 @@ select interval '2' second + 1 struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '1 + INTERVAL '02' SECOND' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, '1' is of int type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1606,7 +1460,7 @@ select interval '2' month - 1 struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(INTERVAL '2' MONTH - 1)' due to data type mismatch: differing types in '(INTERVAL '2' MONTH - 1)' (interval month and int).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1663,12 +1517,7 @@ select interval '-\t2-2\t' year to month struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Interval string does not match year-month format of `[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval year to month: - 2-2 (line 1, pos 16) - -== SQL == -select interval '-\t2-2\t' year to month -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1685,13 +1534,7 @@ select interval '\n-\t10\t 12:34:46.789\t' day to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Interval string does not match day-time format of `[+|-]d h:m:s.n`, `INTERVAL [+|-]'[+|-]d h:m:s.n' DAY TO SECOND` when cast to interval day to second: -- 10 
12:34:46.789 , set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16) - -== SQL == -select interval '\n-\t10\t 12:34:46.789\t' day to second -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1700,12 +1543,7 @@ select interval '中文 interval 1 day' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the INTERVAL value: 中文 interval 1 day(line 1, pos 7) - -== SQL == -select interval '中文 interval 1 day' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1714,12 +1552,7 @@ select interval 'interval中文 1 day' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the INTERVAL value: interval中文 1 day(line 1, pos 7) - -== SQL == -select interval 'interval中文 1 day' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1728,12 +1561,7 @@ select interval 'interval 1中文day' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the INTERVAL value: interval 1中文day(line 1, pos 7) - -== SQL == -select interval 'interval 1中文day' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1741,8 +1569,8 @@ select -(a) from values (interval '-2147483648 months', interval '2147483647 mon -- !query schema struct<> -- !query output -org.apache.spark.SparkArithmeticException -[INTERVAL_ARITHMETIC_OVERFLOW] integer overflow. +org.apache.spark.SparkException +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1750,8 +1578,8 @@ select a - b from values (interval '-2147483648 months', interval '2147483647 mo -- !query schema struct<> -- !query output -org.apache.spark.SparkArithmeticException -[INTERVAL_ARITHMETIC_OVERFLOW] integer overflow. Use 'try_subtract' to tolerate overflow and return NULL instead. +org.apache.spark.SparkException +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1759,8 +1587,8 @@ select b + interval '1 month' from values (interval '-2147483648 months', interv -- !query schema struct<> -- !query output -org.apache.spark.SparkArithmeticException -[INTERVAL_ARITHMETIC_OVERFLOW] integer overflow. Use 'try_add' to tolerate overflow and return NULL instead. +org.apache.spark.SparkException +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1768,8 +1596,8 @@ select a * 1.1 from values (interval '-2147483648 months', interval '2147483647 -- !query schema struct<> -- !query output -java.lang.ArithmeticException -Overflow +org.apache.spark.SparkException +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1777,8 +1605,8 @@ select a / 0.5 from values (interval '-2147483648 months', interval '2147483647 -- !query schema struct<> -- !query output -java.lang.ArithmeticException -Overflow +org.apache.spark.SparkException +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1839,12 +1667,7 @@ select interval '+' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the INTERVAL value: +(line 1, pos 7) - -== SQL == -select interval '+' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1853,12 +1676,7 @@ select interval '+.' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the INTERVAL value: +.(line 1, pos 7) - -== SQL == -select interval '+.' 
--------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1867,12 +1685,7 @@ select interval '1' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the INTERVAL value: 1(line 1, pos 7) - -== SQL == -select interval '1' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1881,12 +1694,7 @@ select interval '1.2' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the INTERVAL value: 1.2(line 1, pos 7) - -== SQL == -select interval '1.2' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1895,12 +1703,7 @@ select interval '- 2' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the INTERVAL value: - 2(line 1, pos 7) - -== SQL == -select interval '- 2' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1909,12 +1712,7 @@ select interval '1 day -' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the INTERVAL value: 1 day -(line 1, pos 7) - -== SQL == -select interval '1 day -' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1923,12 +1721,7 @@ select interval '1 day 1' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the INTERVAL value: 1 day 1(line 1, pos 7) - -== SQL == -select interval '1 day 1' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1937,12 +1730,7 @@ select interval '1 day 2' day struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Can only use numbers in the interval value part for multiple unit value pairs interval form, but got invalid value: 1 day 2(line 1, pos 16) - -== SQL == -select interval '1 day 2' day -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1951,12 +1739,7 @@ select interval 'interval 1' day struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Can only use numbers in the interval value part for multiple unit value pairs interval form, but got invalid value: interval 1(line 1, pos 16) - -== SQL == -select interval 'interval 1' day -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1989,10 +1772,7 @@ SELECT (INTERVAL '-178956970-8' YEAR TO MONTH) / -1 struct<> -- !query output org.apache.spark.SparkArithmeticException -[INTERVAL_ARITHMETIC_OVERFLOW] Interval value overflows after being divided by -1. Use 'try_divide' to tolerate overflow and return NULL instead. -== SQL(line 1, position 8) == -SELECT (INTERVAL '-178956970-8' YEAR TO MONTH) / -1 - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["Interval value overflows after being divided by -1"," Use 'try_divide' to tolerate overflow and return NULL instead."],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":50,"fragment":"(INTERVAL '-178956970-8' YEAR TO MONTH) / -"}]} -- !query @@ -2001,10 +1781,7 @@ SELECT (INTERVAL '-178956970-8' YEAR TO MONTH) / -1L struct<> -- !query output org.apache.spark.SparkArithmeticException -[INTERVAL_ARITHMETIC_OVERFLOW] Interval value overflows after being divided by -1. Use 'try_divide' to tolerate overflow and return NULL instead. 
-== SQL(line 1, position 8) == -SELECT (INTERVAL '-178956970-8' YEAR TO MONTH) / -1L - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["Interval value overflows after being divided by -1"," Use 'try_divide' to tolerate overflow and return NULL instead."],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":51,"fragment":"(INTERVAL '-178956970-8' YEAR TO MONTH) / -1"}]} -- !query @@ -2047,10 +1824,7 @@ SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1 struct<> -- !query output org.apache.spark.SparkArithmeticException -[INTERVAL_ARITHMETIC_OVERFLOW] Interval value overflows after being divided by -1. Use 'try_divide' to tolerate overflow and return NULL instead. -== SQL(line 1, position 8) == -SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1 - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["Interval value overflows after being divided by -1"," Use 'try_divide' to tolerate overflow and return NULL instead."],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":64,"fragment":"(INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -"}]} -- !query @@ -2059,10 +1833,7 @@ SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1L struct<> -- !query output org.apache.spark.SparkArithmeticException -[INTERVAL_ARITHMETIC_OVERFLOW] Interval value overflows after being divided by -1. Use 'try_divide' to tolerate overflow and return NULL instead. -== SQL(line 1, position 8) == -SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1L - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["Interval value overflows after being divided by -1"," Use 'try_divide' to tolerate overflow and return NULL instead."],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":65,"fragment":"(INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1"}]} -- !query @@ -2185,12 +1956,7 @@ SELECT INTERVAL '106751992 04' DAY TO HOUR struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -requirement failed: day 106751992 outside range [0, 106751991](line 1, pos 16) - -== SQL == -SELECT INTERVAL '106751992 04' DAY TO HOUR -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2199,12 +1965,7 @@ SELECT INTERVAL '-106751992 04' DAY TO HOUR struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -requirement failed: day 106751992 outside range [0, 106751991](line 1, pos 16) - -== SQL == -SELECT INTERVAL '-106751992 04' DAY TO HOUR -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2213,12 +1974,7 @@ SELECT INTERVAL '2562047789:00' HOUR TO MINUTE struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -requirement failed: hour 2562047789 outside range [0, 2562047788](line 1, pos 16) - -== SQL == -SELECT INTERVAL '2562047789:00' HOUR TO MINUTE -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2227,12 +1983,7 @@ SELECT INTERVAL '-2562047789:00' HOUR TO MINUTE struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -requirement failed: hour 2562047789 outside range [0, 2562047788](line 1, pos 16) - -== SQL == -SELECT 
INTERVAL '-2562047789:00' HOUR TO MINUTE -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2241,12 +1992,7 @@ SELECT INTERVAL '153722867281:54.775808' MINUTE TO SECOND struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -requirement failed: minute 153722867281 outside range [0, 153722867280](line 1, pos 16) - -== SQL == -SELECT INTERVAL '153722867281:54.775808' MINUTE TO SECOND -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2255,12 +2001,7 @@ SELECT INTERVAL '-153722867281:54.775808' MINUTE TO SECOND struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -requirement failed: minute 153722867281 outside range [0, 153722867280](line 1, pos 16) - -== SQL == -SELECT INTERVAL '-153722867281:54.775808' MINUTE TO SECOND -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2397,7 +2138,7 @@ SELECT INTERVAL 1 MONTH > INTERVAL 20 DAYS struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(INTERVAL '1' MONTH > INTERVAL '20' DAY)' due to data type mismatch: differing types in '(INTERVAL '1' MONTH > INTERVAL '20' DAY)' (interval month and interval day).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2406,7 +2147,7 @@ SELECT INTERVAL '1' DAY < '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(INTERVAL '1' DAY < '1')' due to data type mismatch: differing types in '(INTERVAL '1' DAY < '1')' (interval day and string).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2415,7 +2156,7 @@ SELECT INTERVAL '1' DAY = '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(INTERVAL '1' DAY = '1')' due to data type mismatch: differing types in '(INTERVAL '1' DAY = '1')' (interval day and string).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2424,7 +2165,7 @@ SELECT INTERVAL '1' DAY > '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(INTERVAL '1' DAY > '1')' due to data type mismatch: differing types in '(INTERVAL '1' DAY > '1')' (interval day and string).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2433,7 +2174,7 @@ SELECT '1' < INTERVAL '1' DAY struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '('1' < INTERVAL '1' DAY)' due to data type mismatch: differing types in '('1' < INTERVAL '1' DAY)' (string and interval day).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2442,7 +2183,7 @@ SELECT '1' = INTERVAL '1' DAY struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '('1' = INTERVAL '1' DAY)' due to data type mismatch: differing types in '('1' = INTERVAL '1' DAY)' (string and interval day).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2451,7 +2192,7 @@ SELECT '1' > INTERVAL '1' DAY struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '('1' > INTERVAL '1' DAY)' due to data type mismatch: differing types in '('1' > INTERVAL '1' DAY)' (string and interval day).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2460,7 +2201,7 @@ SELECT INTERVAL '1' YEAR < '1' struct<> -- !query output 
org.apache.spark.sql.AnalysisException -cannot resolve '(INTERVAL '1' YEAR < '1')' due to data type mismatch: differing types in '(INTERVAL '1' YEAR < '1')' (interval year and string).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2469,7 +2210,7 @@ SELECT INTERVAL '1' YEAR = '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(INTERVAL '1' YEAR = '1')' due to data type mismatch: differing types in '(INTERVAL '1' YEAR = '1')' (interval year and string).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2478,7 +2219,7 @@ SELECT INTERVAL '1' YEAR > '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(INTERVAL '1' YEAR > '1')' due to data type mismatch: differing types in '(INTERVAL '1' YEAR > '1')' (interval year and string).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2487,7 +2228,7 @@ SELECT '1' < INTERVAL '1' YEAR struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '('1' < INTERVAL '1' YEAR)' due to data type mismatch: differing types in '('1' < INTERVAL '1' YEAR)' (string and interval year).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2496,7 +2237,7 @@ SELECT '1' = INTERVAL '1' YEAR struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '('1' = INTERVAL '1' YEAR)' due to data type mismatch: differing types in '('1' = INTERVAL '1' YEAR)' (string and interval year).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2505,7 +2246,7 @@ SELECT '1' > INTERVAL '1' YEAR struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '('1' > INTERVAL '1' YEAR)' due to data type mismatch: differing types in '('1' > INTERVAL '1' YEAR)' (string and interval year).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2530,7 +2271,7 @@ SELECT array(INTERVAL 1 MONTH, INTERVAL 20 DAYS) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'array(INTERVAL '1' MONTH, INTERVAL '20' DAY)' due to data type mismatch: input to function array should all be the same type, but it's [interval month, interval day]; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2555,7 +2296,7 @@ SELECT coalesce(INTERVAL 1 MONTH, INTERVAL 20 DAYS) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'coalesce(INTERVAL '1' MONTH, INTERVAL '20' DAY)' due to data type mismatch: input to function coalesce should all be the same type, but it's [interval month, interval day]; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2612,7 +2353,7 @@ SELECT div(INTERVAL '1' MONTH, INTERVAL '-1' DAY) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(INTERVAL '1' MONTH div INTERVAL '-1' DAY)' due to data type mismatch: differing types in '(INTERVAL '1' MONTH div INTERVAL '-1' DAY)' (interval month and interval day).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/join-lateral.sql.out b/sql/core/src/test/resources/sql-tests/results/join-lateral.sql.out index ea4c8b80b8b88..235bddb5f5de2 100644 --- a/sql/core/src/test/resources/sql-tests/results/join-lateral.sql.out +++ 
b/sql/core/src/test/resources/sql-tests/results/join-lateral.sql.out @@ -114,7 +114,7 @@ SELECT * FROM t1, LATERAL (SELECT t1.*, t2.* FROM t2, LATERAL (SELECT t1.*, t2.* struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 't1.*' given input columns 'c1, c2'; line 1 pos 70 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -149,12 +149,7 @@ SELECT * FROM t1 NATURAL JOIN LATERAL (SELECT c1 + c2 AS c2) struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -[UNSUPPORTED_FEATURE.LATERAL_NATURAL_JOIN] The feature is not supported: NATURAL join with LATERAL correlation.(line 1, pos 14) - -== SQL == -SELECT * FROM t1 NATURAL JOIN LATERAL (SELECT c1 + c2 AS c2) ---------------^^^ +{"errorClass":"UNSUPPORTED_FEATURE","errorSubClass":"LATERAL_NATURAL_JOIN","sqlState":"0A000","messageParameters":[],"queryContext":[]} -- !query @@ -163,12 +158,7 @@ SELECT * FROM t1 JOIN LATERAL (SELECT c1 + c2 AS c2) USING (c2) struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -[UNSUPPORTED_FEATURE.LATERAL_JOIN_USING] The feature is not supported: JOIN USING with LATERAL correlation.(line 1, pos 14) - -== SQL == -SELECT * FROM t1 JOIN LATERAL (SELECT c1 + c2 AS c2) USING (c2) ---------------^^^ +{"errorClass":"UNSUPPORTED_FEATURE","errorSubClass":"LATERAL_JOIN_USING","sqlState":"0A000","messageParameters":[],"queryContext":[]} -- !query @@ -266,7 +256,7 @@ SELECT * FROM t1 JOIN LATERAL (SELECT t1.c1 AS a, t2.c1 AS b) s JOIN t2 ON s.b = struct<> -- !query output org.apache.spark.sql.AnalysisException -[UNRESOLVED_COLUMN] A column or function parameter with name `t2`.`c1` cannot be resolved. Did you mean one of the following? []; line 1 pos 50 +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`t2`.`c1`",""],"queryContext":[]} -- !query @@ -291,11 +281,7 @@ SELECT * FROM t1, LATERAL (SELECT c1 + c2 + rand(0) AS c3) struct<> -- !query output org.apache.spark.sql.AnalysisException -Non-deterministic lateral subqueries are not supported when joining with outer relations that produce more than one row -SubqueryAlias __auto_generated_subquery_name -+- Project [(cast((outer(c1#x) + outer(c2#x)) as double) + rand(0)) AS c3#x] - +- OneRowRelation -; line 1 pos 9 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -304,14 +290,7 @@ SELECT * FROM t1, LATERAL (SELECT rand(0) FROM t2) struct<> -- !query output org.apache.spark.sql.AnalysisException -Non-deterministic lateral subqueries are not supported when joining with outer relations that produce more than one row -SubqueryAlias __auto_generated_subquery_name -+- Project [rand(0) AS rand(0)#x] - +- SubqueryAlias spark_catalog.default.t2 - +- View (`spark_catalog`.`default`.`t2`, [c1#x,c2#x]) - +- Project [cast(col1#x as int) AS c1#x, cast(col2#x as int) AS c2#x] - +- LocalRelation [col1#x, col2#x] -; line 1 pos 9 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -320,7 +299,7 @@ SELECT * FROM t1 JOIN LATERAL (SELECT * FROM t2) s ON t1.c1 + rand(0) = s.c1 struct<> -- !query output org.apache.spark.sql.AnalysisException -Lateral join condition cannot be non-deterministic: ((CAST(spark_catalog.default.t1.c1 AS DOUBLE) + rand(0)) = CAST(s.c1 AS DOUBLE)); line 1 pos 17 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -384,7 +363,7 @@ SELECT * FROM t1, LATERAL (SELECT * FROM t2, LATERAL (SELECT t1.c1 + t2.c1)) struct<> -- !query output org.apache.spark.sql.AnalysisException 
-[UNRESOLVED_COLUMN] A column or function parameter with name `t1`.`c1` cannot be resolved. Did you mean one of the following? []; line 1 pos 61 +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`t1`.`c1`",""],"queryContext":[]} -- !query @@ -393,7 +372,7 @@ SELECT * FROM t1, LATERAL (SELECT * FROM (SELECT c1), LATERAL (SELECT c2)) struct<> -- !query output org.apache.spark.sql.AnalysisException -[UNRESOLVED_COLUMN] A column or function parameter with name `c2` cannot be resolved. Did you mean one of the following? []; line 1 pos 70 +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`c2`",""],"queryContext":[]} -- !query @@ -420,7 +399,7 @@ SELECT * FROM t1, LATERAL (SELECT c1, (SELECT SUM(c2) FROM t2 WHERE c1 = t1.c1)) struct<> -- !query output org.apache.spark.sql.AnalysisException -[UNRESOLVED_COLUMN] A column or function parameter with name `t1`.`c1` cannot be resolved. Did you mean one of the following? [`spark_catalog`.`default`.`t2`.`c1`, `spark_catalog`.`default`.`t2`.`c2`]; line 1 pos 73 +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`t1`.`c1`","`spark_catalog`.`default`.`t2`.`c1`, `spark_catalog`.`default`.`t2`.`c2`"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out index 55d80d95ccfa5..7412241e080b4 100644 --- a/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out @@ -69,7 +69,7 @@ select to_json(named_struct('a', 1, 'b', 2), named_struct('mode', 'PERMISSIVE')) struct<> -- !query output org.apache.spark.sql.AnalysisException -Must use a map() function for options; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -78,7 +78,7 @@ select to_json(named_struct('a', 1, 'b', 2), map('mode', 1)) struct<> -- !query output org.apache.spark.sql.AnalysisException -A type of keys and values in map() must be string, but got map; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -87,7 +87,7 @@ select to_json() struct<> -- !query output org.apache.spark.sql.AnalysisException -Invalid number of arguments for function to_json. 
Expected: one of 1 and 2; Found: 0; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -112,7 +112,7 @@ select from_json('{"a":1}', 1) struct<> -- !query output org.apache.spark.sql.AnalysisException -The expression '1' is not a valid schema string.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -121,20 +121,7 @@ select from_json('{"a":1}', 'a InvalidType') struct<> -- !query output org.apache.spark.sql.AnalysisException -Cannot parse the data type: -[PARSE_SYNTAX_ERROR] Syntax error at or near 'InvalidType': extra input 'InvalidType'(line 1, pos 2) - -== SQL == -a InvalidType ---^^^ - -Failed fallback parsing: -DataType invalidtype is not supported.(line 1, pos 2) - -== SQL == -a InvalidType ---^^^ -; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -143,7 +130,7 @@ select from_json('{"a":1}', 'a INT', named_struct('mode', 'PERMISSIVE')) struct<> -- !query output org.apache.spark.sql.AnalysisException -Must use a map() function for options; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -152,7 +139,7 @@ select from_json('{"a":1}', 'a INT', map('mode', 1)) struct<> -- !query output org.apache.spark.sql.AnalysisException -A type of keys and values in map() must be string, but got map; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -161,7 +148,7 @@ select from_json() struct<> -- !query output org.apache.spark.sql.AnalysisException -Invalid number of arguments for function from_json. Expected: one of 2 and 3; Found: 0; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -336,8 +323,7 @@ select from_json( struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to parse '02-29' in the new parser. You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0, or set to "CORRECTED" and treat it as an invalid datetime string. +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":["'02-29'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -349,8 +335,7 @@ select from_json( struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to parse '02-29' in the new parser. You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0, or set to "CORRECTED" and treat it as an invalid datetime string. 
+{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":["'02-29'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -391,7 +376,7 @@ select schema_of_json(null) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'schema_of_json(NULL)' due to data type mismatch: The input json should be a foldable string expression and not null; however, got NULL.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -408,7 +393,7 @@ SELECT schema_of_json(jsonField) FROM jsonTable struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'schema_of_json(jsontable.jsonField)' due to data type mismatch: The input json should be a foldable string expression and not null; however, got jsontable.jsonField.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -425,7 +410,7 @@ select json_array_length(2) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'json_array_length(2)' due to data type mismatch: argument 1 requires string type, however, '2' is of int type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -434,7 +419,7 @@ select json_array_length() struct<> -- !query output org.apache.spark.sql.AnalysisException -Invalid number of arguments for function json_array_length. Expected: 1; Found: 0; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -507,7 +492,7 @@ select json_object_keys() struct<> -- !query output org.apache.spark.sql.AnalysisException -Invalid number of arguments for function json_object_keys. Expected: 1; Found: 0; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -524,7 +509,7 @@ select json_object_keys(200) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'json_object_keys(200)' due to data type mismatch: argument 1 requires string type, however, '200' is of int type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/like-all.sql.out b/sql/core/src/test/resources/sql-tests/results/like-all.sql.out index 3a252d84ba155..bb1838d638da1 100644 --- a/sql/core/src/test/resources/sql-tests/results/like-all.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/like-all.sql.out @@ -129,9 +129,4 @@ SELECT company FROM like_all_table WHERE company LIKE ALL () struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Expected something between '(' and ')'.(line 1, pos 49) - -== SQL == -SELECT company FROM like_all_table WHERE company LIKE ALL () --------------------------------------------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/like-any.sql.out b/sql/core/src/test/resources/sql-tests/results/like-any.sql.out index ecf7b6e7bbcc3..6f35dfb91e5e7 100644 --- a/sql/core/src/test/resources/sql-tests/results/like-any.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/like-any.sql.out @@ -135,9 +135,4 @@ SELECT company FROM like_any_table WHERE company LIKE ANY () struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Expected something between '(' and ')'.(line 1, pos 49) - -== SQL == -SELECT company FROM like_any_table WHERE company LIKE ANY () 
--------------------------------------------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/limit.sql.out b/sql/core/src/test/resources/sql-tests/results/limit.sql.out index 7d1c1e2b34dff..579d2b24cf8d0 100644 --- a/sql/core/src/test/resources/sql-tests/results/limit.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/limit.sql.out @@ -50,7 +50,7 @@ SELECT * FROM testdata LIMIT -1 struct<> -- !query output org.apache.spark.sql.AnalysisException -The limit expression must be equal to or greater than 0, but got -1 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -59,7 +59,7 @@ SELECT * FROM testData TABLESAMPLE (-1 ROWS) struct<> -- !query output org.apache.spark.sql.AnalysisException -The limit expression must be equal to or greater than 0, but got -1 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -76,7 +76,7 @@ SELECT * FROM testdata LIMIT CAST(NULL AS INT) struct<> -- !query output org.apache.spark.sql.AnalysisException -The evaluated limit expression must not be null, but got CAST(NULL AS INT) +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -85,7 +85,7 @@ SELECT * FROM testdata LIMIT key > 3 struct<> -- !query output org.apache.spark.sql.AnalysisException -The limit expression must evaluate to a constant value, but got (spark_catalog.default.testdata.key > 3) +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -94,7 +94,7 @@ SELECT * FROM testdata LIMIT true struct<> -- !query output org.apache.spark.sql.AnalysisException -The limit expression must be integer type, but got boolean +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -103,7 +103,7 @@ SELECT * FROM testdata LIMIT 'a' struct<> -- !query output org.apache.spark.sql.AnalysisException -The limit expression must be integer type, but got string +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/literals.sql.out b/sql/core/src/test/resources/sql-tests/results/literals.sql.out index 6edd513ea2ffa..b40cf817fb328 100644 --- a/sql/core/src/test/resources/sql-tests/results/literals.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/literals.sql.out @@ -37,12 +37,7 @@ select 128Y struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Numeric literal 128 does not fit in range [-128, 127] for type tinyint(line 1, pos 7) - -== SQL == -select 128Y --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -67,12 +62,7 @@ select 32768S struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Numeric literal 32768 does not fit in range [-32768, 32767] for type smallint(line 1, pos 7) - -== SQL == -select 32768S --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -97,12 +87,7 @@ select 9223372036854775808L struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Numeric literal 9223372036854775808 does not fit in range [-9223372036854775808, 9223372036854775807] for type bigint(line 1, pos 7) - -== SQL == -select 9223372036854775808L --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -143,10 +128,7 @@ select 1234567890123456789012345678901234567890 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -decimal can only support precision up to 38 
-== SQL == -select 1234567890123456789012345678901234567890 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -155,10 +137,7 @@ select 1234567890123456789012345678901234567890.0 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -decimal can only support precision up to 38 -== SQL == -select 1234567890123456789012345678901234567890.0 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -183,12 +162,7 @@ select -3.4028235E39f struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Numeric literal -3.4028235E39 does not fit in range [-3.4028234663852886E+38, 3.4028234663852886E+38] for type float(line 1, pos 7) - -== SQL == -select -3.4028235E39f --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -213,12 +187,7 @@ select .e3 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -[PARSE_SYNTAX_ERROR] Syntax error at or near '.'(line 1, pos 7) - -== SQL == -select .e3 --------^^^ +{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'.'",""],"queryContext":[]} -- !query @@ -227,12 +196,7 @@ select 1E309, -1E309 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Numeric literal 1E309 does not fit in range [-1.7976931348623157E+308, 1.7976931348623157E+308] for type double(line 1, pos 7) - -== SQL == -select 1E309, -1E309 --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -331,12 +295,7 @@ select date 'mar 11 2016' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the DATE value: mar 11 2016(line 1, pos 7) - -== SQL == -select date 'mar 11 2016' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -353,12 +312,7 @@ select timestamp '2016-33-11 20:54:00.000' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the TIMESTAMP value: 2016-33-11 20:54:00.000(line 1, pos 7) - -== SQL == -select timestamp '2016-33-11 20:54:00.000' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -367,12 +321,7 @@ select GEO '(10,-6)' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Literals of type 'GEO' are currently not supported.(line 1, pos 7) - -== SQL == -select GEO '(10,-6)' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -389,12 +338,7 @@ select 1.20E-38BD struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -decimal can only support precision up to 38(line 1, pos 7) - -== SQL == -select 1.20E-38BD --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -411,12 +355,7 @@ select X'XuZ' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -contains illegal character for hexBinary: 0XuZ(line 1, pos 7) - -== SQL == -select X'XuZ' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -433,7 +372,7 @@ select +date '1999-01-01' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(+ DATE '1999-01-01')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'DATE '1999-01-01'' is of date type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -442,7 +381,7 @@ select +timestamp 
'1999-01-01' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(+ TIMESTAMP '1999-01-01 00:00:00')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'TIMESTAMP '1999-01-01 00:00:00'' is of timestamp type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -459,7 +398,7 @@ select +map(1, 2) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(+ map(1, 2))' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'map(1, 2)' is of map type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -468,7 +407,7 @@ select +array(1,2) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(+ array(1, 2))' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'array(1, 2)' is of array type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -477,7 +416,7 @@ select +named_struct('a', 1, 'b', 'spark') struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(+ named_struct('a', 1, 'b', 'spark'))' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'named_struct('a', 1, 'b', 'spark')' is of struct type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -486,7 +425,7 @@ select +X'1' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(+ X'01')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'X'01'' is of binary type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -495,7 +434,7 @@ select -date '1999-01-01' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(- DATE '1999-01-01')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'DATE '1999-01-01'' is of date type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -504,7 +443,7 @@ select -timestamp '1999-01-01' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(- TIMESTAMP '1999-01-01 00:00:00')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'TIMESTAMP '1999-01-01 00:00:00'' is of timestamp type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -513,4 +452,4 @@ select -x'2379ACFe' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(- X'2379ACFE')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'X'2379ACFE'' is of binary type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/map.sql.out b/sql/core/src/test/resources/sql-tests/results/map.sql.out index cd7cf9a60ce37..2a7f8459fa6ea 100644 --- a/sql/core/src/test/resources/sql-tests/results/map.sql.out +++ 
b/sql/core/src/test/resources/sql-tests/results/map.sql.out @@ -69,7 +69,7 @@ select map_contains_key(map('1', 'a', '2', 'b'), 1) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'map_contains_key(map('1', 'a', '2', 'b'), 1)' due to data type mismatch: Input to function map_contains_key should have been map followed by a value with same key type, but it's [map, int].; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -78,4 +78,4 @@ select map_contains_key(map(1, 'a', 2, 'b'), '1') struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'map_contains_key(map(1, 'a', 2, 'b'), '1')' due to data type mismatch: Input to function map_contains_key should have been map followed by a value with same key type, but it's [map, string].; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/misc-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/misc-functions.sql.out index 7e9bb2f7acd8a..a0f9f2af04de8 100644 --- a/sql/core/src/test/resources/sql-tests/results/misc-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/misc-functions.sql.out @@ -68,8 +68,8 @@ SELECT assert_true(false) -- !query schema struct<> -- !query output -java.lang.RuntimeException -'false' is not true! +org.apache.spark.SparkException +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -77,8 +77,8 @@ SELECT assert_true(boolean(0)) -- !query schema struct<> -- !query output -java.lang.RuntimeException -'cast(0 as boolean)' is not true! +org.apache.spark.SparkException +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -86,8 +86,8 @@ SELECT assert_true(null) -- !query schema struct<> -- !query output -java.lang.RuntimeException -'null' is not true! +org.apache.spark.SparkException +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -95,8 +95,8 @@ SELECT assert_true(boolean(null)) -- !query schema struct<> -- !query output -java.lang.RuntimeException -'cast(null as boolean)' is not true! 
+org.apache.spark.SparkException +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -104,8 +104,8 @@ SELECT assert_true(false, 'custom error message') -- !query schema struct<> -- !query output -java.lang.RuntimeException -custom error message +org.apache.spark.SparkException +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -121,8 +121,8 @@ SELECT raise_error('error message') -- !query schema struct<> -- !query output -java.lang.RuntimeException -error message +org.apache.spark.SparkException +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -130,5 +130,5 @@ SELECT if(v > 5, raise_error('too big: ' || v), v + 1) FROM tbl_misc -- !query schema struct<> -- !query output -java.lang.RuntimeException -too big: 8 +org.apache.spark.SparkException +{"errorClass":null,"messageParameters":[],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/natural-join.sql.out b/sql/core/src/test/resources/sql-tests/results/natural-join.sql.out index 83afea9d5db2c..4f7bba18a7ac1 100644 --- a/sql/core/src/test/resources/sql-tests/results/natural-join.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/natural-join.sql.out @@ -229,7 +229,7 @@ SELECT nt2.k FROM (SELECT * FROM nt1 natural join nt2) struct<> -- !query output org.apache.spark.sql.AnalysisException -[UNRESOLVED_COLUMN] A column or function parameter with name `nt2`.`k` cannot be resolved. Did you mean one of the following? [`__auto_generated_subquery_name`.`k`, `__auto_generated_subquery_name`.`v1`, `__auto_generated_subquery_name`.`v2`]; line 1 pos 7 +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`nt2`.`k`","`__auto_generated_subquery_name`.`k`, `__auto_generated_subquery_name`.`v1`, `__auto_generated_subquery_name`.`v2`"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/order-by-ordinal.sql.out b/sql/core/src/test/resources/sql-tests/results/order-by-ordinal.sql.out index 581d7d4ae2d39..bc17092f0a0dc 100644 --- a/sql/core/src/test/resources/sql-tests/results/order-by-ordinal.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/order-by-ordinal.sql.out @@ -72,7 +72,7 @@ select * from data order by 0 struct<> -- !query output org.apache.spark.sql.AnalysisException -ORDER BY position 0 is not in select list (valid range is [1, 2]); line 1 pos 28 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -81,7 +81,7 @@ select * from data order by -1 struct<> -- !query output org.apache.spark.sql.AnalysisException -ORDER BY position -1 is not in select list (valid range is [1, 2]); line 1 pos 28 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -90,7 +90,7 @@ select * from data order by 3 struct<> -- !query output org.apache.spark.sql.AnalysisException -ORDER BY position 3 is not in select list (valid range is [1, 2]); line 1 pos 28 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/percentiles.sql.out b/sql/core/src/test/resources/sql-tests/results/percentiles.sql.out index f124dcc322e12..1e399e7667823 100644 --- a/sql/core/src/test/resources/sql-tests/results/percentiles.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/percentiles.sql.out @@ -178,7 +178,7 @@ ORDER BY salary struct<> -- !query output org.apache.spark.sql.AnalysisException -Cannot specify order by or frame for 'percentile_cont'. 
+{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -194,7 +194,7 @@ ORDER BY salary struct<> -- !query output org.apache.spark.sql.AnalysisException -Cannot specify order by or frame for 'percentile_disc'. +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -209,7 +209,7 @@ ORDER BY salary struct<> -- !query output org.apache.spark.sql.AnalysisException -Cannot specify order by or frame for 'median'. +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -225,7 +225,7 @@ ORDER BY salary struct<> -- !query output org.apache.spark.sql.AnalysisException -Cannot specify order by or frame for 'percentile_cont'. +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -241,7 +241,7 @@ ORDER BY salary struct<> -- !query output org.apache.spark.sql.AnalysisException -Cannot specify order by or frame for 'percentile_disc'. +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -256,7 +256,7 @@ ORDER BY salary struct<> -- !query output org.apache.spark.sql.AnalysisException -Cannot specify order by or frame for 'median'. +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -336,7 +336,7 @@ ORDER BY salary struct<> -- !query output org.apache.spark.sql.AnalysisException -Cannot specify order by or frame for 'percentile_cont'. +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -353,7 +353,7 @@ ORDER BY salary struct<> -- !query output org.apache.spark.sql.AnalysisException -Cannot specify order by or frame for 'percentile_disc'. +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -369,7 +369,7 @@ ORDER BY salary struct<> -- !query output org.apache.spark.sql.AnalysisException -Cannot specify order by or frame for 'median'. +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -386,7 +386,7 @@ ORDER BY salary struct<> -- !query output org.apache.spark.sql.AnalysisException -Cannot specify order by or frame for 'percentile_cont'. +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -403,7 +403,7 @@ ORDER BY salary struct<> -- !query output org.apache.spark.sql.AnalysisException -Cannot specify order by or frame for 'percentile_disc'. +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -419,7 +419,7 @@ ORDER BY salary struct<> -- !query output org.apache.spark.sql.AnalysisException -Cannot specify order by or frame for 'median'. +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/pivot.sql.out b/sql/core/src/test/resources/sql-tests/results/pivot.sql.out index f739131c62773..4af0434622598 100644 --- a/sql/core/src/test/resources/sql-tests/results/pivot.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/pivot.sql.out @@ -199,7 +199,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -Aggregate expression required for pivot, but 'coursesales.earnings' did not appear in any aggregate function. +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -214,7 +214,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -Aggregate expression required for pivot, but '__auto_generated_subquery_name.year' did not appear in any aggregate function. 
+{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -229,7 +229,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -[UNRESOLVED_COLUMN] A column or function parameter with name `year` cannot be resolved. Did you mean one of the following? [`__auto_generated_subquery_name`.`course`, `__auto_generated_subquery_name`.`earnings`]; line 4 pos 0 +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`year`","`__auto_generated_subquery_name`.`course`, `__auto_generated_subquery_name`.`earnings`"],"queryContext":[]} -- !query @@ -259,7 +259,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -It is not allowed to use an aggregate function in the argument of another aggregate function. Please use the inner aggregate function in a sub-query. +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -310,7 +310,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -[PIVOT_VALUE_DATA_TYPE_MISMATCH] Invalid pivot value 'dotNET': value data type string does not match pivot column data type struct +{"errorClass":"PIVOT_VALUE_DATA_TYPE_MISMATCH","sqlState":"42000","messageParameters":["dotNET","string","struct"],"queryContext":[]} -- !query @@ -323,7 +323,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -[UNRESOLVED_COLUMN] A column or function parameter with name `s` cannot be resolved. Did you mean one of the following? [`coursesales`.`year`, `coursesales`.`course`, `coursesales`.`earnings`]; line 4 pos 15 +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`s`","`coursesales`.`year`, `coursesales`.`course`, `coursesales`.`earnings`"],"queryContext":[]} -- !query @@ -336,7 +336,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -[NON_LITERAL_PIVOT_VALUES] Literal expressions required for pivot values, found "course". +{"errorClass":"NON_LITERAL_PIVOT_VALUES","sqlState":"42000","messageParameters":["\"course\""],"queryContext":[]} -- !query @@ -455,7 +455,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -[INCOMPARABLE_PIVOT_COLUMN] Invalid pivot column `__auto_generated_subquery_name`.`m`. Pivot columns must be comparable. +{"errorClass":"INCOMPARABLE_PIVOT_COLUMN","sqlState":"42000","messageParameters":["`__auto_generated_subquery_name`.`m`"],"queryContext":[]} -- !query @@ -472,7 +472,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -[INCOMPARABLE_PIVOT_COLUMN] Invalid pivot column `named_struct('course', __auto_generated_subquery_name`.`course, 'm', __auto_generated_subquery_name`.`m)`. Pivot columns must be comparable. 
+{"errorClass":"INCOMPARABLE_PIVOT_COLUMN","sqlState":"42000","messageParameters":["`named_struct('course', __auto_generated_subquery_name`.`course, 'm', __auto_generated_subquery_name`.`m)`"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/aggregates_part1.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/aggregates_part1.sql.out index a0978af360644..ec57cbba057e0 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/aggregates_part1.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/aggregates_part1.sql.out @@ -484,7 +484,7 @@ having exists (select 1 from onek b struct<> -- !query output org.apache.spark.sql.AnalysisException -Found an aggregate function in a correlated predicate that has both outer and local references, which is not supported: sum(DISTINCT (outer(a.four) + b.four)) +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -495,4 +495,4 @@ from tenk1 o struct<> -- !query output org.apache.spark.sql.AnalysisException -[UNRESOLVED_COLUMN] A column or function parameter with name `o`.`unique1` cannot be resolved. Did you mean one of the following? [`i`.`unique1`, `i`.`unique2`, `i`.`hundred`, `i`.`even`, `i`.`four`, `i`.`stringu1`, `i`.`ten`, `i`.`odd`, `i`.`string4`, `i`.`stringu2`, `i`.`tenthous`, `i`.`twenty`, `i`.`two`, `i`.`thousand`, `i`.`fivethous`, `i`.`twothousand`]; line 2 pos 63 +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`o`.`unique1`","`i`.`unique1`, `i`.`unique2`, `i`.`hundred`, `i`.`even`, `i`.`four`, `i`.`stringu1`, `i`.`ten`, `i`.`odd`, `i`.`string4`, `i`.`stringu2`, `i`.`tenthous`, `i`.`twenty`, `i`.`two`, `i`.`thousand`, `i`.`fivethous`, `i`.`twothousand`"],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/aggregates_part3.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/aggregates_part3.sql.out index 450dd5ca743cc..f5b3286795aba 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/aggregates_part3.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/aggregates_part3.sql.out @@ -5,7 +5,7 @@ select max(min(unique1)) from tenk1 struct<> -- !query output org.apache.spark.sql.AnalysisException -It is not allowed to use an aggregate function in the argument of another aggregate function. Please use the inner aggregate function in a sub-query. +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/boolean.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/boolean.sql.out index 83020db06d9b2..a5095b85a0d83 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/boolean.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/boolean.sql.out @@ -53,10 +53,7 @@ SELECT boolean('test') AS error struct<> -- !query output org.apache.spark.SparkRuntimeException -[CAST_INVALID_INPUT] The value 'test' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. 
-== SQL(line 1, position 8) == -SELECT boolean('test') AS error - ^^^^^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'test'","\"STRING\"","\"BOOLEAN\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":21,"fragment":"boolean('test'"}]} -- !query @@ -73,10 +70,7 @@ SELECT boolean('foo') AS error struct<> -- !query output org.apache.spark.SparkRuntimeException -[CAST_INVALID_INPUT] The value 'foo' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -SELECT boolean('foo') AS error - ^^^^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'foo'","\"STRING\"","\"BOOLEAN\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":20,"fragment":"boolean('foo'"}]} -- !query @@ -101,10 +95,7 @@ SELECT boolean('yeah') AS error struct<> -- !query output org.apache.spark.SparkRuntimeException -[CAST_INVALID_INPUT] The value 'yeah' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -SELECT boolean('yeah') AS error - ^^^^^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'yeah'","\"STRING\"","\"BOOLEAN\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":21,"fragment":"boolean('yeah'"}]} -- !query @@ -129,10 +120,7 @@ SELECT boolean('nay') AS error struct<> -- !query output org.apache.spark.SparkRuntimeException -[CAST_INVALID_INPUT] The value 'nay' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -SELECT boolean('nay') AS error - ^^^^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'nay'","\"STRING\"","\"BOOLEAN\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":20,"fragment":"boolean('nay'"}]} -- !query @@ -141,10 +129,7 @@ SELECT boolean('on') AS true struct<> -- !query output org.apache.spark.SparkRuntimeException -[CAST_INVALID_INPUT] The value 'on' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. 
-== SQL(line 1, position 8) == -SELECT boolean('on') AS true - ^^^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'on'","\"STRING\"","\"BOOLEAN\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":19,"fragment":"boolean('on'"}]} -- !query @@ -153,10 +138,7 @@ SELECT boolean('off') AS `false` struct<> -- !query output org.apache.spark.SparkRuntimeException -[CAST_INVALID_INPUT] The value 'off' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -SELECT boolean('off') AS `false` - ^^^^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'off'","\"STRING\"","\"BOOLEAN\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":20,"fragment":"boolean('off'"}]} -- !query @@ -165,10 +147,7 @@ SELECT boolean('of') AS `false` struct<> -- !query output org.apache.spark.SparkRuntimeException -[CAST_INVALID_INPUT] The value 'of' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -SELECT boolean('of') AS `false` - ^^^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'of'","\"STRING\"","\"BOOLEAN\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":19,"fragment":"boolean('of'"}]} -- !query @@ -177,10 +156,7 @@ SELECT boolean('o') AS error struct<> -- !query output org.apache.spark.SparkRuntimeException -[CAST_INVALID_INPUT] The value 'o' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -SELECT boolean('o') AS error - ^^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'o'","\"STRING\"","\"BOOLEAN\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":18,"fragment":"boolean('o'"}]} -- !query @@ -189,10 +165,7 @@ SELECT boolean('on_') AS error struct<> -- !query output org.apache.spark.SparkRuntimeException -[CAST_INVALID_INPUT] The value 'on_' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. 
-== SQL(line 1, position 8) == -SELECT boolean('on_') AS error - ^^^^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'on_'","\"STRING\"","\"BOOLEAN\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":20,"fragment":"boolean('on_'"}]} -- !query @@ -201,10 +174,7 @@ SELECT boolean('off_') AS error struct<> -- !query output org.apache.spark.SparkRuntimeException -[CAST_INVALID_INPUT] The value 'off_' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -SELECT boolean('off_') AS error - ^^^^^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'off_'","\"STRING\"","\"BOOLEAN\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":21,"fragment":"boolean('off_'"}]} -- !query @@ -221,10 +191,7 @@ SELECT boolean('11') AS error struct<> -- !query output org.apache.spark.SparkRuntimeException -[CAST_INVALID_INPUT] The value '11' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -SELECT boolean('11') AS error - ^^^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'11'","\"STRING\"","\"BOOLEAN\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":19,"fragment":"boolean('11'"}]} -- !query @@ -241,10 +208,7 @@ SELECT boolean('000') AS error struct<> -- !query output org.apache.spark.SparkRuntimeException -[CAST_INVALID_INPUT] The value '000' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -SELECT boolean('000') AS error - ^^^^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'000'","\"STRING\"","\"BOOLEAN\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":20,"fragment":"boolean('000'"}]} -- !query @@ -253,10 +217,7 @@ SELECT boolean('') AS error struct<> -- !query output org.apache.spark.SparkRuntimeException -[CAST_INVALID_INPUT] The value '' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. 
-== SQL(line 1, position 8) == -SELECT boolean('') AS error - ^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["''","\"STRING\"","\"BOOLEAN\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":17,"fragment":"boolean(''"}]} -- !query @@ -362,10 +323,7 @@ SELECT boolean(string(' tru e ')) AS invalid struct<> -- !query output org.apache.spark.SparkRuntimeException -[CAST_INVALID_INPUT] The value ' tru e ' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -SELECT boolean(string(' tru e ')) AS invalid - ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["' tru e '","\"STRING\"","\"BOOLEAN\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":33,"fragment":"boolean(string(' tru e ')"}]} -- !query @@ -374,10 +332,7 @@ SELECT boolean(string('')) AS invalid struct<> -- !query output org.apache.spark.SparkRuntimeException -[CAST_INVALID_INPUT] The value '' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -SELECT boolean(string('')) AS invalid - ^^^^^^^^^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["''","\"STRING\"","\"BOOLEAN\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":25,"fragment":"boolean(string('')"}]} -- !query @@ -521,11 +476,7 @@ INSERT INTO BOOLTBL2 struct<> -- !query output org.apache.spark.sql.AnalysisException -failed to evaluate expression CAST('XXX' AS BOOLEAN): [CAST_INVALID_INPUT] The value 'XXX' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 2, position 12) == - VALUES (boolean('XXX')) - ^^^^^^^^^^^^^^ -; line 2 pos 3 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/create_view.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/create_view.sql.out index 3e2c98568831e..e488bc5ef5095 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/create_view.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/create_view.sql.out @@ -53,7 +53,7 @@ CREATE VIEW key_dependent_view AS struct<> -- !query output org.apache.spark.sql.AnalysisException -expression 'spark_catalog.default.view_base_table.data' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get. 
+{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -63,7 +63,7 @@ CREATE VIEW key_dependent_view_no_cols AS struct<> -- !query output org.apache.spark.sql.AnalysisException -[UNRESOLVED_COLUMN] A column or function parameter with name `FROM` cannot be resolved. Did you mean one of the following? []; line 2 pos 10 +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`FROM`",""],"queryContext":[]} -- !query @@ -257,7 +257,7 @@ CREATE VIEW v1_temp AS SELECT * FROM temp_table struct<> -- !query output org.apache.spark.sql.AnalysisException -Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v1_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -313,7 +313,7 @@ CREATE VIEW temp_view_test.v3_temp AS SELECT * FROM temp_table struct<> -- !query output org.apache.spark.sql.AnalysisException -Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v3_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -362,7 +362,7 @@ CREATE VIEW v4_temp AS struct<> -- !query output org.apache.spark.sql.AnalysisException -Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v4_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -374,7 +374,7 @@ CREATE VIEW v5_temp AS struct<> -- !query output org.apache.spark.sql.AnalysisException -Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v5_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -533,7 +533,7 @@ CREATE VIEW v6_temp AS SELECT * FROM base_table WHERE id IN (SELECT id FROM temp struct<> -- !query output org.apache.spark.sql.AnalysisException -Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v6_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -542,7 +542,7 @@ CREATE VIEW v7_temp AS SELECT t1.id, t2.a FROM base_table t1, (SELECT * FROM tem struct<> -- !query output org.apache.spark.sql.AnalysisException -Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v7_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -551,7 +551,7 @@ CREATE VIEW v8_temp AS SELECT * FROM base_table WHERE EXISTS (SELECT 1 FROM temp struct<> -- !query output org.apache.spark.sql.AnalysisException -Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v8_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -560,7 +560,7 @@ CREATE VIEW v9_temp AS SELECT * FROM base_table WHERE NOT EXISTS (SELECT 1 FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v9_temp` by referencing a temporary view temp_table. 
Please create a temp view instead by CREATE TEMP VIEW +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -569,7 +569,7 @@ CREATE VIEW v10_temp AS SELECT * FROM v7_temp struct<> -- !query output org.apache.spark.sql.AnalysisException -Table or view not found: v7_temp; line 1 pos 38 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -578,7 +578,7 @@ CREATE VIEW v11_temp AS SELECT t1.id, t2.a FROM base_table t1, v10_temp t2 struct<> -- !query output org.apache.spark.sql.AnalysisException -Table or view not found: v10_temp; line 1 pos 63 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -587,7 +587,7 @@ CREATE VIEW v12_temp AS SELECT true FROM v11_temp struct<> -- !query output org.apache.spark.sql.AnalysisException -Table or view not found: v11_temp; line 1 pos 41 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -669,7 +669,7 @@ CREATE VIEW temporal1 AS SELECT * FROM t1 CROSS JOIN tt struct<> -- !query output org.apache.spark.sql.AnalysisException -Not allowed to create a permanent view `spark_catalog`.`testviewschm2`.`temporal1` by referencing a temporary view tt. Please create a temp view instead by CREATE TEMP VIEW +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -710,7 +710,7 @@ CREATE VIEW temporal2 AS SELECT * FROM t1 INNER JOIN tt ON t1.num = tt.num2 struct<> -- !query output org.apache.spark.sql.AnalysisException -Not allowed to create a permanent view `spark_catalog`.`testviewschm2`.`temporal2` by referencing a temporary view tt. Please create a temp view instead by CREATE TEMP VIEW +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -751,7 +751,7 @@ CREATE VIEW temporal3 AS SELECT * FROM t1 LEFT JOIN tt ON t1.num = tt.num2 struct<> -- !query output org.apache.spark.sql.AnalysisException -Not allowed to create a permanent view `spark_catalog`.`testviewschm2`.`temporal3` by referencing a temporary view tt. Please create a temp view instead by CREATE TEMP VIEW +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -792,7 +792,7 @@ CREATE VIEW temporal4 AS SELECT * FROM t1 LEFT JOIN tt ON t1.num = tt.num2 AND t struct<> -- !query output org.apache.spark.sql.AnalysisException -Not allowed to create a permanent view `spark_catalog`.`testviewschm2`.`temporal4` by referencing a temporary view tt. Please create a temp view instead by CREATE TEMP VIEW +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -801,7 +801,7 @@ CREATE VIEW temporal5 AS SELECT * FROM t1 WHERE num IN (SELECT num FROM t1 WHERE struct<> -- !query output org.apache.spark.sql.AnalysisException -Not allowed to create a permanent view `spark_catalog`.`testviewschm2`.`temporal5` by referencing a temporary view tt. 
Please create a temp view instead by CREATE TEMP VIEW +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out index 7edda980be3fd..ff959dba2e3ad 100755 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out @@ -198,12 +198,7 @@ SELECT date '1999 Jan 08' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the DATE value: 1999 Jan 08(line 1, pos 7) - -== SQL == -SELECT date '1999 Jan 08' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -212,12 +207,7 @@ SELECT date '1999 08 Jan' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the DATE value: 1999 08 Jan(line 1, pos 7) - -== SQL == -SELECT date '1999 08 Jan' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -242,12 +232,7 @@ SELECT date '1999 01 08' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the DATE value: 1999 01 08(line 1, pos 7) - -== SQL == -SELECT date '1999 01 08' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -256,12 +241,7 @@ SELECT date '1999 08 01' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the DATE value: 1999 08 01(line 1, pos 7) - -== SQL == -SELECT date '1999 08 01' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -278,12 +258,7 @@ SELECT date '1999 Jan 08' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the DATE value: 1999 Jan 08(line 1, pos 7) - -== SQL == -SELECT date '1999 Jan 08' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -292,12 +267,7 @@ SELECT date '1999 08 Jan' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the DATE value: 1999 08 Jan(line 1, pos 7) - -== SQL == -SELECT date '1999 08 Jan' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -322,12 +292,7 @@ SELECT date '1999 01 08' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the DATE value: 1999 01 08(line 1, pos 7) - -== SQL == -SELECT date '1999 01 08' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -336,12 +301,7 @@ SELECT date '1999 08 01' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the DATE value: 1999 08 01(line 1, pos 7) - -== SQL == -SELECT date '1999 08 01' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -366,12 +326,7 @@ SELECT date '1999 Jan 08' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the DATE value: 1999 Jan 08(line 1, pos 7) - -== SQL == -SELECT date '1999 Jan 08' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -380,12 +335,7 @@ SELECT date '1999 08 Jan' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the DATE value: 1999 08 Jan(line 1, pos 7) - -== SQL == -SELECT date '1999 08 Jan' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -410,12 
+360,7 @@ SELECT date '1999 01 08' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the DATE value: 1999 01 08(line 1, pos 7) - -== SQL == -SELECT date '1999 01 08' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -424,12 +369,7 @@ SELECT date '1999 08 01' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the DATE value: 1999 08 01(line 1, pos 7) - -== SQL == -SELECT date '1999 08 01' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/float4.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/float4.sql.out index 4002a41295fa2..2be70bb8db988 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/float4.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/float4.sql.out @@ -93,10 +93,7 @@ SELECT float('N A N') struct<> -- !query output org.apache.spark.SparkNumberFormatException -[CAST_INVALID_INPUT] The value 'N A N' of the type "STRING" cannot be cast to "FLOAT" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -SELECT float('N A N') - ^^^^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'N A N'","\"STRING\"","\"FLOAT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":20,"fragment":"float('N A N'"}]} -- !query @@ -105,10 +102,7 @@ SELECT float('NaN x') struct<> -- !query output org.apache.spark.SparkNumberFormatException -[CAST_INVALID_INPUT] The value 'NaN x' of the type "STRING" cannot be cast to "FLOAT" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -SELECT float('NaN x') - ^^^^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'NaN x'","\"STRING\"","\"FLOAT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":20,"fragment":"float('NaN x'"}]} -- !query @@ -117,10 +111,7 @@ SELECT float(' INFINITY x') struct<> -- !query output org.apache.spark.SparkNumberFormatException -[CAST_INVALID_INPUT] The value ' INFINITY x' of the type "STRING" cannot be cast to "FLOAT" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -SELECT float(' INFINITY x') - ^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["' INFINITY x'","\"STRING\"","\"FLOAT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":29,"fragment":"float(' INFINITY x'"}]} -- !query @@ -153,10 +144,7 @@ SELECT float(decimal('nan')) struct<> -- !query output org.apache.spark.SparkNumberFormatException -[CAST_INVALID_INPUT] The value 'nan' of the type "STRING" cannot be cast to "DECIMAL(10,0)" because it is malformed. 
Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 14) == -SELECT float(decimal('nan')) - ^^^^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'nan'","\"STRING\"","\"DECIMAL(10,0)\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":13,"stopIndex":26,"fragment":"decimal('nan'"}]} -- !query @@ -337,7 +325,7 @@ SELECT int(float('2147483647')) struct<> -- !query output org.apache.spark.SparkArithmeticException -[CAST_OVERFLOW] The value 2.14748365E9 of the type "FLOAT" cannot be cast to "INT" due to an overflow. Use `try_cast` to tolerate overflow and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":["2.14748365E9","\"FLOAT\"","\"INT\"","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -354,7 +342,7 @@ SELECT int(float('-2147483900')) struct<> -- !query output org.apache.spark.SparkArithmeticException -[CAST_OVERFLOW] The value -2.1474839E9 of the type "FLOAT" cannot be cast to "INT" due to an overflow. Use `try_cast` to tolerate overflow and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":["-2.1474839E9","\"FLOAT\"","\"INT\"","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -387,7 +375,7 @@ SELECT bigint(float('-9223380000000000000')) struct<> -- !query output org.apache.spark.SparkArithmeticException -[CAST_OVERFLOW] The value -9.22338E18 of the type "FLOAT" cannot be cast to "BIGINT" due to an overflow. Use `try_cast` to tolerate overflow and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":["-9.22338E18","\"FLOAT\"","\"BIGINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/float8.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/float8.sql.out index ddc2b70250b02..8968bee9a31ac 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/float8.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/float8.sql.out @@ -125,10 +125,7 @@ SELECT double('N A N') struct<> -- !query output org.apache.spark.SparkNumberFormatException -[CAST_INVALID_INPUT] The value 'N A N' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -SELECT double('N A N') - ^^^^^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'N A N'","\"STRING\"","\"DOUBLE\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":21,"fragment":"double('N A N'"}]} -- !query @@ -137,10 +134,7 @@ SELECT double('NaN x') struct<> -- !query output org.apache.spark.SparkNumberFormatException -[CAST_INVALID_INPUT] The value 'NaN x' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. 
Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -SELECT double('NaN x') - ^^^^^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'NaN x'","\"STRING\"","\"DOUBLE\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":21,"fragment":"double('NaN x'"}]} -- !query @@ -149,10 +143,7 @@ SELECT double(' INFINITY x') struct<> -- !query output org.apache.spark.SparkNumberFormatException -[CAST_INVALID_INPUT] The value ' INFINITY x' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -SELECT double(' INFINITY x') - ^^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["' INFINITY x'","\"STRING\"","\"DOUBLE\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":30,"fragment":"double(' INFINITY x'"}]} -- !query @@ -185,10 +176,7 @@ SELECT double(decimal('nan')) struct<> -- !query output org.apache.spark.SparkNumberFormatException -[CAST_INVALID_INPUT] The value 'nan' of the type "STRING" cannot be cast to "DECIMAL(10,0)" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 15) == -SELECT double(decimal('nan')) - ^^^^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'nan'","\"STRING\"","\"DECIMAL(10,0)\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":14,"stopIndex":27,"fragment":"decimal('nan'"}]} -- !query @@ -842,7 +830,7 @@ SELECT bigint(double('-9223372036854780000')) struct<> -- !query output org.apache.spark.SparkArithmeticException -[CAST_OVERFLOW] The value -9.22337203685478E18D of the type "DOUBLE" cannot be cast to "BIGINT" due to an overflow. Use `try_cast` to tolerate overflow and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":["-9.22337203685478E18D","\"DOUBLE\"","\"BIGINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int4.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int4.sql.out index 745633e157a83..1ee88355f9b5d 100755 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int4.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int4.sql.out @@ -196,11 +196,8 @@ SELECT '' AS five, i.f1, i.f1 * smallint('2') AS x FROM INT4_TBL i -- !query schema struct<> -- !query output -org.apache.spark.SparkArithmeticException -[ARITHMETIC_OVERFLOW] integer overflow. Use 'try_multiply' to tolerate overflow and return NULL instead. If necessary set spark.sql.ansi.enabled to "false" to bypass this error. 
-== SQL(line 1, position 26) == -SELECT '' AS five, i.f1, i.f1 * smallint('2') AS x FROM INT4_TBL i - ^^^^^^^^^^^^^^^^^^^^ +org.apache.spark.SparkException +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -219,11 +216,8 @@ SELECT '' AS five, i.f1, i.f1 * int('2') AS x FROM INT4_TBL i -- !query schema struct<> -- !query output -org.apache.spark.SparkArithmeticException -[ARITHMETIC_OVERFLOW] integer overflow. Use 'try_multiply' to tolerate overflow and return NULL instead. If necessary set spark.sql.ansi.enabled to "false" to bypass this error. -== SQL(line 1, position 26) == -SELECT '' AS five, i.f1, i.f1 * int('2') AS x FROM INT4_TBL i - ^^^^^^^^^^^^^^^ +org.apache.spark.SparkException +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -242,11 +236,8 @@ SELECT '' AS five, i.f1, i.f1 + smallint('2') AS x FROM INT4_TBL i -- !query schema struct<> -- !query output -org.apache.spark.SparkArithmeticException -[ARITHMETIC_OVERFLOW] integer overflow. Use 'try_add' to tolerate overflow and return NULL instead. If necessary set spark.sql.ansi.enabled to "false" to bypass this error. -== SQL(line 1, position 26) == -SELECT '' AS five, i.f1, i.f1 + smallint('2') AS x FROM INT4_TBL i - ^^^^^^^^^^^^^^^^^^^^ +org.apache.spark.SparkException +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -266,11 +257,8 @@ SELECT '' AS five, i.f1, i.f1 + int('2') AS x FROM INT4_TBL i -- !query schema struct<> -- !query output -org.apache.spark.SparkArithmeticException -[ARITHMETIC_OVERFLOW] integer overflow. Use 'try_add' to tolerate overflow and return NULL instead. If necessary set spark.sql.ansi.enabled to "false" to bypass this error. -== SQL(line 1, position 26) == -SELECT '' AS five, i.f1, i.f1 + int('2') AS x FROM INT4_TBL i - ^^^^^^^^^^^^^^^ +org.apache.spark.SparkException +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -290,11 +278,8 @@ SELECT '' AS five, i.f1, i.f1 - smallint('2') AS x FROM INT4_TBL i -- !query schema struct<> -- !query output -org.apache.spark.SparkArithmeticException -[ARITHMETIC_OVERFLOW] integer overflow. Use 'try_subtract' to tolerate overflow and return NULL instead. If necessary set spark.sql.ansi.enabled to "false" to bypass this error. -== SQL(line 1, position 26) == -SELECT '' AS five, i.f1, i.f1 - smallint('2') AS x FROM INT4_TBL i - ^^^^^^^^^^^^^^^^^^^^ +org.apache.spark.SparkException +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -314,11 +299,8 @@ SELECT '' AS five, i.f1, i.f1 - int('2') AS x FROM INT4_TBL i -- !query schema struct<> -- !query output -org.apache.spark.SparkArithmeticException -[ARITHMETIC_OVERFLOW] integer overflow. Use 'try_subtract' to tolerate overflow and return NULL instead. If necessary set spark.sql.ansi.enabled to "false" to bypass this error. 
-== SQL(line 1, position 26) == -SELECT '' AS five, i.f1, i.f1 - int('2') AS x FROM INT4_TBL i - ^^^^^^^^^^^^^^^ +org.apache.spark.SparkException +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out index ab77dbe6a6029..f5901bdb1cf2a 100755 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out @@ -388,11 +388,8 @@ SELECT '' AS three, q1, q2, q1 * q2 AS multiply FROM INT8_TBL -- !query schema struct<> -- !query output -org.apache.spark.SparkArithmeticException -[ARITHMETIC_OVERFLOW] long overflow. Use 'try_multiply' to tolerate overflow and return NULL instead. If necessary set spark.sql.ansi.enabled to "false" to bypass this error. -== SQL(line 1, position 29) == -SELECT '' AS three, q1, q2, q1 * q2 AS multiply FROM INT8_TBL - ^^^^^^^ +org.apache.spark.SparkException +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -647,10 +644,7 @@ select bigint('9223372036854775800') / bigint('0') struct<> -- !query output org.apache.spark.SparkArithmeticException -[DIVIDE_BY_ZERO] Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -select bigint('9223372036854775800') / bigint('0') - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"DIVIDE_BY_ZERO","sqlState":"22012","messageParameters":["\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":49,"fragment":"bigint('9223372036854775800') / bigint('0'"}]} -- !query @@ -659,10 +653,7 @@ select bigint('-9223372036854775808') / smallint('0') struct<> -- !query output org.apache.spark.SparkArithmeticException -[DIVIDE_BY_ZERO] Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -select bigint('-9223372036854775808') / smallint('0') - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"DIVIDE_BY_ZERO","sqlState":"22012","messageParameters":["\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":52,"fragment":"bigint('-9223372036854775808') / smallint('0'"}]} -- !query @@ -671,10 +662,7 @@ select smallint('100') / bigint('0') struct<> -- !query output org.apache.spark.SparkArithmeticException -[DIVIDE_BY_ZERO] Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -select smallint('100') / bigint('0') - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"DIVIDE_BY_ZERO","sqlState":"22012","messageParameters":["\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":35,"fragment":"smallint('100') / bigint('0'"}]} -- !query @@ -690,8 +678,8 @@ SELECT CAST(q1 AS int) FROM int8_tbl WHERE q2 <> 456 -- !query schema struct<> -- !query output -org.apache.spark.SparkArithmeticException -[CAST_OVERFLOW] The value 4567890123456789L of the type "BIGINT" cannot be cast to "INT" due to an overflow. Use `try_cast` to tolerate overflow and return NULL instead. 
If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +org.apache.spark.SparkException +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -707,8 +695,8 @@ SELECT CAST(q1 AS smallint) FROM int8_tbl WHERE q2 <> 456 -- !query schema struct<> -- !query output -org.apache.spark.SparkArithmeticException -[CAST_OVERFLOW] The value 4567890123456789L of the type "BIGINT" cannot be cast to "SMALLINT" due to an overflow. Use `try_cast` to tolerate overflow and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +org.apache.spark.SparkException +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -745,7 +733,7 @@ SELECT CAST(double('922337203685477580700.0') AS bigint) struct<> -- !query output org.apache.spark.SparkArithmeticException -[CAST_OVERFLOW] The value 9.223372036854776E20D of the type "DOUBLE" cannot be cast to "BIGINT" due to an overflow. Use `try_cast` to tolerate overflow and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":["9.223372036854776E20D","\"DOUBLE\"","\"BIGINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -783,12 +771,7 @@ SELECT * FROM range(bigint('+4567890123456789'), bigint('+4567890123456799'), 0) struct<> -- !query output org.apache.spark.sql.AnalysisException -Table-valued function range with alternatives: - range(start: long, end: long, step: long, numSlices: integer) - range(start: long, end: long, step: long) - range(start: long, end: long) - range(end: long) -cannot be applied to (long, long, integer): requirement failed: step (0) cannot be 0; line 1 pos 14 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -817,7 +800,7 @@ SELECT string(int(shiftleft(bigint(-1), 63))+1) struct<> -- !query output org.apache.spark.SparkArithmeticException -[CAST_OVERFLOW] The value -9223372036854775808L of the type "BIGINT" cannot be cast to "INT" due to an overflow. Use `try_cast` to tolerate overflow and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":["-9223372036854775808L","\"BIGINT\"","\"INT\"","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -826,10 +809,7 @@ SELECT bigint((-9223372036854775808)) * bigint((-1)) struct<> -- !query output org.apache.spark.SparkArithmeticException -[ARITHMETIC_OVERFLOW] long overflow. Use 'try_multiply' to tolerate overflow and return NULL instead. If necessary set spark.sql.ansi.enabled to "false" to bypass this error. -== SQL(line 1, position 8) == -SELECT bigint((-9223372036854775808)) * bigint((-1)) - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["long overflow"," Use 'try_multiply' to tolerate overflow and return NULL instead.","spark.sql.ansi.enabled"],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":51,"fragment":"bigint((-9223372036854775808)) * bigint((-1)"}]} -- !query @@ -854,10 +834,7 @@ SELECT bigint((-9223372036854775808)) * int((-1)) struct<> -- !query output org.apache.spark.SparkArithmeticException -[ARITHMETIC_OVERFLOW] long overflow. Use 'try_multiply' to tolerate overflow and return NULL instead. If necessary set spark.sql.ansi.enabled to "false" to bypass this error. 
-== SQL(line 1, position 8) == -SELECT bigint((-9223372036854775808)) * int((-1)) - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["long overflow"," Use 'try_multiply' to tolerate overflow and return NULL instead.","spark.sql.ansi.enabled"],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":48,"fragment":"bigint((-9223372036854775808)) * int((-1)"}]} -- !query @@ -882,10 +859,7 @@ SELECT bigint((-9223372036854775808)) * smallint((-1)) struct<> -- !query output org.apache.spark.SparkArithmeticException -[ARITHMETIC_OVERFLOW] long overflow. Use 'try_multiply' to tolerate overflow and return NULL instead. If necessary set spark.sql.ansi.enabled to "false" to bypass this error. -== SQL(line 1, position 8) == -SELECT bigint((-9223372036854775808)) * smallint((-1)) - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["long overflow"," Use 'try_multiply' to tolerate overflow and return NULL instead.","spark.sql.ansi.enabled"],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":53,"fragment":"bigint((-9223372036854775808)) * smallint((-1)"}]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out index 04df07bff577b..fa3e99f676b45 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out @@ -101,12 +101,7 @@ SELECT interval '1 2:03' day to hour struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Interval string does not match day-time format of `[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval day to hour: 1 2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16) - -== SQL == -SELECT interval '1 2:03' day to hour -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -115,12 +110,7 @@ SELECT interval '1 2:03:04' day to hour struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Interval string does not match day-time format of `[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval day to hour: 1 2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16) - -== SQL == -SELECT interval '1 2:03:04' day to hour -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -137,12 +127,7 @@ SELECT interval '1 2:03:04' day to minute struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Interval string does not match day-time format of `[+|-]d h:m`, `INTERVAL [+|-]'[+|-]d h:m' DAY TO MINUTE` when cast to interval day to minute: 1 2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16) - -== SQL == -SELECT interval '1 2:03:04' day to minute -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -151,12 +136,7 @@ SELECT interval '1 2:03' day to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Interval string does not match day-time format of `[+|-]d h:m:s.n`, `INTERVAL [+|-]'[+|-]d h:m:s.n' DAY TO SECOND` when cast to interval day 
to second: 1 2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16) - -== SQL == -SELECT interval '1 2:03' day to second -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -173,12 +153,7 @@ SELECT interval '1 2:03' hour to minute struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Interval string does not match day-time format of `[+|-]h:m`, `INTERVAL [+|-]'[+|-]h:m' HOUR TO MINUTE` when cast to interval hour to minute: 1 2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16) - -== SQL == -SELECT interval '1 2:03' hour to minute -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -187,12 +162,7 @@ SELECT interval '1 2:03:04' hour to minute struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Interval string does not match day-time format of `[+|-]h:m`, `INTERVAL [+|-]'[+|-]h:m' HOUR TO MINUTE` when cast to interval hour to minute: 1 2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16) - -== SQL == -SELECT interval '1 2:03:04' hour to minute -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -201,12 +171,7 @@ SELECT interval '1 2:03' hour to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Interval string does not match day-time format of `[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 1 2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16) - -== SQL == -SELECT interval '1 2:03' hour to second -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -215,12 +180,7 @@ SELECT interval '1 2:03:04' hour to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Interval string does not match day-time format of `[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 1 2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16) - -== SQL == -SELECT interval '1 2:03:04' hour to second -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -229,12 +189,7 @@ SELECT interval '1 2:03' minute to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Interval string does not match day-time format of `[+|-]m:s.n`, `INTERVAL [+|-]'[+|-]m:s.n' MINUTE TO SECOND` when cast to interval minute to second: 1 2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16) - -== SQL == -SELECT interval '1 2:03' minute to second -----------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -243,9 +198,4 @@ SELECT interval '1 2:03:04' minute to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Interval string does not match day-time format of `[+|-]m:s.n`, `INTERVAL [+|-]'[+|-]m:s.n' MINUTE TO SECOND` when cast to interval minute to second: 1 2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16) - -== SQL == -SELECT interval '1 2:03:04' minute 
to second
-----------------^^^
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/join.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/join.sql.out
index 465f1d3b04158..749c23a697bde 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/join.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/join.sql.out
@@ -546,7 +546,7 @@ SELECT '' AS `xxx`, i, k, t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Reference 'i' is ambiguous, could be: spark_catalog.default.j1_tbl.i, spark_catalog.default.j2_tbl.i.; line 1 pos 20
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -3235,7 +3235,7 @@ select * from
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Reference 'f1' is ambiguous, could be: j.f1, j.f1.; line 2 pos 63
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -3245,7 +3245,7 @@ select * from
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-[UNRESOLVED_COLUMN] A column or function parameter with name `y`.`f1` cannot be resolved. Did you mean one of the following? [`j`.`f1`, `j`.`f1`, `x`.`q1`, `x`.`q2`]; line 2 pos 63
+{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`y`.`f1`","`j`.`f1`, `j`.`f1`, `x`.`q1`, `x`.`q2`"],"queryContext":[]}


 -- !query
@@ -3264,7 +3264,7 @@ select t1.uunique1 from
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-[UNRESOLVED_COLUMN] A column or function parameter with name `t1`.`uunique1` cannot be resolved. Did you mean one of the following? [`t1`.`unique1`, `t2`.`unique1`, `t1`.`unique2`, `t2`.`unique2`, `t1`.`hundred`, `t2`.`hundred`, `t1`.`stringu1`, `t1`.`even`, `t1`.`four`, `t1`.`string4`, `t2`.`stringu1`, `t1`.`stringu2`, `t1`.`ten`, `t1`.`tenthous`, `t2`.`even`, `t2`.`four`, `t1`.`odd`, `t2`.`string4`, `t2`.`stringu2`, `t2`.`ten`, `t2`.`tenthous`, `t1`.`thousand`, `t1`.`twenty`, `t1`.`two`, `t1`.`fivethous`, `t2`.`odd`, `t2`.`thousand`, `t2`.`twenty`, `t2`.`two`, `t2`.`fivethous`, `t1`.`twothousand`, `t2`.`twothousand`]; line 1 pos 7
+{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`t1`.`uunique1`","`t1`.`unique1`, `t2`.`unique1`, `t1`.`unique2`, `t2`.`unique2`, `t1`.`hundred`, `t2`.`hundred`, `t1`.`stringu1`, `t1`.`even`, `t1`.`four`, `t1`.`string4`, `t2`.`stringu1`, `t1`.`stringu2`, `t1`.`ten`, `t1`.`tenthous`, `t2`.`even`, `t2`.`four`, `t1`.`odd`, `t2`.`string4`, `t2`.`stringu2`, `t2`.`ten`, `t2`.`tenthous`, `t1`.`thousand`, `t1`.`twenty`, `t1`.`two`, `t1`.`fivethous`, `t2`.`odd`, `t2`.`thousand`, `t2`.`twenty`, `t2`.`two`, `t2`.`fivethous`, `t1`.`twothousand`, `t2`.`twothousand`"],"queryContext":[]}


 -- !query
@@ -3274,7 +3274,7 @@ select t2.uunique1 from
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-[UNRESOLVED_COLUMN] A column or function parameter with name `t2`.`uunique1` cannot be resolved. Did you mean one of the following? [`t2`.`unique1`, `t1`.`unique1`, `t2`.`unique2`, `t1`.`unique2`, `t2`.`hundred`, `t1`.`hundred`, `t2`.`stringu1`, `t2`.`even`, `t2`.`four`, `t2`.`string4`, `t1`.`stringu1`, `t2`.`stringu2`, `t2`.`ten`, `t2`.`tenthous`, `t1`.`even`, `t1`.`four`, `t2`.`odd`, `t1`.`string4`, `t1`.`stringu2`, `t1`.`ten`, `t1`.`tenthous`, `t2`.`thousand`, `t2`.`twenty`, `t2`.`two`, `t2`.`fivethous`, `t1`.`odd`, `t1`.`thousand`, `t1`.`twenty`, `t1`.`two`, `t1`.`fivethous`, `t2`.`twothousand`, `t1`.`twothousand`]; line 1 pos 7
+{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`t2`.`uunique1`","`t2`.`unique1`, `t1`.`unique1`, `t2`.`unique2`, `t1`.`unique2`, `t2`.`hundred`, `t1`.`hundred`, `t2`.`stringu1`, `t2`.`even`, `t2`.`four`, `t2`.`string4`, `t1`.`stringu1`, `t2`.`stringu2`, `t2`.`ten`, `t2`.`tenthous`, `t1`.`even`, `t1`.`four`, `t2`.`odd`, `t1`.`string4`, `t1`.`stringu2`, `t1`.`ten`, `t1`.`tenthous`, `t2`.`thousand`, `t2`.`twenty`, `t2`.`two`, `t2`.`fivethous`, `t1`.`odd`, `t1`.`thousand`, `t1`.`twenty`, `t1`.`two`, `t1`.`fivethous`, `t2`.`twothousand`, `t1`.`twothousand`"],"queryContext":[]}


 -- !query
@@ -3284,7 +3284,7 @@ select uunique1 from
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-[UNRESOLVED_COLUMN] A column or function parameter with name `uunique1` cannot be resolved. Did you mean one of the following? [`t1`.`unique1`, `t2`.`unique1`, `t1`.`unique2`, `t2`.`unique2`, `t1`.`even`, `t2`.`even`, `t1`.`four`, `t2`.`four`, `t1`.`ten`, `t2`.`ten`, `t1`.`hundred`, `t2`.`hundred`, `t1`.`odd`, `t2`.`odd`, `t1`.`two`, `t2`.`two`, `t1`.`stringu1`, `t2`.`stringu1`, `t1`.`twenty`, `t2`.`twenty`, `t1`.`string4`, `t2`.`string4`, `t1`.`stringu2`, `t2`.`stringu2`, `t1`.`tenthous`, `t2`.`tenthous`, `t1`.`thousand`, `t2`.`thousand`, `t1`.`fivethous`, `t2`.`fivethous`, `t1`.`twothousand`, `t2`.`twothousand`]; line 1 pos 7
+{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`uunique1`","`t1`.`unique1`, `t2`.`unique1`, `t1`.`unique2`, `t2`.`unique2`, `t1`.`even`, `t2`.`even`, `t1`.`four`, `t2`.`four`, `t1`.`ten`, `t2`.`ten`, `t1`.`hundred`, `t2`.`hundred`, `t1`.`odd`, `t2`.`odd`, `t1`.`two`, `t2`.`two`, `t1`.`stringu1`, `t2`.`stringu1`, `t1`.`twenty`, `t2`.`twenty`, `t1`.`string4`, `t2`.`string4`, `t1`.`stringu2`, `t2`.`stringu2`, `t1`.`tenthous`, `t2`.`tenthous`, `t1`.`thousand`, `t2`.`thousand`, `t1`.`fivethous`, `t2`.`fivethous`, `t1`.`twothousand`, `t2`.`twothousand`"],"queryContext":[]}


 -- !query
@@ -3484,7 +3484,7 @@ select f1,g from int4_tbl a, (select f1 as g) ss
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-[UNRESOLVED_COLUMN] A column or function parameter with name `f1` cannot be resolved. Did you mean one of the following? []; line 1 pos 37
+{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`f1`",""],"queryContext":[]}


 -- !query
@@ -3493,7 +3493,7 @@ select f1,g from int4_tbl a, (select a.f1 as g) ss
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-[UNRESOLVED_COLUMN] A column or function parameter with name `a`.`f1` cannot be resolved. Did you mean one of the following? []; line 1 pos 37
+{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`a`.`f1`",""],"queryContext":[]}


 -- !query
@@ -3502,7 +3502,7 @@ select f1,g from int4_tbl a cross join (select f1 as g) ss
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-[UNRESOLVED_COLUMN] A column or function parameter with name `f1` cannot be resolved. Did you mean one of the following? []; line 1 pos 47
+{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`f1`",""],"queryContext":[]}


 -- !query
@@ -3511,7 +3511,7 @@ select f1,g from int4_tbl a cross join (select a.f1 as g) ss
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-[UNRESOLVED_COLUMN] A column or function parameter with name `a`.`f1` cannot be resolved. Did you mean one of the following? []; line 1 pos 47
+{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`a`.`f1`",""],"queryContext":[]}


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/limit.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/limit.sql.out
index ece34bf3f1c66..5798461b9ce44 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/limit.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/limit.sql.out
@@ -131,7 +131,7 @@ select * from int8_tbl limit (case when random() < 0.5 then bigint(null) end)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-The limit expression must evaluate to a constant value, but got CASE WHEN (_nondeterministic < CAST(0.5BD AS DOUBLE)) THEN CAST(NULL AS BIGINT) END
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -140,7 +140,7 @@ select * from int8_tbl offset (case when random() < 0.5 then bigint(null) end)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-The offset expression must evaluate to a constant value, but got CASE WHEN (_nondeterministic < CAST(0.5BD AS DOUBLE)) THEN CAST(NULL AS BIGINT) END
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/numeric.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/numeric.sql.out
index c4c24d5ed8615..df3df33ff4ffc 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/numeric.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/numeric.sql.out
@@ -3581,10 +3581,7 @@ INSERT INTO num_exp_power_10_ln VALUES (7,1716699575118597095.423308199106402476
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-decimal can only support precision up to 38
-== SQL ==
-INSERT INTO num_exp_power_10_ln VALUES (7,1716699575118597095.42330819910640247627)
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -3827,7 +3824,7 @@ INSERT INTO num_result SELECT t1.id, t2.id, t1.val, t2.val, t1.val * t2.val
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-`spark_catalog`.`default`.`num_result` requires that the data to be inserted have the same number of columns as the target table: target table has 3 column(s) but the inserted data has 5 column(s), including 0 partition column(s) having constant value(s).
+{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -4693,7 +4690,7 @@ SELECT '' AS to_number_2, to_number('-34,338,492.654,878', '99G999G999D999G999' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'to_number('-34,338,492.654,878', '99G999G999D999G999')' due to data type mismatch: Thousands separators (, or G) may not appear after the decimal point in the number format: '99G999G999D999G999'; line 1 pos 27 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -4742,7 +4739,7 @@ SELECT '' AS to_number_15, to_number('123,000','999G') struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'to_number('123,000', '999G')' due to data type mismatch: Thousands separators (, or G) must have digits in between them in the number format: '999G'; line 1 pos 27 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_having.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_having.sql.out index 87e0abb285d85..3b4adb5aebe28 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_having.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_having.sql.out @@ -140,7 +140,7 @@ SELECT a FROM test_having HAVING min(a) < max(a) struct<> -- !query output org.apache.spark.sql.AnalysisException -grouping expressions sequence is empty, and 'spark_catalog.default.test_having.a' is not an aggregate function. Wrap '(min(spark_catalog.default.test_having.a) AS `min(a#x)`, max(spark_catalog.default.test_having.a) AS `max(a#x)`)' in windowing function(s) or wrap 'spark_catalog.default.test_having.a' in first() (or first_value) if you don't care which value you get. +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -149,7 +149,7 @@ SELECT 1 AS one FROM test_having HAVING a > 1 struct<> -- !query output org.apache.spark.sql.AnalysisException -[UNRESOLVED_COLUMN] A column or function parameter with name `a` cannot be resolved. Did you mean one of the following? [`one`]; line 1 pos 40 +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`a`","`one`"],"queryContext":[]} -- !query @@ -173,11 +173,8 @@ SELECT 1 AS one FROM test_having WHERE 1/a = 1 HAVING 1 < 2 -- !query schema struct<> -- !query output -org.apache.spark.SparkArithmeticException -[DIVIDE_BY_ZERO] Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 40) == -...1 AS one FROM test_having WHERE 1/a = 1 HAVING 1 < 2 - ^^^ +org.apache.spark.SparkException +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_implicit.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_implicit.sql.out index eea7c3de20336..1021d9027e7c5 100755 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_implicit.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_implicit.sql.out @@ -119,7 +119,7 @@ SELECT count(*) FROM test_missing_target GROUP BY a ORDER BY b struct<> -- !query output org.apache.spark.sql.AnalysisException -[UNRESOLVED_COLUMN] A column or function parameter with name `b` cannot be resolved. Did you mean one of the following? 
[`count(1)`]; line 1 pos 61 +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`b`","`count(1)`"],"queryContext":[]} -- !query @@ -200,7 +200,7 @@ SELECT c, count(*) FROM test_missing_target GROUP BY 3 struct<> -- !query output org.apache.spark.sql.AnalysisException -GROUP BY position 3 is not in select list (valid range is [1, 2]); line 1 pos 53 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -211,7 +211,7 @@ SELECT count(*) FROM test_missing_target x, test_missing_target y struct<> -- !query output org.apache.spark.sql.AnalysisException -Reference 'b' is ambiguous, could be: x.b, y.b.; line 3 pos 10 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -324,7 +324,7 @@ SELECT count(a) FROM test_missing_target GROUP BY a ORDER BY b struct<> -- !query output org.apache.spark.sql.AnalysisException -[UNRESOLVED_COLUMN] A column or function parameter with name `b` cannot be resolved. Did you mean one of the following? [`count(a)`]; line 1 pos 61 +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`b`","`count(a)`"],"queryContext":[]} -- !query @@ -387,7 +387,7 @@ SELECT count(x.a) FROM test_missing_target x, test_missing_target y struct<> -- !query output org.apache.spark.sql.AnalysisException -Reference 'b' is ambiguous, could be: x.b, y.b.; line 3 pos 10 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -411,7 +411,7 @@ SELECT count(b) FROM test_missing_target x, test_missing_target y struct<> -- !query output org.apache.spark.sql.AnalysisException -Reference 'b' is ambiguous, could be: x.b, y.b.; line 1 pos 13 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/strings.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/strings.sql.out index 81f964b4bea07..586fe4446fef5 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/strings.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/strings.sql.out @@ -443,7 +443,7 @@ SELECT 'maca' LIKE 'm%aca' ESCAPE '%' AS `true` struct<> -- !query output org.apache.spark.sql.AnalysisException -the pattern 'm%aca' is invalid, the escape character is not allowed to precede 'a' +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -452,7 +452,7 @@ SELECT 'maca' NOT LIKE 'm%aca' ESCAPE '%' AS `false` struct<> -- !query output org.apache.spark.sql.AnalysisException -the pattern 'm%aca' is invalid, the escape character is not allowed to precede 'a' +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -461,7 +461,7 @@ SELECT 'ma%a' LIKE 'm%a%%a' ESCAPE '%' AS `true` struct<> -- !query output org.apache.spark.sql.AnalysisException -the pattern 'm%a%%a' is invalid, the escape character is not allowed to precede 'a' +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -470,7 +470,7 @@ SELECT 'ma%a' NOT LIKE 'm%a%%a' ESCAPE '%' AS `false` struct<> -- !query output org.apache.spark.sql.AnalysisException -the pattern 'm%a%%a' is invalid, the escape character is not allowed to precede 'a' +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -479,7 +479,7 @@ SELECT 'bear' LIKE 'b_ear' ESCAPE '_' AS `true` struct<> -- !query output org.apache.spark.sql.AnalysisException -the pattern 'b_ear' is invalid, the escape character is not allowed to precede 'e' +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ 
-488,7 +488,7 @@ SELECT 'bear' NOT LIKE 'b_ear' ESCAPE '_' AS `false` struct<> -- !query output org.apache.spark.sql.AnalysisException -the pattern 'b_ear' is invalid, the escape character is not allowed to precede 'e' +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -497,7 +497,7 @@ SELECT 'be_r' LIKE 'b_e__r' ESCAPE '_' AS `true` struct<> -- !query output org.apache.spark.sql.AnalysisException -the pattern 'b_e__r' is invalid, the escape character is not allowed to precede 'e' +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -506,7 +506,7 @@ SELECT 'be_r' NOT LIKE 'b_e__r' ESCAPE '_' AS `false` struct<> -- !query output org.apache.spark.sql.AnalysisException -the pattern 'b_e__r' is invalid, the escape character is not allowed to precede 'e' +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out index 65db4be077cc7..474e6d0b81fa4 100755 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out @@ -62,10 +62,7 @@ select string('four: ') || 2+2 struct<> -- !query output org.apache.spark.SparkNumberFormatException -[CAST_INVALID_INPUT] The value 'four: 2' of the type "STRING" cannot be cast to "BIGINT" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 8) == -select string('four: ') || 2+2 - ^^^^^^^^^^^^^^^^^^^^^^^ +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'four: 2'","\"STRING\"","\"BIGINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":29,"fragment":"string('four: ') || 2+"}]} -- !query @@ -74,10 +71,7 @@ select 'four: ' || 2+2 struct<> -- !query output org.apache.spark.SparkNumberFormatException -[CAST_INVALID_INPUT] The value 'four: 2' of the type "STRING" cannot be cast to "BIGINT" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. 
-== SQL(line 1, position 8) ==
-select 'four: ' || 2+2
-       ^^^^^^^^^^^^^^^
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'four: 2'","\"STRING\"","\"BIGINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":21,"fragment":"'four: ' || 2+"}]}


 -- !query
@@ -282,7 +276,7 @@ select format_string('%0$s', 'Hello')
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-[INVALID_PARAMETER_VALUE] The value of parameter(s) 'strfmt' in `format_string` is invalid: expects %1$, %2$ and so on, but got %0$.; line 1 pos 7
+{"errorClass":"INVALID_PARAMETER_VALUE","sqlState":"22023","messageParameters":["strfmt","`format_string`","expects %1$, %2$ and so on, but got %0$."],"queryContext":[]}


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/union.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/union.sql.out
index 18bc925cefc27..dfa5150ce4fb5 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/union.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/union.sql.out
@@ -76,12 +76,7 @@ SELECT 1 AS three UNION SELECT 2 UNION SELECT 3 ORDER BY 1
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-[PARSE_SYNTAX_ERROR] Syntax error at or near 'SELECT'(line 1, pos 39)
-
-== SQL ==
-SELECT 1 AS three UNION SELECT 2 UNION SELECT 3 ORDER BY 1
----------------------------------------^^^
+{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'SELECT'",""],"queryContext":[]}


 -- !query
@@ -90,12 +85,7 @@ SELECT 1 AS two UNION SELECT 2 UNION SELECT 2 ORDER BY 1
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-[PARSE_SYNTAX_ERROR] Syntax error at or near 'SELECT'(line 1, pos 37)
-
-== SQL ==
-SELECT 1 AS two UNION SELECT 2 UNION SELECT 2 ORDER BY 1
--------------------------------------^^^
+{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'SELECT'",""],"queryContext":[]}


 -- !query
@@ -167,12 +157,7 @@ SELECT 1.1 AS three UNION SELECT 2 UNION SELECT 3 ORDER BY 1
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-[PARSE_SYNTAX_ERROR] Syntax error at or near 'SELECT'(line 1, pos 41)
-
-== SQL ==
-SELECT 1.1 AS three UNION SELECT 2 UNION SELECT 3 ORDER BY 1
------------------------------------------^^^
+{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'SELECT'",""],"queryContext":[]}


 -- !query
@@ -181,12 +166,7 @@ SELECT double(1.1) AS two UNION SELECT 2 UNION SELECT double(2.0) ORDER BY 1
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-[PARSE_SYNTAX_ERROR] Syntax error at or near 'SELECT'(line 1, pos 47)
-
-== SQL ==
-SELECT double(1.1) AS two UNION SELECT 2 UNION SELECT double(2.0) ORDER BY 1
------------------------------------------------ ^^^
+{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'SELECT'",""],"queryContext":[]}


 -- !query
@@ -377,12 +357,7 @@ struct
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-[PARSE_SYNTAX_ERROR] Syntax error at or near 'SELECT'(line 1, pos 20)
-
-== SQL ==
-(SELECT 1,2,3 UNION SELECT 4,5,6) INTERSECT SELECT 4,5,6
---------------------^^^
+{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'SELECT'",""],"queryContext":[]}


 -- !query
@@ -391,12 +366,7 @@ org.apache.spark.sql.catalyst.parser.ParseException
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-[PARSE_SYNTAX_ERROR] Syntax error at or near 'SELECT'(line 1, pos 20)
-
-== SQL ==
-(SELECT 1,2,3 UNION SELECT 4,5,6 ORDER BY 1,2) INTERSECT SELECT 4,5,6
---------------------^^^
+{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'SELECT'",""],"queryContext":[]}


 -- !query
@@ -405,12 +375,7 @@ org.apache.spark.sql.catalyst.parser.ParseException
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-[PARSE_SYNTAX_ERROR] Syntax error at or near 'SELECT'(line 1, pos 20)
-
-== SQL ==
-(SELECT 1,2,3 UNION SELECT 4,5,6) EXCEPT SELECT 4,5,6
---------------------^^^
+{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'SELECT'",""],"queryContext":[]}


 -- !query
@@ -419,12 +384,7 @@ org.apache.spark.sql.catalyst.parser.ParseException
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-[PARSE_SYNTAX_ERROR] Syntax error at or near 'SELECT'(line 1, pos 20)
-
-== SQL ==
-(SELECT 1,2,3 UNION SELECT 4,5,6 ORDER BY 1,2) EXCEPT SELECT 4,5,6
---------------------^^^
+{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'SELECT'",""],"queryContext":[]}


 -- !query
@@ -565,7 +525,7 @@ SELECT q1 FROM int8_tbl EXCEPT SELECT q2 FROM int8_tbl ORDER BY q2 LIMIT 1
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-[UNRESOLVED_COLUMN] A column or function parameter with name `q2` cannot be resolved. Did you mean one of the following? [`int8_tbl`.`q1`]; line 1 pos 64
+{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`q2`","`int8_tbl`.`q1`"],"queryContext":[]}


 -- !query
@@ -724,12 +684,7 @@ SELECT cast('3.4' as decimal(38, 18)) UNION SELECT 'foo'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-[PARSE_SYNTAX_ERROR] Syntax error at or near 'SELECT'(line 1, pos 44)
-
-== SQL ==
-SELECT cast('3.4' as decimal(38, 18)) UNION SELECT 'foo'
---------------------------------------------^^^
+{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'SELECT'",""],"queryContext":[]}


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out
index 127e98091691b..a2d16a5a4435a 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out
@@ -221,8 +221,8 @@ from range(9223372036854775804, 9223372036854775807) x
 -- !query schema
 struct<>
 -- !query output
-org.apache.spark.SparkArithmeticException
-[ARITHMETIC_OVERFLOW] long overflow. Use 'try_add' to tolerate overflow and return NULL instead. If necessary set spark.sql.ansi.enabled to "false" to bypass this error.
+org.apache.spark.SparkException
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -231,8 +231,8 @@ from range(-9223372036854775806, -9223372036854775805) x
 -- !query schema
 struct<>
 -- !query output
-org.apache.spark.SparkArithmeticException
-[ARITHMETIC_OVERFLOW] long overflow. Use 'try_add' to tolerate overflow and return NULL instead. If necessary set spark.sql.ansi.enabled to "false" to bypass this error.
+org.apache.spark.SparkException
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -459,12 +459,7 @@ window w as (order by f_numeric range between
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-[CAST_INVALID_INPUT] The value 'NaN' of the type "STRING" cannot be cast to "INT" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 3, position 13) ==
-window w as (order by f_numeric range between
-             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-              1.1 preceding and 'NaN' following)
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'NaN'","\"STRING\"","\"INT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":82,"stopIndex":162,"fragment":"(order by f_numeric range between\n 1.1 preceding and 'NaN' following"}]}


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part3.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part3.sql.out
index 68fde56dba101..7b49594f85d7f 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part3.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part3.sql.out
@@ -69,11 +69,7 @@ insert into datetimes values
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-failed to evaluate expression CAST('11:00 BST' AS TIMESTAMP): [CAST_INVALID_INPUT] The value '11:00 BST' of the type "STRING" cannot be cast to "TIMESTAMP" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 2, position 24) ==
-(1, timestamp '11:00', cast ('11:00 BST' as timestamp), cast ('1 year' as timestamp), ...
-                       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-; line 1 pos 22
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -246,7 +242,7 @@ from t1 where f1 = f2
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(PARTITION BY spark_catalog.default.t1.f1 RANGE BETWEEN 1 PRECEDING AND 1 FOLLOWING)' due to data type mismatch: A range window frame cannot be used in an unordered window specification.; line 1 pos 24
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -296,7 +292,7 @@ SELECT * FROM empsalary WHERE row_number() OVER (ORDER BY salary) < 10
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-It is not allowed to use window functions inside WHERE clause
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -305,10 +301,7 @@ SELECT * FROM empsalary INNER JOIN tenk1 ON row_number() OVER (ORDER BY salary)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-
-The query operator `Join` contains one or more unsupported
-expression types Aggregate, Window or Generate.
-Invalid expressions: [row_number() OVER (ORDER BY spark_catalog.default.empsalary.salary ASC NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)]
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -317,10 +310,7 @@ SELECT rank() OVER (ORDER BY 1), count(*) FROM empsalary GROUP BY 1
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-
-The query operator `Aggregate` contains one or more unsupported
-expression types Aggregate, Window or Generate.
-Invalid expressions: [RANK() OVER (ORDER BY 1 ASC NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)]
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -329,12 +319,7 @@ SELECT * FROM rank() OVER (ORDER BY random())
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-[PARSE_SYNTAX_ERROR] Syntax error at or near 'BY'(line 1, pos 33)
-
-== SQL ==
-SELECT * FROM rank() OVER (ORDER BY random())
----------------------------------^^^
+{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'BY'",""],"queryContext":[]}


 -- !query
@@ -343,7 +328,7 @@ SELECT * FROM empsalary WHERE (rank() OVER (ORDER BY random())) > 10
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-It is not allowed to use window functions inside WHERE clause
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -352,7 +337,7 @@ SELECT * FROM empsalary WHERE rank() OVER (ORDER BY random())
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-It is not allowed to use window functions inside WHERE clause
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -361,12 +346,7 @@ select rank() OVER (PARTITION BY four, ORDER BY ten) FROM tenk1
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-[PARSE_SYNTAX_ERROR] Syntax error at or near 'BY': extra input 'BY'(line 1, pos 45)
-
-== SQL ==
-select rank() OVER (PARTITION BY four, ORDER BY ten) FROM tenk1
----------------------------------------------^^^
+{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'BY'",": extra input 'BY'"],"queryContext":[]}


 -- !query
@@ -375,7 +355,7 @@ SELECT range(1, 100) OVER () FROM empsalary
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Undefined function: range. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.range.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -384,7 +364,7 @@ SELECT ntile(0) OVER (ORDER BY ten), ten, four FROM tenk1
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'ntile(0)' due to data type mismatch: Buckets expression must be positive, but got: 0; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -393,7 +373,7 @@ SELECT nth_value(four, 0) OVER (ORDER BY ten), ten, four FROM tenk1
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'nth_value(spark_catalog.default.tenk1.four, 0)' due to data type mismatch: The 'offset' argument of nth_value must be greater than zero but it is 0.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part4.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part4.sql.out
index 60806a97b5bc8..9ad4361d40f01 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part4.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part4.sql.out
@@ -498,8 +498,4 @@ FROM (VALUES(1,1),(2,2),(3,(cast('nan' as int))),(4,3),(5,4)) t(a,b)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-failed to evaluate expression CAST('nan' AS INT): [CAST_INVALID_INPUT] The value 'nan' of the type "STRING" cannot be cast to "INT" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
-== SQL(line 3, position 29) ==
-FROM (VALUES(1,1),(2,2),(3,(cast('nan' as int))),(4,3),(5,4)) t(a,b)
-                            ^^^^^^^^^^^^^^^^^^
-; line 3 pos 6
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/with.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/with.sql.out
index 0a4e2f179d8c2..d9e8c33975830 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/with.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/with.sql.out
@@ -216,7 +216,7 @@ SELECT * FROM outermost ORDER BY 1
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Table or view not found: outermost; line 4 pos 23
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -346,12 +346,7 @@ create table foo (with baz)
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-DataType baz is not supported.(line 1, pos 23)
-
-== SQL ==
-create table foo (with baz)
------------------------^^^
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -360,12 +355,7 @@ create table foo (with ordinality)
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-DataType ordinality is not supported.(line 1, pos 23)
-
-== SQL ==
-create table foo (with ordinality)
------------------------^^^
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -382,7 +372,7 @@ WITH test AS (SELECT 42) INSERT INTO test VALUES (1)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Table not found: test; line 1 pos 37
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/query_regex_column.sql.out b/sql/core/src/test/resources/sql-tests/results/query_regex_column.sql.out
index e454dc5e041f2..5de34d4ef8189 100644
--- a/sql/core/src/test/resources/sql-tests/results/query_regex_column.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/query_regex_column.sql.out
@@ -33,7 +33,7 @@ SELECT `(a)?+.+` FROM testData2 WHERE a = 1
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-[UNRESOLVED_COLUMN] A column or function parameter with name `(a)?+.+` cannot be resolved. Did you mean one of the following? [`testdata2`.`A`, `testdata2`.`B`, `testdata2`.`c`, `testdata2`.`d`]; line 1 pos 7
+{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`(a)?+.+`","`testdata2`.`A`, `testdata2`.`B`, `testdata2`.`c`, `testdata2`.`d`"],"queryContext":[]}


 -- !query
@@ -42,7 +42,7 @@ SELECT t.`(a)?+.+` FROM testData2 t WHERE a = 1
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-[UNRESOLVED_COLUMN] A column or function parameter with name `t`.`(a)?+.+` cannot be resolved. Did you mean one of the following? [`t`.`A`, `t`.`B`, `t`.`c`, `t`.`d`]; line 1 pos 7
+{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`t`.`(a)?+.+`","`t`.`A`, `t`.`B`, `t`.`c`, `t`.`d`"],"queryContext":[]}


 -- !query
@@ -51,7 +51,7 @@ SELECT `(a|b)` FROM testData2 WHERE a = 2
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-[UNRESOLVED_COLUMN] A column or function parameter with name `(a|b)` cannot be resolved. Did you mean one of the following? [`testdata2`.`A`, `testdata2`.`B`, `testdata2`.`c`, `testdata2`.`d`]; line 1 pos 7
+{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`(a|b)`","`testdata2`.`A`, `testdata2`.`B`, `testdata2`.`c`, `testdata2`.`d`"],"queryContext":[]}


 -- !query
@@ -60,7 +60,7 @@ SELECT `(a|b)?+.+` FROM testData2 WHERE a = 2
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-[UNRESOLVED_COLUMN] A column or function parameter with name `(a|b)?+.+` cannot be resolved. Did you mean one of the following? [`testdata2`.`A`, `testdata2`.`B`, `testdata2`.`c`, `testdata2`.`d`]; line 1 pos 7
+{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`(a|b)?+.+`","`testdata2`.`A`, `testdata2`.`B`, `testdata2`.`c`, `testdata2`.`d`"],"queryContext":[]}


 -- !query
@@ -69,7 +69,7 @@ SELECT SUM(`(a|b)?+.+`) FROM testData2
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-[UNRESOLVED_COLUMN] A column or function parameter with name `(a|b)?+.+` cannot be resolved. Did you mean one of the following? [`testdata2`.`A`, `testdata2`.`B`, `testdata2`.`c`, `testdata2`.`d`]; line 1 pos 11
+{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`(a|b)?+.+`","`testdata2`.`A`, `testdata2`.`B`, `testdata2`.`c`, `testdata2`.`d`"],"queryContext":[]}


 -- !query
@@ -78,7 +78,7 @@ SELECT SUM(`(a)`) FROM testData2
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-[UNRESOLVED_COLUMN] A column or function parameter with name `(a)` cannot be resolved. Did you mean one of the following? [`testdata2`.`A`, `testdata2`.`B`, `testdata2`.`c`, `testdata2`.`d`]; line 1 pos 11
+{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`(a)`","`testdata2`.`A`, `testdata2`.`B`, `testdata2`.`c`, `testdata2`.`d`"],"queryContext":[]}


 -- !query
@@ -298,7 +298,7 @@ SELECT SUM(a) FROM testdata3 GROUP BY `(a)`
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-[UNRESOLVED_COLUMN] A column or function parameter with name `(a)` cannot be resolved. Did you mean one of the following? [`testdata3`.`a`, `testdata3`.`b`]; line 1 pos 38
+{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`(a)`","`testdata3`.`a`, `testdata3`.`b`"],"queryContext":[]}


 -- !query
@@ -307,4 +307,4 @@ SELECT SUM(a) FROM testdata3 GROUP BY `(a)?+.+`
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-[UNRESOLVED_COLUMN] A column or function parameter with name `(a)?+.+` cannot be resolved. Did you mean one of the following? [`testdata3`.`a`, `testdata3`.`b`]; line 1 pos 38
+{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`(a)?+.+`","`testdata3`.`a`, `testdata3`.`b`"],"queryContext":[]}
diff --git a/sql/core/src/test/resources/sql-tests/results/random.sql.out b/sql/core/src/test/resources/sql-tests/results/random.sql.out
index e3edddb26d90c..51ef5efba58ec 100644
--- a/sql/core/src/test/resources/sql-tests/results/random.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/random.sql.out
@@ -37,7 +37,7 @@ SELECT rand(1.0)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'rand(1.0BD)' due to data type mismatch: argument 1 requires (int or bigint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -78,4 +78,4 @@ SELECT rand('1')
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'rand('1')' due to data type mismatch: argument 1 requires (int or bigint) type, however, ''1'' is of string type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
diff --git a/sql/core/src/test/resources/sql-tests/results/regexp-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/regexp-functions.sql.out
index 6a07df854beb5..b9da66b050648 100644
--- a/sql/core/src/test/resources/sql-tests/results/regexp-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/regexp-functions.sql.out
@@ -131,7 +131,7 @@ SELECT regexp_extract('1a 2b 14m', '(?l)')
 struct<>
 -- !query output
 org.apache.spark.SparkRuntimeException
-[INVALID_PARAMETER_VALUE] The value of parameter(s) 'regexp' in `regexp_extract` is invalid: (?l)
+{"errorClass":"INVALID_PARAMETER_VALUE","sqlState":"22023","messageParameters":["regexp","`regexp_extract`","(?l)"],"queryContext":[]}


 -- !query
@@ -265,8 +265,8 @@ SELECT regexp_extract_all('abc', col0, 1) FROM VALUES('], [') AS t(col0)
 -- !query schema
 struct<>
 -- !query output
-org.apache.spark.SparkRuntimeException
-[INVALID_PARAMETER_VALUE] The value of parameter(s) 'regexp' in `regexp_extract_all` is invalid: ], [
+org.apache.spark.SparkException
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -283,7 +283,7 @@ SELECT regexp_replace('healthy, wealthy, and wise', '\\w+thy', 'something', -2)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'regexp_replace('healthy, wealthy, and wise', '\\w+thy', 'something', -2)' due to data type mismatch: Position expression must be positive, but got: -2; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -292,7 +292,7 @@ SELECT regexp_replace('healthy, wealthy, and wise', '\\w+thy', 'something', 0)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'regexp_replace('healthy, wealthy, and wise', '\\w+thy', 'something', 0)' due to data type mismatch: Position expression must be positive, but got: 0; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -564,5 +564,5 @@ SELECT regexp_instr('abc', col0, 1) FROM VALUES(') ?') AS t(col0)
 -- !query schema
 struct<>
 -- !query output
-org.apache.spark.SparkRuntimeException
-[INVALID_PARAMETER_VALUE] The value of parameter(s) 'regexp' in `regexp_instr` is invalid: ) ?
+org.apache.spark.SparkException
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
diff --git a/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out b/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out
index 1fc07d7151374..b75e350ec60f9 100644
--- a/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out
@@ -166,12 +166,7 @@ SHOW TABLE EXTENDED
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-[PARSE_SYNTAX_ERROR] Syntax error at or near end of input(line 1, pos 19)
-
-== SQL ==
-SHOW TABLE EXTENDED
--------------------^^^
+{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["end of input",""],"queryContext":[]}


 -- !query
@@ -191,12 +186,7 @@ SHOW TABLE EXTENDED PARTITION(c='Us', d=1)
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-[PARSE_SYNTAX_ERROR] Syntax error at or near 'PARTITION'(line 1, pos 20)
-
-== SQL ==
-SHOW TABLE EXTENDED PARTITION(c='Us', d=1)
---------------------^^^
+{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'PARTITION'",""],"queryContext":[]}


 -- !query
@@ -205,7 +195,7 @@ SHOW TABLE EXTENDED LIKE 'show_t*' PARTITION(c='Us', d=1)
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.analysis.NoSuchTableException
-Table or view 'show_t*' not found in database 'showdb'
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -214,7 +204,7 @@ SHOW TABLE EXTENDED LIKE 'show_t1' PARTITION(c='Us')
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Partition spec is invalid. The spec (c) must match the partition spec (c, d) defined in table '`spark_catalog`.`showdb`.`show_t1`'
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -223,7 +213,7 @@ SHOW TABLE EXTENDED LIKE 'show_t1' PARTITION(a='Us', d=1)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-a is not a valid partition column in table `spark_catalog`.`showdb`.`show_t1`.
+{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -232,9 +222,7 @@ SHOW TABLE EXTENDED LIKE 'show_t1' PARTITION(c='Ch', d=1) struct<> -- !query output org.apache.spark.sql.catalyst.analysis.NoSuchPartitionException -Partition not found in table 'show_t1' database 'showdb': -c -> Ch -d -> 1 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/show-views.sql.out b/sql/core/src/test/resources/sql-tests/results/show-views.sql.out index 8dc2f663ba067..1fafe63a1e498 100644 --- a/sql/core/src/test/resources/sql-tests/results/show-views.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/show-views.sql.out @@ -139,7 +139,7 @@ SHOW VIEWS IN wrongdb LIKE 'view_*' struct<> -- !query output org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException -Database 'wrongdb' not found +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out b/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out index 8a858343b240e..db3c245a25920 100644 --- a/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out @@ -91,7 +91,7 @@ SHOW COLUMNS IN badtable FROM showdb struct<> -- !query output org.apache.spark.sql.AnalysisException -Table or view not found: showdb.badtable; line 1 pos 16 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -109,7 +109,7 @@ SHOW COLUMNS IN showdb.showcolumn1 FROM baddb struct<> -- !query output org.apache.spark.sql.AnalysisException -SHOW COLUMNS with conflicting databases: 'baddb' != 'showdb' +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -127,7 +127,7 @@ SHOW COLUMNS IN showdb.showcolumn3 struct<> -- !query output org.apache.spark.sql.AnalysisException -Table or view not found: showdb.showcolumn3; line 1 pos 16 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -136,7 +136,7 @@ SHOW COLUMNS IN showcolumn3 FROM showdb struct<> -- !query output org.apache.spark.sql.AnalysisException -Table or view not found: showdb.showcolumn3; line 1 pos 16 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -145,7 +145,7 @@ SHOW COLUMNS IN showcolumn4 struct<> -- !query output org.apache.spark.sql.AnalysisException -Table or view not found: showcolumn4; line 1 pos 16 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/sql-compatibility-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/sql-compatibility-functions.sql.out index 9521cbf0bf2b8..98a1e84bda32a 100644 --- a/sql/core/src/test/resources/sql-tests/results/sql-compatibility-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/sql-compatibility-functions.sql.out @@ -93,7 +93,7 @@ SELECT string(1, 2) struct<> -- !query output org.apache.spark.sql.AnalysisException -Function string accepts only one argument; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out index 0ac05bb29c9ff..812a28d42bb47 100644 --- a/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out @@ -5,7 
+5,7 @@ select concat_ws() struct<> -- !query output org.apache.spark.sql.AnalysisException -requirement failed: concat_ws requires at least one argument.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -14,7 +14,7 @@ select format_string() struct<> -- !query output org.apache.spark.sql.AnalysisException -requirement failed: format_string() should take at least 1 argument; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -167,7 +167,7 @@ SELECT split_part('11.12.13', '.', 0) struct<> -- !query output org.apache.spark.SparkRuntimeException -[ELEMENT_AT_BY_INDEX_ZERO] The index 0 is invalid. An index shall be either < 0 or > 0 (the first element has index 1). +{"errorClass":"ELEMENT_AT_BY_INDEX_ZERO","messageParameters":[],"queryContext":[]} -- !query @@ -648,7 +648,7 @@ select decode() struct<> -- !query output org.apache.spark.sql.AnalysisException -Invalid number of arguments for function decode. Expected: 2; Found: 0; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -657,7 +657,7 @@ select decode(encode('abc', 'utf-8')) struct<> -- !query output org.apache.spark.sql.AnalysisException -Invalid number of arguments for function decode. Expected: 2; Found: 1; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1122,7 +1122,7 @@ select to_binary(null, cast(null as int)) struct<> -- !query output org.apache.spark.sql.AnalysisException -The 'format' parameter of function 'to_binary' needs to be a string literal.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1131,7 +1131,7 @@ select to_binary('abc', 1) struct<> -- !query output org.apache.spark.sql.AnalysisException -The 'format' parameter of function 'to_binary' needs to be a string literal.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1140,7 +1140,7 @@ select to_binary('abc', 'invalidFormat') struct<> -- !query output org.apache.spark.sql.AnalysisException -Invalid value for the 'format' parameter of function 'to_binary': invalidformat. The value has to be a case-insensitive string literal of 'hex', 'utf-8', or 'base64'. +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/subquery/in-subquery/in-basic.sql.out b/sql/core/src/test/resources/sql-tests/results/subquery/in-subquery/in-basic.sql.out index 6962aea42206a..80eaed011190c 100644 --- a/sql/core/src/test/resources/sql-tests/results/subquery/in-subquery/in-basic.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/subquery/in-subquery/in-basic.sql.out @@ -38,15 +38,7 @@ select 1 from tab_a where (a1, b1) not in (select (a2, b2) from tab_b) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(named_struct('a1', tab_a.a1, 'b1', tab_a.b1) IN (listquery()))' due to data type mismatch: -The number of columns in the left hand side of an IN subquery does not match the -number of columns in the output of subquery. -#columns in left hand side: 2. -#columns in right hand side: 1. -Left side columns: -[tab_a.a1, tab_a.b1]. 
-Right side columns:
-[`named_struct(a2, a2, b2, b2)`].; line 1 pos 35
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/subquery/negative-cases/invalid-correlation.sql.out b/sql/core/src/test/resources/sql-tests/results/subquery/negative-cases/invalid-correlation.sql.out
index b358fed2663e6..6e705b8a5de7c 100644
--- a/sql/core/src/test/resources/sql-tests/results/subquery/negative-cases/invalid-correlation.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/subquery/negative-cases/invalid-correlation.sql.out
@@ -43,7 +43,7 @@ AND t2b = (SELECT max(avg)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-grouping expressions sequence is empty, and 't2.t2b' is not an aggregate function. Wrap '(avg(t2.t2b) AS avg)' in windowing function(s) or wrap 't2.t2b' in first() (or first_value) if you don't care which value you get.
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -60,7 +60,7 @@ WHERE t1a IN (SELECT min(t2a)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Resolved attribute(s) t2b#x missing from min(t2a)#x,t2c#x in operator !Filter t2c#x IN (list#x [t2b#x]).
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -75,7 +75,7 @@ HAVING EXISTS (SELECT t2a
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Found an aggregate function in a correlated predicate that has both outer and local references, which is not supported: min((outer(t1.t1a) + t2.t2a))
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -91,7 +91,7 @@ WHERE t1a IN (SELECT t2a
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Found an aggregate function in a correlated predicate that has both outer and local references, which is not supported: min((outer(t2.t2a) + t3.t3a))
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -105,14 +105,7 @@ WHERE t1a IN (SELECT t2a
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Expressions referencing the outer query are not supported outside of WHERE/HAVING clauses:
-Aggregate [min(outer(t2a#x)) AS min(outer(t2.t2a))#x]
-+- SubqueryAlias t3
-   +- View (`t3`, [t3a#x,t3b#x,t3c#x])
-      +- Project [cast(t3a#x as int) AS t3a#x, cast(t3b#x as int) AS t3b#x, cast(t3c#x as int) AS t3c#x]
-         +- Project [t3a#x, t3b#x, t3c#x]
-            +- SubqueryAlias t3
-               +- LocalRelation [t3a#x, t3b#x, t3c#x]
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -134,4 +127,4 @@ ON EXISTS (SELECT 1 FROM t2 WHERE t2a > t1a)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-[UNRESOLVED_COLUMN] A column or function parameter with name `t1a` cannot be resolved. Did you mean one of the following? [`t2`.`t2a`, `t2`.`t2b`, `t2`.`t2c`]; line 4 pos 44
+{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`t1a`","`t2`.`t2a`, `t2`.`t2b`, `t2`.`t2c`"],"queryContext":[]}
diff --git a/sql/core/src/test/resources/sql-tests/results/subquery/negative-cases/subq-input-typecheck.sql.out b/sql/core/src/test/resources/sql-tests/results/subquery/negative-cases/subq-input-typecheck.sql.out
index 8a8334fb57d68..54244128d0bfb 100644
--- a/sql/core/src/test/resources/sql-tests/results/subquery/negative-cases/subq-input-typecheck.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/subquery/negative-cases/subq-input-typecheck.sql.out
@@ -61,7 +61,7 @@ FROM t1
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Scalar subquery must return only one column, but got 2
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -76,7 +76,7 @@ FROM t1
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Scalar subquery must return only one column, but got 2
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -89,15 +89,7 @@ t1a IN (SELECT t2a, t2b
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(t1.t1a IN (listquery(t1.t1a)))' due to data type mismatch:
-The number of columns in the left hand side of an IN subquery does not match the
-number of columns in the output of subquery.
-#columns in left hand side: 1.
-#columns in right hand side: 2.
-Left side columns:
-[t1.t1a].
-Right side columns:
-[t2.t2a, t2.t2b].; line 3 pos 4
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -110,15 +102,7 @@ WHERE
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(named_struct('t1a', t1.t1a, 't1b', t1.t1b) IN (listquery(t1.t1a)))' due to data type mismatch:
-The number of columns in the left hand side of an IN subquery does not match the
-number of columns in the output of subquery.
-#columns in left hand side: 2.
-#columns in right hand side: 1.
-Left side columns:
-[t1.t1a, t1.t1b].
-Right side columns:
-[t2.t2a].; line 3 pos 11
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -132,12 +116,4 @@ WHERE
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(named_struct('t4a', t4.t4a, 't4b', t4.t4b, 't4c', t4.t4c) IN (listquery()))' due to data type mismatch:
-The data type of one or more elements in the left hand side of an IN subquery
-is not compatible with the data type of the output of the subquery
-Mismatched columns:
-[(t4.t4a:double, t5.t5a:timestamp), (t4.t4c:string, t5.t5c:bigint)]
-Left side:
-[double, string, string].
-Right side:
-[timestamp, string, bigint].; line 3 pos 16
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
diff --git a/sql/core/src/test/resources/sql-tests/results/table-aliases.sql.out b/sql/core/src/test/resources/sql-tests/results/table-aliases.sql.out
index 5aaddbf91c22b..962722fada63e 100644
--- a/sql/core/src/test/resources/sql-tests/results/table-aliases.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/table-aliases.sql.out
@@ -39,7 +39,7 @@ SELECT * FROM testData AS t(col1, col2, col3)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Number of column aliases does not match number of columns. Number of column aliases: 3; number of columns: 2.; line 1 pos 14
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -48,7 +48,7 @@ SELECT * FROM testData AS t(col1)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Number of column aliases does not match number of columns. Number of column aliases: 1; number of columns: 2.; line 1 pos 14
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -57,7 +57,7 @@ SELECT a AS col1, b AS col2 FROM testData AS t(c, d)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-[UNRESOLVED_COLUMN] A column or function parameter with name `a` cannot be resolved. Did you mean one of the following? [`t`.`c`, `t`.`d`]; line 1 pos 7
+{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`a`","`t`.`c`, `t`.`d`"],"queryContext":[]}


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/table-valued-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/table-valued-functions.sql.out
index 1ce6fbbdbc84b..25f4ea505400d 100644
--- a/sql/core/src/test/resources/sql-tests/results/table-valued-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/table-valued-functions.sql.out
@@ -5,7 +5,7 @@ select * from dummy(3)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-could not resolve `dummy` to a table-valued function; line 1 pos 14
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -67,12 +67,7 @@ select * from range(1, 1, 1, 1, 1)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Table-valued function range with alternatives:
- range(start: long, end: long, step: long, numSlices: integer)
- range(start: long, end: long, step: long)
- range(start: long, end: long)
- range(end: long)
-cannot be applied to (integer, integer, integer, integer, integer): Invalid number of arguments for function range. Expected: one of 1, 2, 3 and 4; Found: 5; line 1 pos 14
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -81,12 +76,7 @@ select * from range(1, null)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Table-valued function range with alternatives:
- range(start: long, end: long, step: long, numSlices: integer)
- range(start: long, end: long, step: long)
- range(start: long, end: long)
- range(end: long)
-cannot be applied to (integer, void): Incompatible input data type. Expected: long; Found: void; line 1 pos 14
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -95,12 +85,7 @@ select * from range(array(1, 2, 3))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Table-valued function range with alternatives:
- range(start: long, end: long, step: long, numSlices: integer)
- range(start: long, end: long, step: long)
- range(start: long, end: long)
- range(end: long)
-cannot be applied to (array): Incompatible input data type. Expected: long; Found: array; line 1 pos 14
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -109,12 +94,7 @@ select * from range(0, 5, 0)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Table-valued function range with alternatives:
- range(start: long, end: long, step: long, numSlices: integer)
- range(start: long, end: long, step: long)
- range(start: long, end: long)
- range(end: long)
-cannot be applied to (integer, integer, integer): requirement failed: step (0) cannot be 0; line 1 pos 14
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/tablesample-negative.sql.out b/sql/core/src/test/resources/sql-tests/results/tablesample-negative.sql.out
index 9624cdfe7b9d9..96ef730f1030c 100644
--- a/sql/core/src/test/resources/sql-tests/results/tablesample-negative.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/tablesample-negative.sql.out
@@ -29,12 +29,7 @@ SELECT mydb1.t1 FROM t1 TABLESAMPLE (-1 PERCENT)
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-Sampling fraction (-0.01) must be on interval [0, 1](line 1, pos 24)
-
-== SQL ==
-SELECT mydb1.t1 FROM t1 TABLESAMPLE (-1 PERCENT)
------------------------- ^^^
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -43,12 +38,7 @@ SELECT mydb1.t1 FROM t1 TABLESAMPLE (101 PERCENT)
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-Sampling fraction (1.01) must be on interval [0, 1](line 1, pos 24)
-
-== SQL ==
-SELECT mydb1.t1 FROM t1 TABLESAMPLE (101 PERCENT)
------------------------- ^^^
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/timestamp-ntz.sql.out b/sql/core/src/test/resources/sql-tests/results/timestamp-ntz.sql.out
index acdc65a23f4b9..3efcfd544fae7 100644
--- a/sql/core/src/test/resources/sql-tests/results/timestamp-ntz.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/timestamp-ntz.sql.out
@@ -45,7 +45,7 @@ SELECT make_timestamp_ntz(2021, 07, 11, 6, 30, 45.678, 'CET')
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Invalid number of arguments for function make_timestamp_ntz. Expected: 6; Found: 7; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out
index 8894b07fa9e3b..ae92d14472ce2 100644
--- a/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out
@@ -13,12 +13,7 @@ select timestamp '2019-01-01中文'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-Cannot parse the TIMESTAMP value: 2019-01-01中文(line 1, pos 7)
-
-== SQL ==
-select timestamp '2019-01-01中文'
--------^^^
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -27,12 +22,7 @@ select timestamp'4294967297'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-Cannot parse the TIMESTAMP value: 4294967297(line 1, pos 7)
-
-== SQL ==
-select timestamp'4294967297'
--------^^^
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -41,12 +31,7 @@ select timestamp'2021-01-01T12:30:4294967297.123456'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-Cannot parse the TIMESTAMP value: 2021-01-01T12:30:4294967297.123456(line 1, pos 7)
-
-== SQL ==
-select timestamp'2021-01-01T12:30:4294967297.123456'
--------^^^
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -605,7 +590,7 @@ select timestamp'2011-11-11 11:11:11' - '2011-11-11 11:11:10'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' - '2011-11-11 11:11:10')' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, ''2011-11-11 11:11:10'' is of string type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -614,7 +599,7 @@ select '2011-11-11 11:11:11' - timestamp'2011-11-11 11:11:10'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '('2011-11-11 11:11:11' - TIMESTAMP '2011-11-11 11:11:10')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, ''2011-11-11 11:11:11'' is of string type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -647,7 +632,7 @@ select str - timestamp'2011-11-11 11:11:11' from ts_view
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(ts_view.str - TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'ts_view.str' is of string type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -656,7 +641,7 @@ select timestamp'2011-11-11 11:11:11' - str from ts_view
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' - ts_view.str)' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'ts_view.str' is of string type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -665,7 +650,7 @@ select timestamp'2011-11-11 11:11:11' + '1'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' + CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(TIMESTAMP '2011-11-11 11:11:11' + CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -674,7 +659,7 @@ select '1' + timestamp'2011-11-11 11:11:11'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS DOUBLE) + TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) + TIMESTAMP '2011-11-11 11:11:11')' (double and timestamp).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -683,7 +668,7 @@ select timestamp'2011-11-11 11:11:11' + null
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' + NULL)' due to data type mismatch: differing types in '(TIMESTAMP '2011-11-11 11:11:11' + NULL)' (timestamp and void).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -692,7 +677,7 @@ select null + timestamp'2011-11-11 11:11:11'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(NULL + TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(NULL + TIMESTAMP '2011-11-11 11:11:11')' (void and timestamp).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -724,8 +709,7 @@ select to_timestamp('2019-10-06 A', 'yyyy-MM-dd GGGGG')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
-Fail to recognize 'yyyy-MM-dd GGGGG' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'yyyy-MM-dd GGGGG'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]}


 -- !query
@@ -734,8 +718,7 @@ select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEEE')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
-Fail to recognize 'dd MM yyyy EEEEEE' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'dd MM yyyy EEEEEE'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]}


 -- !query
@@ -744,8 +727,7 @@ select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
-Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'dd MM yyyy EEEEE'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]}


 -- !query
@@ -754,8 +736,7 @@ select unix_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE')
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
-Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'dd MM yyyy EEEEE'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]}


 -- !query
@@ -764,8 +745,7 @@ select from_json('{"t":"26/October/2015"}', 't Timestamp', map('timestampFormat'
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
-Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'dd/MMMMM/yyyy'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]}


 -- !query
@@ -774,8 +754,7 @@ select from_csv('26/October/2015', 't Timestamp', map('timestampFormat', 'dd/MMM
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
-Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0.
2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'dd/MMMMM/yyyy'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out index 3f1a46927d91e..83add785b6efe 100644 --- a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out @@ -13,12 +13,7 @@ select timestamp '2019-01-01中文' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the TIMESTAMP value: 2019-01-01中文(line 1, pos 7) - -== SQL == -select timestamp '2019-01-01中文' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -27,12 +22,7 @@ select timestamp'4294967297' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the TIMESTAMP value: 4294967297(line 1, pos 7) - -== SQL == -select timestamp'4294967297' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -41,12 +31,7 @@ select timestamp'2021-01-01T12:30:4294967297.123456' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the TIMESTAMP value: 2021-01-01T12:30:4294967297.123456(line 1, pos 7) - -== SQL == -select timestamp'2021-01-01T12:30:4294967297.123456' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -95,7 +80,7 @@ SELECT make_timestamp(2021, 07, 11, 6, 30, 60.007) struct<> -- !query output org.apache.spark.SparkDateTimeException -[INVALID_FRACTION_OF_SECOND] The fraction of sec must be zero. Valid range is [0, 60]. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +{"errorClass":"INVALID_FRACTION_OF_SECOND","sqlState":"22023","messageParameters":["\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -329,7 +314,7 @@ select to_timestamp(1) struct<> -- !query output org.apache.spark.SparkDateTimeException -[CAST_INVALID_INPUT] The value '1' of the type "STRING" cannot be cast to "TIMESTAMP_NTZ" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'1'","\"STRING\"","\"TIMESTAMP_NTZ\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":-1,"stopIndex":-1,"fragment":""}]} -- !query @@ -338,7 +323,7 @@ select to_timestamp('2019-10-06 10:11:12.', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]') struct<> -- !query output org.apache.spark.SparkDateTimeException -[CANNOT_PARSE_TIMESTAMP] Text '2019-10-06 10:11:12.' could not be parsed at index 20. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text '2019-10-06 10:11:12.' 
could not be parsed at index 20","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -403,7 +388,7 @@ select to_timestamp('2019-10-06 10:11:12.1234567PST', 'yyyy-MM-dd HH:mm:ss.SSSSS struct<> -- !query output org.apache.spark.SparkDateTimeException -[CANNOT_PARSE_TIMESTAMP] Text '2019-10-06 10:11:12.1234567PST' could not be parsed, unparsed text found at index 26. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text '2019-10-06 10:11:12.1234567PST' could not be parsed, unparsed text found at index 26","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -420,7 +405,7 @@ select to_timestamp('223456 2019-10-06 10:11:12.123456PST', 'SSSSSS yyyy-MM-dd H struct<> -- !query output org.apache.spark.SparkDateTimeException -[CANNOT_PARSE_TIMESTAMP] Text '223456 2019-10-06 10:11:12.123456PST' could not be parsed at index 27. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text '223456 2019-10-06 10:11:12.123456PST' could not be parsed at index 27","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -485,7 +470,7 @@ select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyyyy-MM-dd'S'HH:mm") struct<> -- !query output org.apache.spark.SparkDateTimeException -[CANNOT_PARSE_TIMESTAMP] Text '12.1232019-10-06S10:11' could not be parsed at index 7. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text '12.1232019-10-06S10:11' could not be parsed at index 7","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -494,7 +479,7 @@ select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyy-MM-dd'S'HH:mm") struct<> -- !query output org.apache.spark.SparkDateTimeException -[CANNOT_PARSE_TIMESTAMP] Text '12.1232019-10-06S10:11' could not be parsed at index 9. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text '12.1232019-10-06S10:11' could not be parsed at index 9","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -567,7 +552,7 @@ select to_timestamp("02-29", "MM-dd") struct<> -- !query output org.apache.spark.SparkDateTimeException -[CANNOT_PARSE_TIMESTAMP] Invalid date 'February 29' as '1970' is not a leap year. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. 
+{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Invalid date 'February 29' as '1970' is not a leap year","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -672,7 +657,7 @@ select timestamp'2011-11-11 11:11:11' + '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + CAST('1' AS TIMESTAMP_NTZ))' due to data type mismatch: '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + CAST('1' AS TIMESTAMP_NTZ))' requires (numeric or interval day to second or interval year to month or interval) type, not timestamp_ntz; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -681,7 +666,7 @@ select '1' + timestamp'2011-11-11 11:11:11' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS TIMESTAMP_NTZ) + TIMESTAMP_NTZ '2011-11-11 11:11:11')' due to data type mismatch: '(CAST('1' AS TIMESTAMP_NTZ) + TIMESTAMP_NTZ '2011-11-11 11:11:11')' requires (numeric or interval day to second or interval year to month or interval) type, not timestamp_ntz; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -690,7 +675,7 @@ select timestamp'2011-11-11 11:11:11' + null struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + NULL)' due to data type mismatch: differing types in '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + NULL)' (timestamp_ntz and void).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -699,7 +684,7 @@ select null + timestamp'2011-11-11 11:11:11' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(NULL + TIMESTAMP_NTZ '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(NULL + TIMESTAMP_NTZ '2011-11-11 11:11:11')' (void and timestamp_ntz).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -758,8 +743,7 @@ select unix_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 
2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'dd MM yyyy EEEEE'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out index 6b575697af642..5d858ec119307 100644 --- a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out @@ -13,12 +13,7 @@ select timestamp '2019-01-01中文' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the TIMESTAMP value: 2019-01-01中文(line 1, pos 7) - -== SQL == -select timestamp '2019-01-01中文' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -27,12 +22,7 @@ select timestamp'4294967297' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the TIMESTAMP value: 4294967297(line 1, pos 7) - -== SQL == -select timestamp'4294967297' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -41,12 +31,7 @@ select timestamp'2021-01-01T12:30:4294967297.123456' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Cannot parse the TIMESTAMP value: 2021-01-01T12:30:4294967297.123456(line 1, pos 7) - -== SQL == -select timestamp'2021-01-01T12:30:4294967297.123456' --------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -605,7 +590,7 @@ select timestamp'2011-11-11 11:11:11' - '2011-11-11 11:11:10' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(TIMESTAMP_NTZ '2011-11-11 11:11:11' - '2011-11-11 11:11:10')' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, ''2011-11-11 11:11:10'' is of string type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -614,7 +599,7 @@ select '2011-11-11 11:11:11' - timestamp'2011-11-11 11:11:10' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '('2011-11-11 11:11:11' - TIMESTAMP_NTZ '2011-11-11 11:11:10')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, ''2011-11-11 11:11:11'' is of string type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -647,7 +632,7 @@ select str - timestamp'2011-11-11 11:11:11' from ts_view struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(ts_view.str - TIMESTAMP_NTZ '2011-11-11 11:11:11')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'ts_view.str' is of string type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -656,7 +641,7 @@ select timestamp'2011-11-11 11:11:11' - str from ts_view struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(TIMESTAMP_NTZ '2011-11-11 11:11:11' - ts_view.str)' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'ts_view.str' is of string type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -665,7 
+650,7 @@ select timestamp'2011-11-11 11:11:11' + '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + CAST('1' AS DOUBLE))' (timestamp_ntz and double).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -674,7 +659,7 @@ select '1' + timestamp'2011-11-11 11:11:11' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS DOUBLE) + TIMESTAMP_NTZ '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) + TIMESTAMP_NTZ '2011-11-11 11:11:11')' (double and timestamp_ntz).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -683,7 +668,7 @@ select timestamp'2011-11-11 11:11:11' + null struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + NULL)' due to data type mismatch: differing types in '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + NULL)' (timestamp_ntz and void).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -692,7 +677,7 @@ select null + timestamp'2011-11-11 11:11:11' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(NULL + TIMESTAMP_NTZ '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(NULL + TIMESTAMP_NTZ '2011-11-11 11:11:11')' (void and timestamp_ntz).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -751,8 +736,7 @@ select unix_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 
2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'dd MM yyyy EEEEE'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/timezone.sql.out b/sql/core/src/test/resources/sql-tests/results/timezone.sql.out index 7dbdb174be09a..8612341178a8e 100644 --- a/sql/core/src/test/resources/sql-tests/results/timezone.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/timezone.sql.out @@ -45,12 +45,7 @@ SET TIME ZONE struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Invalid time zone displacement value(line 1, pos 0) - -== SQL == -SET TIME ZONE -^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -68,12 +63,7 @@ SET TIME ZONE INTERVAL 3 DAYS struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -The interval value must be in the range of [-18, +18] hours with second precision(line 1, pos 14) - -== SQL == -SET TIME ZONE INTERVAL 3 DAYS ---------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -82,12 +72,7 @@ SET TIME ZONE INTERVAL 24 HOURS struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -The interval value must be in the range of [-18, +18] hours with second precision(line 1, pos 14) - -== SQL == -SET TIME ZONE INTERVAL 24 HOURS ---------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -96,12 +81,7 @@ SET TIME ZONE INTERVAL '19:40:32' HOUR TO SECOND struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -The interval value must be in the range of [-18, +18] hours with second precision(line 1, pos 14) - -== SQL == -SET TIME ZONE INTERVAL '19:40:32' HOUR TO SECOND ---------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -110,12 +90,7 @@ SET TIME ZONE INTERVAL 10 HOURS 'GMT+1' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -Invalid time zone displacement value(line 1, pos 0) - -== SQL == -SET TIME ZONE INTERVAL 10 HOURS 'GMT+1' -^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -124,9 +99,4 @@ SET TIME ZONE INTERVAL 10 HOURS 1 MILLISECOND struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -The interval value must be in the range of [-18, +18] hours with second precision(line 1, pos 14) - -== SQL == -SET TIME ZONE INTERVAL 10 HOURS 1 MILLISECOND ---------------^^^ +{"errorClass":null,"messageParameters":[],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/transform.sql.out b/sql/core/src/test/resources/sql-tests/results/transform.sql.out index b921518bf8d82..d9f63aa8a1970 100644 --- a/sql/core/src/test/resources/sql-tests/results/transform.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/transform.sql.out @@ -400,36 +400,7 @@ SELECT a, b, decode(c, 'UTF-8'), d, e, f, g, h, i, j, k, l FROM ( struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -LINES TERMINATED BY only supports newline '\n' right now: @(line 3, pos 4) - -== SQL == -SELECT a, b, decode(c, 'UTF-8'), d, e, f, g, h, i, j, k, l FROM ( - SELECT TRANSFORM(a, b, c, d, e, f, g, h, i, j, k, l) - ROW FORMAT DELIMITED -----^^^ - FIELDS TERMINATED BY ',' - LINES TERMINATED 
BY '@' - NULL DEFINED AS 'NULL' - USING 'cat' AS ( - a string, - b string, - c string, - d string, - e string, - f string, - g string, - h string, - i string, - j string, - k string, - l string) - ROW FORMAT DELIMITED - FIELDS TERMINATED BY ',' - LINES TERMINATED BY '@' - NULL DEFINED AS 'NULL' - FROM t -) tmp +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -715,15 +686,7 @@ WHERE a <= 4 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -[UNSUPPORTED_FEATURE.TRANSFORM_DISTINCT_ALL] The feature is not supported: TRANSFORM with the DISTINCT/ALL clause.(line 1, pos 17) - -== SQL == -SELECT TRANSFORM(DISTINCT b, a, c) ------------------^^^ - USING 'cat' AS (a, b, c) -FROM script_trans -WHERE a <= 4 +{"errorClass":"UNSUPPORTED_FEATURE","errorSubClass":"TRANSFORM_DISTINCT_ALL","sqlState":"0A000","messageParameters":[],"queryContext":[]} -- !query @@ -735,15 +698,7 @@ WHERE a <= 4 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -[UNSUPPORTED_FEATURE.TRANSFORM_DISTINCT_ALL] The feature is not supported: TRANSFORM with the DISTINCT/ALL clause.(line 1, pos 17) - -== SQL == -SELECT TRANSFORM(ALL b, a, c) ------------------^^^ - USING 'cat' AS (a, b, c) -FROM script_trans -WHERE a <= 4 +{"errorClass":"UNSUPPORTED_FEATURE","errorSubClass":"TRANSFORM_DISTINCT_ALL","sqlState":"0A000","messageParameters":[],"queryContext":[]} -- !query @@ -756,16 +711,7 @@ GROUP BY b struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -[PARSE_SYNTAX_ERROR] Syntax error at or near 'AS'(line 1, pos 19) - -== SQL == -SELECT TRANSFORM(b AS b_1, MAX(a), CAST(sum(c) AS STRING)) --------------------^^^ - USING 'cat' AS (a, b, c) -FROM script_trans -WHERE a <= 2 -GROUP BY b +{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'AS'",""],"queryContext":[]} -- !query @@ -778,16 +724,7 @@ GROUP BY b struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -[PARSE_SYNTAX_ERROR] Syntax error at or near 'b_1'(line 1, pos 19) - -== SQL == -SELECT TRANSFORM(b b_1, MAX(a), CAST(sum(c) AS STRING)) --------------------^^^ - USING 'cat' AS (a, b, c) -FROM script_trans -WHERE a <= 2 -GROUP BY b +{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'b_1'",""],"queryContext":[]} -- !query @@ -800,16 +737,7 @@ GROUP BY b struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -[PARSE_SYNTAX_ERROR] Syntax error at or near 'AS'(line 1, pos 27) - -== SQL == -SELECT TRANSFORM(b, MAX(a) AS max_a, CAST(sum(c) AS STRING)) ----------------------------^^^ - USING 'cat' AS (a, b, c) -FROM script_trans -WHERE a <= 2 -GROUP BY b +{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'AS'",""],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/try_arithmetic.sql.out b/sql/core/src/test/resources/sql-tests/results/try_arithmetic.sql.out index 8622b97a20502..7396b252e142e 100644 --- a/sql/core/src/test/resources/sql-tests/results/try_arithmetic.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/try_arithmetic.sql.out @@ -141,7 +141,7 @@ SELECT try_add(interval 2 year, interval 2 second) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'INTERVAL '2' YEAR + INTERVAL '02' SECOND' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2' YEAR' is of interval year type.; line 1 pos 7 
+{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/try_datetime_functions.sql.out b/sql/core/src/test/resources/sql-tests/results/try_datetime_functions.sql.out index a2326ee08145b..2a7e37edccaf8 100644 --- a/sql/core/src/test/resources/sql-tests/results/try_datetime_functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/try_datetime_functions.sql.out @@ -45,5 +45,4 @@ select try_to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEEE') struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'dd MM yyyy EEEEEE' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'dd MM yyyy EEEEEE'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/try_element_at.sql.out b/sql/core/src/test/resources/sql-tests/results/try_element_at.sql.out index defac2ea5d62c..21b1f73b85f6c 100644 --- a/sql/core/src/test/resources/sql-tests/results/try_element_at.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/try_element_at.sql.out @@ -5,7 +5,7 @@ SELECT try_element_at(array(1, 2, 3), 0) struct<> -- !query output org.apache.spark.SparkRuntimeException -[ELEMENT_AT_BY_INDEX_ZERO] The index 0 is invalid. An index shall be either < 0 or > 0 (the first element has index 1). 
+{"errorClass":"ELEMENT_AT_BY_INDEX_ZERO","messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/booleanEquality.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/booleanEquality.sql.out index 75a8baea6aa32..9110a17eb1ede 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/booleanEquality.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/booleanEquality.sql.out @@ -77,7 +77,7 @@ SELECT true = cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(true = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(true = CAST('1' AS BINARY))' (boolean and binary).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -94,7 +94,7 @@ SELECT true = cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(true = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(true = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (boolean and timestamp).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -103,7 +103,7 @@ SELECT true = cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(true = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(true = CAST('2017-12-11 09:30:00' AS DATE))' (boolean and date).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -176,7 +176,7 @@ SELECT true <=> cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(true <=> CAST('1' AS BINARY))' due to data type mismatch: differing types in '(true <=> CAST('1' AS BINARY))' (boolean and binary).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -193,7 +193,7 @@ SELECT true <=> cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(true <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(true <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (boolean and timestamp).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -202,7 +202,7 @@ SELECT true <=> cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(true <=> CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(true <=> CAST('2017-12-11 09:30:00' AS DATE))' (boolean and date).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -275,7 +275,7 @@ SELECT cast('1' as binary) = true FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS BINARY) = true)' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = true)' (binary and boolean).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -292,7 +292,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) = true FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = true)' due to data type mismatch: differing types in 
'(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = true)' (timestamp and boolean).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -301,7 +301,7 @@ SELECT cast('2017-12-11 09:30:00' as date) = true FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = true)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = true)' (date and boolean).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -374,7 +374,7 @@ SELECT cast('1' as binary) <=> true FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS BINARY) <=> true)' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <=> true)' (binary and boolean).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -391,7 +391,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <=> true FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> true)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> true)' (timestamp and boolean).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -400,7 +400,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <=> true FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <=> true)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <=> true)' (date and boolean).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -473,7 +473,7 @@ SELECT false = cast('0' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(false = CAST('0' AS BINARY))' due to data type mismatch: differing types in '(false = CAST('0' AS BINARY))' (boolean and binary).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -490,7 +490,7 @@ SELECT false = cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(false = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(false = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (boolean and timestamp).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -499,7 +499,7 @@ SELECT false = cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(false = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(false = CAST('2017-12-11 09:30:00' AS DATE))' (boolean and date).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -572,7 +572,7 @@ SELECT false <=> cast('0' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(false <=> CAST('0' AS BINARY))' due to data type mismatch: differing types in '(false <=> CAST('0' AS BINARY))' (boolean and binary).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -589,7 +589,7 @@ SELECT false <=> cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(false <=> CAST('2017-12-11 
09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(false <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (boolean and timestamp).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -598,7 +598,7 @@ SELECT false <=> cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(false <=> CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(false <=> CAST('2017-12-11 09:30:00' AS DATE))' (boolean and date).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -671,7 +671,7 @@ SELECT cast('0' as binary) = false FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('0' AS BINARY) = false)' due to data type mismatch: differing types in '(CAST('0' AS BINARY) = false)' (binary and boolean).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -688,7 +688,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) = false FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = false)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = false)' (timestamp and boolean).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -697,7 +697,7 @@ SELECT cast('2017-12-11 09:30:00' as date) = false FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = false)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = false)' (date and boolean).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -770,7 +770,7 @@ SELECT cast('0' as binary) <=> false FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('0' AS BINARY) <=> false)' due to data type mismatch: differing types in '(CAST('0' AS BINARY) <=> false)' (binary and boolean).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -787,7 +787,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <=> false FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> false)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> false)' (timestamp and boolean).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -796,4 +796,4 @@ SELECT cast('2017-12-11 09:30:00' as date) <=> false FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <=> false)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <=> false)' (date and boolean).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/caseWhenCoercion.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/caseWhenCoercion.sql.out index 11725df995189..e505da36f4926 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/caseWhenCoercion.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/caseWhenCoercion.sql.out @@ -77,7 +77,7 @@ SELECT CASE WHEN true THEN cast(1 as 
tinyint) ELSE cast('2' as binary) END FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST(1 AS TINYINT) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN tinyint ELSE binary END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -86,7 +86,7 @@ SELECT CASE WHEN true THEN cast(1 as tinyint) ELSE cast(2 as boolean) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST(1 AS TINYINT) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN tinyint ELSE boolean END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -95,7 +95,7 @@ SELECT CASE WHEN true THEN cast(1 as tinyint) ELSE cast('2017-12-11 09:30:00.0' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST(1 AS TINYINT) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN tinyint ELSE timestamp END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -104,7 +104,7 @@ SELECT CASE WHEN true THEN cast(1 as tinyint) ELSE cast('2017-12-11 09:30:00' as struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST(1 AS TINYINT) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN tinyint ELSE date END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -177,7 +177,7 @@ SELECT CASE WHEN true THEN cast(1 as smallint) ELSE cast('2' as binary) END FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST(1 AS SMALLINT) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN smallint ELSE binary END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -186,7 +186,7 @@ SELECT CASE WHEN true THEN cast(1 as smallint) ELSE cast(2 as boolean) END FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST(1 AS SMALLINT) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN smallint ELSE boolean END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -195,7 +195,7 @@ SELECT CASE WHEN true THEN cast(1 as smallint) ELSE cast('2017-12-11 09:30:00.0' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST(1 AS SMALLINT) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN smallint ELSE timestamp END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -204,7 +204,7 @@ SELECT CASE WHEN true THEN cast(1 as smallint) ELSE cast('2017-12-11 09:30:00' a struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST(1 AS SMALLINT) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN smallint ELSE date END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -277,7 +277,7 @@ SELECT CASE WHEN true THEN cast(1 as int) ELSE cast('2' as binary) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST(1 AS INT) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN int ELSE binary END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -286,7 +286,7 @@ SELECT CASE WHEN true THEN cast(1 as int) ELSE cast(2 as boolean) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST(1 AS INT) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN int ELSE boolean END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -295,7 +295,7 @@ SELECT CASE WHEN true THEN cast(1 as int) ELSE cast('2017-12-11 09:30:00.0' as t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST(1 AS INT) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN int ELSE timestamp END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -304,7 +304,7 @@ SELECT CASE WHEN true THEN cast(1 as int) ELSE cast('2017-12-11 09:30:00' as dat struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST(1 AS INT) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN int ELSE date END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -377,7 +377,7 @@ SELECT CASE WHEN true THEN cast(1 as bigint) ELSE cast('2' as binary) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST(1 AS BIGINT) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN bigint ELSE binary END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -386,7 +386,7 @@ SELECT CASE WHEN true THEN cast(1 as bigint) ELSE cast(2 as boolean) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST(1 AS BIGINT) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN bigint ELSE boolean END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -395,7 +395,7 @@ SELECT CASE WHEN true THEN cast(1 as bigint) ELSE cast('2017-12-11 09:30:00.0' a struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST(1 AS BIGINT) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN bigint ELSE timestamp END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -404,7 +404,7 @@ SELECT CASE WHEN true THEN cast(1 as bigint) ELSE cast('2017-12-11 09:30:00' as struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST(1 AS BIGINT) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN bigint ELSE date END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -477,7 +477,7 @@ SELECT CASE WHEN true THEN cast(1 as float) ELSE cast('2' as binary) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST(1 AS FLOAT) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN float ELSE binary END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -486,7 +486,7 @@ SELECT CASE WHEN true THEN cast(1 as float) ELSE cast(2 as boolean) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST(1 AS FLOAT) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN float ELSE boolean END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -495,7 +495,7 @@ SELECT CASE WHEN true THEN cast(1 as float) ELSE cast('2017-12-11 09:30:00.0' as struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST(1 AS FLOAT) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN float ELSE timestamp END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -504,7 +504,7 @@ SELECT CASE WHEN true THEN cast(1 as float) ELSE cast('2017-12-11 09:30:00' as d struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST(1 AS FLOAT) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN float ELSE date END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -577,7 +577,7 @@ SELECT CASE WHEN true THEN cast(1 as double) ELSE cast('2' as binary) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST(1 AS DOUBLE) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN double ELSE binary END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -586,7 +586,7 @@ SELECT CASE WHEN true THEN cast(1 as double) ELSE cast(2 as boolean) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST(1 AS DOUBLE) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN double ELSE boolean END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -595,7 +595,7 @@ SELECT CASE WHEN true THEN cast(1 as double) ELSE cast('2017-12-11 09:30:00.0' a struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST(1 AS DOUBLE) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN double ELSE timestamp END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -604,7 +604,7 @@ SELECT CASE WHEN true THEN cast(1 as double) ELSE cast('2017-12-11 09:30:00' as struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST(1 AS DOUBLE) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN double ELSE date END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -677,7 +677,7 @@ SELECT CASE WHEN true THEN cast(1 as decimal(10, 0)) ELSE cast('2' as binary) EN struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST(1 AS DECIMAL(10,0)) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN decimal(10,0) ELSE binary END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -686,7 +686,7 @@ SELECT CASE WHEN true THEN cast(1 as decimal(10, 0)) ELSE cast(2 as boolean) END struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST(1 AS DECIMAL(10,0)) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN decimal(10,0) ELSE boolean END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -695,7 +695,7 @@ SELECT CASE WHEN true THEN cast(1 as decimal(10, 0)) ELSE cast('2017-12-11 09:30 struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST(1 AS DECIMAL(10,0)) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN decimal(10,0) ELSE timestamp END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -704,7 +704,7 @@ SELECT CASE WHEN true THEN cast(1 as decimal(10, 0)) ELSE cast('2017-12-11 09:30 struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST(1 AS DECIMAL(10,0)) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN decimal(10,0) ELSE date END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -777,7 +777,7 @@ SELECT CASE WHEN true THEN cast(1 as string) ELSE cast('2' as binary) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST(1 AS STRING) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN string ELSE binary END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -786,7 +786,7 @@ SELECT CASE WHEN true THEN cast(1 as string) ELSE cast(2 as boolean) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST(1 AS STRING) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN string ELSE boolean END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -811,7 +811,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast(2 as tinyint) END FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS TINYINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE tinyint END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -820,7 +820,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast(2 as smallint) END FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS SMALLINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE smallint END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -829,7 +829,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast(2 as int) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS INT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE int END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -838,7 +838,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast(2 as bigint) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS BIGINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN binary ELSE bigint END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -847,7 +847,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast(2 as float) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS FLOAT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE float END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -856,7 +856,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast(2 as double) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS DOUBLE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE double END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -865,7 +865,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast(2 as decimal(10, 0)) EN struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS DECIMAL(10,0)) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE decimal(10,0) END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -874,7 +874,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast(2 as string) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS STRING) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE string END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -891,7 +891,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast(2 as boolean) END FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE boolean END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -900,7 +900,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast('2017-12-11 09:30:00.0' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE timestamp END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -909,7 +909,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast('2017-12-11 09:30:00' a struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN binary ELSE date END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -918,7 +918,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast(2 as tinyint) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS TINYINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE tinyint END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -927,7 +927,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast(2 as smallint) END FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS SMALLINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE smallint END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -936,7 +936,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast(2 as int) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS INT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE int END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -945,7 +945,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast(2 as bigint) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS BIGINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE bigint END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -954,7 +954,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast(2 as float) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS FLOAT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE float END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -963,7 +963,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast(2 as double) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS DOUBLE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE double END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -972,7 +972,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast(2 as decimal(10, 0)) END struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS DECIMAL(10,0)) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN boolean ELSE decimal(10,0) END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -981,7 +981,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast(2 as string) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS STRING) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE string END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -990,7 +990,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast('2' as binary) END FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE binary END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1007,7 +1007,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast('2017-12-11 09:30:00.0' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE timestamp END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1016,7 +1016,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast('2017-12-11 09:30:00' as struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE date END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1025,7 +1025,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00.0' as timestamp) ELSE cast( struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS TINYINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE tinyint END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1034,7 +1034,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00.0' as timestamp) ELSE cast( struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS SMALLINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN timestamp ELSE smallint END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1043,7 +1043,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00.0' as timestamp) ELSE cast( struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS INT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE int END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1052,7 +1052,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00.0' as timestamp) ELSE cast( struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS BIGINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE bigint END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1061,7 +1061,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00.0' as timestamp) ELSE cast( struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS FLOAT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE float END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1070,7 +1070,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00.0' as timestamp) ELSE cast( struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS DOUBLE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE double END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1079,7 +1079,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00.0' as timestamp) ELSE cast( struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS DECIMAL(10,0)) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE decimal(10,0) END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1096,7 +1096,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00.0' as timestamp) ELSE cast( struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN timestamp ELSE binary END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1105,7 +1105,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00.0' as timestamp) ELSE cast( struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE boolean END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1130,7 +1130,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00' as date) ELSE cast(2 as ti struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS TINYINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE tinyint END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1139,7 +1139,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00' as date) ELSE cast(2 as sm struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS SMALLINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE smallint END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1148,7 +1148,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00' as date) ELSE cast(2 as in struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS INT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE int END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1157,7 +1157,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00' as date) ELSE cast(2 as bi struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS BIGINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE bigint END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1166,7 +1166,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00' as date) ELSE cast(2 as fl struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS FLOAT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN date ELSE float END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1175,7 +1175,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00' as date) ELSE cast(2 as do struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS DOUBLE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE double END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1184,7 +1184,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00' as date) ELSE cast(2 as de struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS DECIMAL(10,0)) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE decimal(10,0) END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1201,7 +1201,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00' as date) ELSE cast('2' as struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE binary END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1210,7 +1210,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00' as date) ELSE cast(2 as bo struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN date ELSE boolean END; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/dateTimeOperations.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/dateTimeOperations.sql.out index 94422bd2089f5..8e1d1dedc8ce9 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/dateTimeOperations.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/dateTimeOperations.sql.out @@ -13,7 +13,7 @@ select cast(1 as tinyint) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CAST(1 AS TINYINT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS TINYINT)' is of tinyint type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -22,7 +22,7 @@ select cast(1 as smallint) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CAST(1 AS SMALLINT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS SMALLINT)' is of smallint type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -31,7 +31,7 @@ select cast(1 as int) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CAST(1 AS INT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS INT)' is of int type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -40,7 +40,7 @@ select cast(1 as bigint) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CAST(1 AS BIGINT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS BIGINT)' is of bigint type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -49,7 +49,7 @@ select cast(1 as float) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CAST(1 AS FLOAT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS FLOAT)' is of float type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -58,7 +58,7 @@ select cast(1 as double) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CAST(1 AS DOUBLE) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DOUBLE)' is of double type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -67,7 +67,7 @@ select cast(1 as decimal(10, 0)) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'CAST(1 AS DECIMAL(10,0)) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -92,7 +92,7 @@ select cast('1' as binary) + interval 2 day struct<> -- !query output 
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST('1' AS BINARY) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST('1' AS BINARY)' is of binary type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -101,7 +101,7 @@ select cast(1 as boolean) + interval 2 day
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS BOOLEAN) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS BOOLEAN)' is of boolean type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -126,7 +126,7 @@ select interval 2 day + cast(1 as tinyint)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS TINYINT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS TINYINT)' is of tinyint type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -135,7 +135,7 @@ select interval 2 day + cast(1 as smallint)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS SMALLINT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS SMALLINT)' is of smallint type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -144,7 +144,7 @@ select interval 2 day + cast(1 as int)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS INT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS INT)' is of int type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -153,7 +153,7 @@ select interval 2 day + cast(1 as bigint)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS BIGINT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS BIGINT)' is of bigint type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -162,7 +162,7 @@ select interval 2 day + cast(1 as float)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS FLOAT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS FLOAT)' is of float type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -171,7 +171,7 @@ select interval 2 day + cast(1 as double)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS DOUBLE) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DOUBLE)' is of double type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -180,7 +180,7 @@ select interval 2 day + cast(1 as decimal(10, 0))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS DECIMAL(10,0)) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -205,7 +205,7 @@ select interval 2 day + cast('1' as binary)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST('1' AS BINARY) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST('1' AS BINARY)' is of binary type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -214,7 +214,7 @@ select interval 2 day + cast(1 as boolean)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS BOOLEAN) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS BOOLEAN)' is of boolean type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -239,7 +239,7 @@ select cast(1 as tinyint) - interval 2 day
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS TINYINT) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS TINYINT)' is of tinyint type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -248,7 +248,7 @@ select cast(1 as smallint) - interval 2 day
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS SMALLINT) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS SMALLINT)' is of smallint type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -257,7 +257,7 @@ select cast(1 as int) - interval 2 day
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS INT) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS INT)' is of int type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -266,7 +266,7 @@ select cast(1 as bigint) - interval 2 day
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS BIGINT) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS BIGINT)' is of bigint type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -275,7 +275,7 @@ select cast(1 as float) - interval 2 day
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS FLOAT) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS FLOAT)' is of float type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -284,7 +284,7 @@ select cast(1 as double) - interval 2 day
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS DOUBLE) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DOUBLE)' is of double type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -293,7 +293,7 @@ select cast(1 as decimal(10, 0)) - interval 2 day
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS DECIMAL(10,0)) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -318,7 +318,7 @@ select cast('1' as binary) - interval 2 day
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST('1' AS BINARY) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST('1' AS BINARY)' is of binary type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -327,7 +327,7 @@ select cast(1 as boolean) - interval 2 day
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'CAST(1 AS BOOLEAN) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS BOOLEAN)' is of boolean type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/decimalPrecision.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/decimalPrecision.sql.out
index 2dbe93f312d24..b263fe1b0c5f9 100644
--- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/decimalPrecision.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/decimalPrecision.sql.out
@@ -237,7 +237,7 @@ SELECT cast('1' as binary) + cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -246,7 +246,7 @@ SELECT cast('1' as binary) + cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -255,7 +255,7 @@ SELECT cast('1' as binary) + cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -264,7 +264,7 @@ SELECT cast('1' as binary) + cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -273,7 +273,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) + cast(1 as decimal(3, 0)) FRO
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -282,7 +282,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) + cast(1 as decimal(5, 0)) FRO
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -291,7 +291,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) + cast(1 as decimal(10, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -300,7 +300,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) + cast(1 as decimal(20, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -309,7 +309,7 @@ SELECT cast('2017-12-11 09:30:00' as date) + cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -318,7 +318,7 @@ SELECT cast('2017-12-11 09:30:00' as date) + cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -327,7 +327,7 @@ SELECT cast('2017-12-11 09:30:00' as date) + cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -336,7 +336,7 @@ SELECT cast('2017-12-11 09:30:00' as date) + cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -601,7 +601,7 @@ SELECT cast(1 as decimal(3, 0)) + cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(3,0)) + CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) + CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -610,7 +610,7 @@ SELECT cast(1 as decimal(5, 0)) + cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(5,0)) + CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) + CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -619,7 +619,7 @@ SELECT cast(1 as decimal(10, 0)) + cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) + CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) + CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -628,7 +628,7 @@ SELECT cast(1 as decimal(20, 0)) + cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(20,0)) + CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) + CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -637,7 +637,7 @@ SELECT cast(1 as decimal(3, 0)) + cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(3,0)) + CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) + CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -646,7 +646,7 @@ SELECT cast(1 as decimal(5, 0)) + cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(5,0)) + CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) + CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -655,7 +655,7 @@ SELECT cast(1 as decimal(10, 0)) + cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) + CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) + CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -664,7 +664,7 @@ SELECT cast(1 as decimal(20, 0)) + cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(20,0)) + CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) + CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -673,7 +673,7 @@ SELECT cast(1 as decimal(3, 0)) + cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(3,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -682,7 +682,7 @@ SELECT cast(1 as decimal(5, 0)) + cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(5,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -691,7 +691,7 @@ SELECT cast(1 as decimal(10, 0)) + cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -700,7 +700,7 @@ SELECT cast(1 as decimal(20, 0)) + cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(20,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -709,7 +709,7 @@ SELECT cast(1 as decimal(3, 0)) + cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -718,7 +718,7 @@ SELECT cast(1 as decimal(5, 0)) + cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -727,7 +727,7 @@ SELECT cast(1 as decimal(10, 0)) + cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -736,7 +736,7 @@ SELECT cast(1 as decimal(20, 0)) + cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -969,7 +969,7 @@ SELECT cast('1' as binary) - cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -978,7 +978,7 @@ SELECT cast('1' as binary) - cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -987,7 +987,7 @@ SELECT cast('1' as binary) - cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -996,7 +996,7 @@ SELECT cast('1' as binary) - cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1005,7 +1005,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) - cast(1 as decimal(3, 0)) FRO
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1014,7 +1014,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) - cast(1 as decimal(5, 0)) FRO
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1023,7 +1023,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) - cast(1 as decimal(10, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1032,7 +1032,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) - cast(1 as decimal(20, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1041,7 +1041,7 @@ SELECT cast('2017-12-11 09:30:00' as date) - cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'date_sub(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1050,7 +1050,7 @@ SELECT cast('2017-12-11 09:30:00' as date) - cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'date_sub(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1059,7 +1059,7 @@ SELECT cast('2017-12-11 09:30:00' as date) - cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'date_sub(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1068,7 +1068,7 @@ SELECT cast('2017-12-11 09:30:00' as date) - cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'date_sub(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1333,7 +1333,7 @@ SELECT cast(1 as decimal(3, 0)) - cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(3,0)) - CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) - CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1342,7 +1342,7 @@ SELECT cast(1 as decimal(5, 0)) - cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(5,0)) - CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) - CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1351,7 +1351,7 @@ SELECT cast(1 as decimal(10, 0)) - cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) - CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) - CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1360,7 +1360,7 @@ SELECT cast(1 as decimal(20, 0)) - cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(20,0)) - CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) - CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1369,7 +1369,7 @@ SELECT cast(1 as decimal(3, 0)) - cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(3,0)) - CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) - CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1378,7 +1378,7 @@ SELECT cast(1 as decimal(5, 0)) - cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(5,0)) - CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) - CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1387,7 +1387,7 @@ SELECT cast(1 as decimal(10, 0)) - cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) - CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) - CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1396,7 +1396,7 @@ SELECT cast(1 as decimal(20, 0)) - cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(20,0)) - CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) - CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1405,7 +1405,7 @@ SELECT cast(1 as decimal(3, 0)) - cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(3,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1414,7 +1414,7 @@ SELECT cast(1 as decimal(5, 0)) - cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(5,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1423,7 +1423,7 @@ SELECT cast(1 as decimal(10, 0)) - cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1432,7 +1432,7 @@ SELECT cast(1 as decimal(20, 0)) - cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(20,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1441,7 +1441,7 @@ SELECT cast(1 as decimal(3, 0)) - cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(3,0)) - CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: argument 1 requires date type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1450,7 +1450,7 @@ SELECT cast(1 as decimal(5, 0)) - cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(5,0)) - CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: argument 1 requires date type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1459,7 +1459,7 @@ SELECT cast(1 as decimal(10, 0)) - cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) - CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: argument 1 requires date type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1468,7 +1468,7 @@ SELECT cast(1 as decimal(20, 0)) - cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(20,0)) - CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: argument 1 requires date type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1701,7 +1701,7 @@ SELECT cast('1' as binary) * cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1710,7 +1710,7 @@ SELECT cast('1' as binary) * cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1719,7 +1719,7 @@ SELECT cast('1' as binary) * cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1728,7 +1728,7 @@ SELECT cast('1' as binary) * cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1737,7 +1737,7 @@ SELECT cast('2017*12*11 09:30:00.0' as timestamp) * cast(1 as decimal(3, 0)) FRO
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1746,7 +1746,7 @@ SELECT cast('2017*12*11 09:30:00.0' as timestamp) * cast(1 as decimal(5, 0)) FRO
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1755,7 +1755,7 @@ SELECT cast('2017*12*11 09:30:00.0' as timestamp) * cast(1 as decimal(10, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1764,7 +1764,7 @@ SELECT cast('2017*12*11 09:30:00.0' as timestamp) * cast(1 as decimal(20, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1773,7 +1773,7 @@ SELECT cast('2017*12*11 09:30:00' as date) * cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1782,7 +1782,7 @@ SELECT cast('2017*12*11 09:30:00' as date) * cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1791,7 +1791,7 @@ SELECT cast('2017*12*11 09:30:00' as date) * cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1800,7 +1800,7 @@ SELECT cast('2017*12*11 09:30:00' as date) * cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2065,7 +2065,7 @@ SELECT cast(1 as decimal(3, 0)) * cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(3,0)) * CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) * CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2074,7 +2074,7 @@ SELECT cast(1 as decimal(5, 0)) * cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(5,0)) * CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) * CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2083,7 +2083,7 @@ SELECT cast(1 as decimal(10, 0)) * cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) * CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) * CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2092,7 +2092,7 @@ SELECT cast(1 as decimal(20, 0)) * cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(20,0)) * CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) * CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2101,7 +2101,7 @@ SELECT cast(1 as decimal(3, 0)) * cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(3,0)) * CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) * CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2110,7 +2110,7 @@ SELECT cast(1 as decimal(5, 0)) * cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(5,0)) * CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) * CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2119,7 +2119,7 @@ SELECT cast(1 as decimal(10, 0)) * cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) * CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) * CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -2128,7 +2128,7 @@ SELECT cast(1 as decimal(20, 0)) * cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(20,0)) * CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) * CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -2137,7 +2137,7 @@ SELECT cast(1 as decimal(3, 0)) * cast('2017*12*11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(3,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -2146,7 +2146,7 @@ SELECT cast(1 as decimal(5, 0)) * cast('2017*12*11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(5,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -2155,7 +2155,7 @@ SELECT cast(1 as decimal(10, 0)) * cast('2017*12*11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -2164,7 +2164,7 @@ SELECT cast(1 as decimal(20, 0)) * cast('2017*12*11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(20,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -2173,7 +2173,7 @@ SELECT cast(1 as decimal(3, 0)) * cast('2017*12*11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(3,0)) * CAST('2017*12*11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) * CAST('2017*12*11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -2182,7 +2182,7 @@ SELECT cast(1 as decimal(5, 0)) * cast('2017*12*11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(5,0)) * CAST('2017*12*11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) * CAST('2017*12*11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -2191,7 +2191,7 @@ SELECT cast(1 as decimal(10, 0)) * cast('2017*12*11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) * CAST('2017*12*11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) * CAST('2017*12*11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -2200,7 +2200,7 @@ SELECT cast(1 as decimal(20, 0)) * cast('2017*12*11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(20,0)) * CAST('2017*12*11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) * CAST('2017*12*11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -2433,7 +2433,7 @@ SELECT cast('1' as binary) / cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -2442,7 +2442,7 @@ SELECT cast('1' as binary) / cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -2451,7 +2451,7 @@ SELECT cast('1' as binary) / cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -2460,7 +2460,7 @@ SELECT cast('1' as binary) / cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -2469,7 +2469,7 @@ SELECT cast('2017/12/11 09:30:00.0' as timestamp) / cast(1 as decimal(3, 0)) FRO
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -2478,7 +2478,7 @@ SELECT cast('2017/12/11 09:30:00.0' as timestamp) / cast(1 as decimal(5, 0)) FRO
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -2487,7 +2487,7 @@ SELECT cast('2017/12/11 09:30:00.0' as timestamp) / cast(1 as decimal(10, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -2496,7 +2496,7 @@ SELECT cast('2017/12/11 09:30:00.0' as timestamp) / cast(1 as decimal(20, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -2505,7 +2505,7 @@ SELECT cast('2017/12/11 09:30:00' as date) / cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -2514,7 +2514,7 @@ SELECT cast('2017/12/11 09:30:00' as date) / cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -2523,7 +2523,7 @@ SELECT cast('2017/12/11 09:30:00' as date) / cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -2532,7 +2532,7 @@ SELECT cast('2017/12/11 09:30:00' as date) / cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -2797,7 +2797,7 @@ SELECT cast(1 as decimal(3, 0)) / cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(3,0)) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) / CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -2806,7 +2806,7 @@ SELECT cast(1 as decimal(5, 0)) / cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(5,0)) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) / CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -2815,7 +2815,7 @@ SELECT cast(1 as decimal(10, 0)) / cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -2824,7 +2824,7 @@ SELECT cast(1 as decimal(20, 0)) / cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(20,0)) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) / CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -2833,7 +2833,7 @@ SELECT cast(1 as decimal(3, 0)) / cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(3,0)) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) / CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -2842,7 +2842,7 @@ SELECT cast(1 as decimal(5, 0)) / cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(5,0)) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) / CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -2851,7 +2851,7 @@ SELECT cast(1 as decimal(10, 0)) / cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -2860,7 +2860,7 @@ SELECT cast(1 as decimal(20, 0)) / cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(20,0)) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) / CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -2869,7 +2869,7 @@ SELECT cast(1 as decimal(3, 0)) / cast('2017/12/11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(3,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -2878,7 +2878,7 @@ SELECT cast(1 as decimal(5, 0)) / cast('2017/12/11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(5,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -2887,7 +2887,7 @@ SELECT cast(1 as decimal(10, 0)) / cast('2017/12/11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -2896,7 +2896,7 @@ SELECT cast(1 as decimal(20, 0)) / cast('2017/12/11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(20,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -2905,7 +2905,7 @@ SELECT cast(1 as decimal(3, 0)) / cast('2017/12/11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(3,0)) / CAST('2017/12/11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) / CAST('2017/12/11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -2914,7 +2914,7 @@ SELECT cast(1 as decimal(5, 0)) / cast('2017/12/11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(5,0)) / CAST('2017/12/11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) / CAST('2017/12/11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -2923,7 +2923,7 @@ SELECT cast(1 as decimal(10, 0)) / cast('2017/12/11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST('2017/12/11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST('2017/12/11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -2932,7 +2932,7 @@ SELECT cast(1 as decimal(20, 0)) / cast('2017/12/11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(20,0)) / CAST('2017/12/11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) / CAST('2017/12/11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -3165,7 +3165,7 @@ SELECT cast('1' as binary) % cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -3174,7 +3174,7 @@ SELECT cast('1' as binary) % cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -3183,7 +3183,7 @@ SELECT cast('1' as binary) % cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -3192,7 +3192,7 @@ SELECT cast('1' as binary) % cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -3201,7 +3201,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) % cast(1 as decimal(3, 0)) FRO
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -3210,7 +3210,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) % cast(1 as decimal(5, 0)) FRO
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -3219,7 +3219,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) % cast(1 as decimal(10, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -3228,7 +3228,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) % cast(1 as decimal(20, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -3237,7 +3237,7 @@ SELECT cast('2017-12-11 09:30:00' as date) % cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -3246,7 +3246,7 @@ SELECT cast('2017-12-11 09:30:00' as date) % cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -3255,7 +3255,7 @@ SELECT cast('2017-12-11 09:30:00' as date) % cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -3264,7 +3264,7 @@ SELECT cast('2017-12-11 09:30:00' as date) % cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -3529,7 +3529,7 @@ SELECT cast(1 as decimal(3, 0)) % cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(3,0)) % CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) % CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -3538,7 +3538,7 @@ SELECT cast(1 as decimal(5, 0)) % cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(5,0)) % CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) % CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -3547,7 +3547,7 @@ SELECT cast(1 as decimal(10, 0)) % cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) % CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) % CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -3556,7 +3556,7 @@ SELECT cast(1 as decimal(20, 0)) % cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(20,0)) % CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) % CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -3565,7 +3565,7 @@ SELECT cast(1 as decimal(3, 0)) % cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(3,0)) % CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) % CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -3574,7 +3574,7 @@ SELECT cast(1 as decimal(5, 0)) % cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(5,0)) % CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) % CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -3583,7 +3583,7 @@ SELECT cast(1 as decimal(10, 0)) % cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) % CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) % CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -3592,7 +3592,7 @@ SELECT cast(1 as decimal(20, 0)) % cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(20,0)) % CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) % CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -3601,7 +3601,7 @@ SELECT cast(1 as decimal(3, 0)) % cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(3,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -3610,7 +3610,7 @@ SELECT cast(1 as decimal(5, 0)) % cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(5,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -3619,7 +3619,7 @@ SELECT cast(1 as decimal(10, 0)) % cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -3628,7 +3628,7 @@ SELECT cast(1 as decimal(20, 0)) % cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(20,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -3637,7 +3637,7 @@ SELECT cast(1 as decimal(3, 0)) % cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(3,0)) % CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) % CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -3646,7 +3646,7 @@ SELECT cast(1 as decimal(5, 0)) % cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(5,0)) % CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) % CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -3655,7 +3655,7 @@ SELECT cast(1 as decimal(10, 0)) % cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) % CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) % CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -3664,7 +3664,7 @@ SELECT cast(1 as decimal(20, 0)) % cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(20,0)) % CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) % CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -3897,7 +3897,7 @@ SELECT pmod(cast('1' as binary), cast(1 as decimal(3, 0))) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -3906,7 +3906,7 @@ SELECT pmod(cast('1' as binary), cast(1 as decimal(5, 0))) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -3915,7 +3915,7 @@ SELECT pmod(cast('1' as binary), cast(1 as decimal(10, 0))) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -3924,7 +3924,7 @@ SELECT pmod(cast('1' as binary), cast(1 as decimal(20, 0))) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -3933,7 +3933,7 @@ SELECT pmod(cast('2017-12-11 09:30:00.0' as timestamp), cast(1 as decimal(3, 0)) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -3942,7 +3942,7 @@ SELECT pmod(cast('2017-12-11 09:30:00.0' as timestamp), cast(1 as decimal(5, 0)) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -3951,7 +3951,7 @@ SELECT pmod(cast('2017-12-11 09:30:00.0' as timestamp), cast(1 as decimal(10, 0) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -3960,7 +3960,7 @@ SELECT pmod(cast('2017-12-11 09:30:00.0' as timestamp), cast(1 as decimal(20, 0) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -3969,7 +3969,7 @@ SELECT pmod(cast('2017-12-11 09:30:00' as date), cast(1 as decimal(3, 0))) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -3978,7 +3978,7 @@ SELECT pmod(cast('2017-12-11 09:30:00' as date), cast(1 as decimal(5, 0))) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -3987,7 +3987,7 @@ SELECT pmod(cast('2017-12-11 09:30:00' as date), cast(1 as decimal(10, 0))) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -3996,7 +3996,7 @@ SELECT 
pmod(cast('2017-12-11 09:30:00' as date), cast(1 as decimal(20, 0))) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -4261,7 +4261,7 @@ SELECT pmod(cast(1 as decimal(3, 0)) , cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'pmod(CAST(1 AS DECIMAL(3,0)), CAST('1' AS BINARY))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(3,0)), CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -4270,7 +4270,7 @@ SELECT pmod(cast(1 as decimal(5, 0)) , cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'pmod(CAST(1 AS DECIMAL(5,0)), CAST('1' AS BINARY))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(5,0)), CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -4279,7 +4279,7 @@ SELECT pmod(cast(1 as decimal(10, 0)), cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'pmod(CAST(1 AS DECIMAL(10,0)), CAST('1' AS BINARY))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(10,0)), CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -4288,7 +4288,7 @@ SELECT pmod(cast(1 as decimal(20, 0)), cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'pmod(CAST(1 AS DECIMAL(20,0)), CAST('1' AS BINARY))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(20,0)), CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -4297,7 +4297,7 @@ SELECT pmod(cast(1 as decimal(3, 0)) , cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'pmod(CAST(1 AS DECIMAL(3,0)), CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(3,0)), CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -4306,7 +4306,7 @@ SELECT pmod(cast(1 as decimal(5, 0)) , cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'pmod(CAST(1 AS DECIMAL(5,0)), CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(5,0)), CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -4315,7 +4315,7 @@ SELECT pmod(cast(1 as decimal(10, 0)), cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'pmod(CAST(1 AS DECIMAL(10,0)), CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(10,0)), CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -4324,7 +4324,7 @@ SELECT pmod(cast(1 as 
decimal(20, 0)), cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'pmod(CAST(1 AS DECIMAL(20,0)), CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(20,0)), CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -4333,7 +4333,7 @@ SELECT pmod(cast(1 as decimal(3, 0)) , cast('2017-12-11 09:30:00.0' as timestamp struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'pmod(CAST(1 AS DECIMAL(3,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(3,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -4342,7 +4342,7 @@ SELECT pmod(cast(1 as decimal(5, 0)) , cast('2017-12-11 09:30:00.0' as timestamp struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'pmod(CAST(1 AS DECIMAL(5,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(5,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -4351,7 +4351,7 @@ SELECT pmod(cast(1 as decimal(10, 0)), cast('2017-12-11 09:30:00.0' as timestamp struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'pmod(CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -4360,7 +4360,7 @@ SELECT pmod(cast(1 as decimal(20, 0)), cast('2017-12-11 09:30:00.0' as timestamp struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'pmod(CAST(1 AS DECIMAL(20,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(20,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -4369,7 +4369,7 @@ SELECT pmod(cast(1 as decimal(3, 0)) , cast('2017-12-11 09:30:00' as date)) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'pmod(CAST(1 AS DECIMAL(3,0)), CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(3,0)), CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -4378,7 +4378,7 @@ SELECT pmod(cast(1 as decimal(5, 0)) , cast('2017-12-11 09:30:00' as date)) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'pmod(CAST(1 AS DECIMAL(5,0)), CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(5,0)), CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -4387,7 +4387,7 @@ SELECT pmod(cast(1 as decimal(10, 0)), cast('2017-12-11 09:30:00' as date)) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'pmod(CAST(1 AS 
DECIMAL(10,0)), CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -4396,7 +4396,7 @@ SELECT pmod(cast(1 as decimal(20, 0)), cast('2017-12-11 09:30:00' as date)) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'pmod(CAST(1 AS DECIMAL(20,0)), CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(20,0)), CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -4629,7 +4629,7 @@ SELECT cast('1' as binary) = cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -4638,7 +4638,7 @@ SELECT cast('1' as binary) = cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -4647,7 +4647,7 @@ SELECT cast('1' as binary) = cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -4656,7 +4656,7 @@ SELECT cast('1' as binary) = cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -4665,7 +4665,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) = cast(1 as decimal(3, 0)) FRO struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -4674,7 +4674,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) = cast(1 as decimal(5, 0)) FRO struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -4683,7 +4683,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) = cast(1 as decimal(10, 0)) FR 
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -4692,7 +4692,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) = cast(1 as decimal(20, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -4701,7 +4701,7 @@ SELECT cast('2017-12-11 09:30:00' as date) = cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -4710,7 +4710,7 @@ SELECT cast('2017-12-11 09:30:00' as date) = cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -4719,7 +4719,7 @@ SELECT cast('2017-12-11 09:30:00' as date) = cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -4728,7 +4728,7 @@ SELECT cast('2017-12-11 09:30:00' as date) = cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -4993,7 +4993,7 @@ SELECT cast(1 as decimal(3, 0)) = cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(3,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) = CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -5002,7 +5002,7 @@ SELECT cast(1 as decimal(5, 0)) = cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(5,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) = CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -5011,7 +5011,7 @@ SELECT cast(1 as decimal(10, 0)) = cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DECIMAL(10,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) = CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -5020,7 +5020,7 @@ SELECT cast(1 as decimal(20, 0)) = cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DECIMAL(20,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) = CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -5061,7 +5061,7 @@ SELECT cast(1 as decimal(3, 0)) = cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -5070,7 +5070,7 @@ SELECT cast(1 as decimal(5, 0)) = cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -5079,7 +5079,7 @@ SELECT cast(1 as decimal(10, 0)) = cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -5088,7 +5088,7 @@ SELECT cast(1 as decimal(20, 0)) = cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -5097,7 +5097,7 @@ SELECT cast(1 as decimal(3, 0)) = cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -5106,7 +5106,7 @@ SELECT cast(1 as decimal(5, 0)) = cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 
'(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -5115,7 +5115,7 @@ SELECT cast(1 as decimal(10, 0)) = cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -5124,7 +5124,7 @@ SELECT cast(1 as decimal(20, 0)) = cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -5357,7 +5357,7 @@ SELECT cast('1' as binary) <=> cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -5366,7 +5366,7 @@ SELECT cast('1' as binary) <=> cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -5375,7 +5375,7 @@ SELECT cast('1' as binary) <=> cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -5384,7 +5384,7 @@ SELECT cast('1' as binary) <=> cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -5393,7 +5393,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <=> cast(1 as decimal(3, 0)) F struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -5402,7 +5402,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <=> cast(1 as decimal(5, 
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -5411,7 +5411,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <=> cast(1 as decimal(10, 0))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -5420,7 +5420,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <=> cast(1 as decimal(20, 0))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -5429,7 +5429,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <=> cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -5438,7 +5438,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <=> cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -5447,7 +5447,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <=> cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -5456,7 +5456,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <=> cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -5721,7 +5721,7 @@ SELECT cast(1 as decimal(3, 0)) <=> cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(3,0)) <=> CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) <=> CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -5730,7 +5730,7 @@ SELECT cast(1 as decimal(5, 0)) <=> cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(5,0)) <=> CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <=> CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -5739,7 +5739,7 @@ SELECT cast(1 as decimal(10, 0)) <=> cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) <=> CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) <=> CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -5748,7 +5748,7 @@ SELECT cast(1 as decimal(20, 0)) <=> cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(20,0)) <=> CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) <=> CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -5789,7 +5789,7 @@ SELECT cast(1 as decimal(3, 0)) <=> cast('2017-12-11 09:30:00.0' as timestamp)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(3,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -5798,7 +5798,7 @@ SELECT cast(1 as decimal(5, 0)) <=> cast('2017-12-11 09:30:00.0' as timestamp)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(5,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -5807,7 +5807,7 @@ SELECT cast(1 as decimal(10, 0)) <=> cast('2017-12-11 09:30:00.0' as timestamp)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -5816,7 +5816,7 @@ SELECT cast(1 as decimal(20, 0)) <=> cast('2017-12-11 09:30:00.0' as timestamp)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(20,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -5825,7 +5825,7 @@ SELECT cast(1 as decimal(3, 0)) <=> cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(3,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -5834,7 +5834,7 @@ SELECT cast(1 as decimal(5, 0)) <=> cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(5,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -5843,7 +5843,7 @@ SELECT cast(1 as decimal(10, 0)) <=> cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -5852,7 +5852,7 @@ SELECT cast(1 as decimal(20, 0)) <=> cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(20,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -6085,7 +6085,7 @@ SELECT cast('1' as binary) < cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -6094,7 +6094,7 @@ SELECT cast('1' as binary) < cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -6103,7 +6103,7 @@ SELECT cast('1' as binary) < cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -6112,7 +6112,7 @@ SELECT cast('1' as binary) < cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -6121,7 +6121,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) < cast(1 as decimal(3, 0)) FRO
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -6130,7 +6130,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) < cast(1 as decimal(5, 0)) FRO
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -6139,7 +6139,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) < cast(1 as decimal(10, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -6148,7 +6148,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) < cast(1 as decimal(20, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -6157,7 +6157,7 @@ SELECT cast('2017-12-11 09:30:00' as date) < cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -6166,7 +6166,7 @@ SELECT cast('2017-12-11 09:30:00' as date) < cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -6175,7 +6175,7 @@ SELECT cast('2017-12-11 09:30:00' as date) < cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -6184,7 +6184,7 @@ SELECT cast('2017-12-11 09:30:00' as date) < cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -6449,7 +6449,7 @@ SELECT cast(1 as decimal(3, 0)) < cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(3,0)) < CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) < CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -6458,7 +6458,7 @@ SELECT cast(1 as decimal(5, 0)) < cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(5,0)) < CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) < CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -6467,7 +6467,7 @@ SELECT cast(1 as decimal(10, 0)) < cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) < CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) < CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -6476,7 +6476,7 @@ SELECT cast(1 as decimal(20, 0)) < cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(20,0)) < CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) < CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -6485,7 +6485,7 @@ SELECT cast(1 as decimal(3, 0)) < cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(3,0)) < CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) < CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -6494,7 +6494,7 @@ SELECT cast(1 as decimal(5, 0)) < cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(5,0)) < CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) < CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -6503,7 +6503,7 @@ SELECT cast(1 as decimal(10, 0)) < cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) < CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) < CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -6512,7 +6512,7 @@ SELECT cast(1 as decimal(20, 0)) < cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(20,0)) < CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) < CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -6521,7 +6521,7 @@ SELECT cast(1 as decimal(3, 0)) < cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(3,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -6530,7 +6530,7 @@ SELECT cast(1 as decimal(5, 0)) < cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(5,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -6539,7 +6539,7 @@ SELECT cast(1 as decimal(10, 0)) < cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -6548,7 +6548,7 @@ SELECT cast(1 as decimal(20, 0)) < cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(20,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -6557,7 +6557,7 @@ SELECT cast(1 as decimal(3, 0)) < cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(3,0)) < CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) < CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -6566,7 +6566,7 @@ SELECT cast(1 as decimal(5, 0)) < cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(5,0)) < CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) < CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -6575,7 +6575,7 @@ SELECT cast(1 as decimal(10, 0)) < cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) < CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) < CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -6584,7 +6584,7 @@ SELECT cast(1 as decimal(20, 0)) < cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(20,0)) < CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) < CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -6817,7 +6817,7 @@ SELECT cast('1' as binary) <= cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -6826,7 +6826,7 @@ SELECT cast('1' as binary) <= cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -6835,7 +6835,7 @@ SELECT cast('1' as binary) <= cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -6844,7 +6844,7 @@ SELECT cast('1' as binary) <= cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -6853,7 +6853,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <= cast(1 as decimal(3, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -6862,7 +6862,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <= cast(1 as decimal(5, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -6871,7 +6871,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <= cast(1 as decimal(10, 0)) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7
pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -6880,7 +6880,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <= cast(1 as decimal(20, 0)) F struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -6889,7 +6889,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <= cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -6898,7 +6898,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <= cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -6907,7 +6907,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <= cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -6916,7 +6916,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <= cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -7181,7 +7181,7 @@ SELECT cast(1 as decimal(3, 0)) <= cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DECIMAL(3,0)) <= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) <= CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -7190,7 +7190,7 @@ SELECT cast(1 as decimal(5, 0)) <= cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DECIMAL(5,0)) <= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <= CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -7199,7 +7199,7 @@ SELECT cast(1 as decimal(10, 0)) <= cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DECIMAL(10,0)) <= CAST('1' AS BINARY))' due to data 
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -7208,7 +7208,7 @@ SELECT cast(1 as decimal(20, 0)) <= cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(20,0)) <= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) <= CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -7217,7 +7217,7 @@ SELECT cast(1 as decimal(3, 0)) <= cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(3,0)) <= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) <= CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -7226,7 +7226,7 @@ SELECT cast(1 as decimal(5, 0)) <= cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(5,0)) <= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <= CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -7235,7 +7235,7 @@ SELECT cast(1 as decimal(10, 0)) <= cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) <= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) <= CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -7244,7 +7244,7 @@ SELECT cast(1 as decimal(20, 0)) <= cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(20,0)) <= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) <= CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -7253,7 +7253,7 @@ SELECT cast(1 as decimal(3, 0)) <= cast('2017-12-11 09:30:00.0' as timestamp) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(3,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -7262,7 +7262,7 @@ SELECT cast(1 as decimal(5, 0)) <= cast('2017-12-11 09:30:00.0' as timestamp) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(5,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -7271,7 +7271,7 @@ SELECT cast(1 as decimal(10, 0)) <= cast('2017-12-11 09:30:00.0' as timestamp) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -7280,7 +7280,7 @@ SELECT cast(1 as decimal(20, 0)) <= cast('2017-12-11 09:30:00.0' as timestamp) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(20,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -7289,7 +7289,7 @@ SELECT cast(1 as decimal(3, 0)) <= cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(3,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -7298,7 +7298,7 @@ SELECT cast(1 as decimal(5, 0)) <= cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(5,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -7307,7 +7307,7 @@ SELECT cast(1 as decimal(10, 0)) <= cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -7316,7 +7316,7 @@ SELECT cast(1 as decimal(20, 0)) <= cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(20,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -7549,7 +7549,7 @@ SELECT cast('1' as binary) > cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -7558,7 +7558,7 @@ SELECT cast('1' as binary) > cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -7567,7 +7567,7 @@ SELECT cast('1' as binary) > cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -7576,7 +7576,7 @@ SELECT cast('1' as binary) > cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -7585,7 +7585,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) > cast(1 as decimal(3, 0)) FRO
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -7594,7 +7594,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) > cast(1 as decimal(5, 0)) FRO
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -7603,7 +7603,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) > cast(1 as decimal(10, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -7612,7 +7612,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) > cast(1 as decimal(20, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -7621,7 +7621,7 @@ SELECT cast('2017-12-11 09:30:00' as date) > cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -7630,7 +7630,7 @@ SELECT cast('2017-12-11 09:30:00' as date) > cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -7639,7 +7639,7 @@ SELECT cast('2017-12-11 09:30:00' as date) > cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -7648,7 +7648,7 @@ SELECT cast('2017-12-11 09:30:00' as date) > cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -7913,7 +7913,7 @@ SELECT cast(1 as decimal(3, 0)) > cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(3,0)) > CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) > CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -7922,7 +7922,7 @@ SELECT cast(1 as decimal(5, 0)) > cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(5,0)) > CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) > CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -7931,7 +7931,7 @@ SELECT cast(1 as decimal(10, 0)) > cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) > CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) > CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -7940,7 +7940,7 @@ SELECT cast(1 as decimal(20, 0)) > cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(20,0)) > CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) > CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -7949,7 +7949,7 @@ SELECT cast(1 as decimal(3, 0)) > cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(3,0)) > CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) > CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -7958,7 +7958,7 @@ SELECT cast(1 as decimal(5, 0)) > cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(5,0)) > CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) > CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -7967,7 +7967,7 @@ SELECT cast(1 as decimal(10, 0)) > cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) > CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) > CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -7976,7 +7976,7 @@ SELECT cast(1 as decimal(20, 0)) > cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(20,0)) > CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) > CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -7985,7 +7985,7 @@ SELECT cast(1 as decimal(3, 0)) > cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(3,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -7994,7 +7994,7 @@ SELECT cast(1 as decimal(5, 0)) > cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(5,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -8003,7 +8003,7 @@ SELECT cast(1 as decimal(10, 0)) > cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -8012,7 +8012,7 @@ SELECT cast(1 as decimal(20, 0)) > cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(20,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -8021,7 +8021,7 @@ SELECT cast(1 as decimal(3, 0)) > cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(3,0)) > CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) > CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -8030,7 +8030,7 @@ SELECT cast(1 as decimal(5, 0)) > cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(5,0)) > CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) > CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -8039,7 +8039,7 @@ SELECT cast(1 as decimal(10, 0)) > cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) > CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) > CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -8048,7 +8048,7 @@ SELECT cast(1 as decimal(20, 0)) > cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(20,0)) > CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) > CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -8281,7 +8281,7 @@ SELECT cast('1' as binary) >= cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -8290,7 +8290,7 @@ SELECT cast('1' as binary) >= cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -8299,7 +8299,7 @@ SELECT cast('1' as binary) >= cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -8308,7 +8308,7 @@ SELECT cast('1' as binary) >= cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -8317,7 +8317,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) >= cast(1 as decimal(3, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -8326,7 +8326,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) >= cast(1 as decimal(5, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -8335,7 +8335,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) >= cast(1 as decimal(10, 0)) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -8344,7 +8344,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) >= cast(1 as decimal(20, 0)) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -8353,7 +8353,7 @@ SELECT cast('2017-12-11 09:30:00' as date) >= cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -8362,7 +8362,7 @@ SELECT cast('2017-12-11 09:30:00' as date) >= cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -8371,7 +8371,7 @@ SELECT cast('2017-12-11 09:30:00' as date) >= cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -8380,7 +8380,7 @@ SELECT cast('2017-12-11 09:30:00' as date) >= cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -8645,7 +8645,7 @@ SELECT cast(1 as decimal(3, 0)) >= cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(3,0)) >= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) >= CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -8654,7 +8654,7 @@ SELECT cast(1 as decimal(5, 0)) >= cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(5,0)) >= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) >= CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -8663,7 +8663,7 @@ SELECT cast(1 as decimal(10, 0)) >= cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) >= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) >= CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -8672,7 +8672,7 @@ SELECT cast(1 as decimal(20, 0)) >= cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(20,0)) >= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) >= CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -8681,7 +8681,7 @@ SELECT cast(1 as decimal(3, 0)) >= cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(3,0)) >= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) >= CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -8690,7 +8690,7 @@ SELECT cast(1 as decimal(5, 0)) >= cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(5,0)) >= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) >= CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -8699,7 +8699,7 @@ SELECT cast(1 as decimal(10, 0)) >= cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) >= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) >= CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -8708,7 +8708,7 @@ SELECT cast(1 as decimal(20, 0)) >= cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(20,0)) >= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) >= CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -8717,7 +8717,7 @@ SELECT cast(1 as decimal(3, 0)) >= cast('2017-12-11 09:30:00.0' as timestamp) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(3,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -8726,7 +8726,7 @@ SELECT cast(1 as decimal(5, 0)) >= cast('2017-12-11 09:30:00.0' as timestamp) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(5,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -8735,7 +8735,7 @@ SELECT cast(1 as decimal(10, 0)) >= cast('2017-12-11 09:30:00.0' as timestamp) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -8744,7 +8744,7 @@ SELECT cast(1 as decimal(20, 0)) >= cast('2017-12-11 09:30:00.0' as timestamp) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(20,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -8753,7 +8753,7 @@ SELECT cast(1 as decimal(3, 0)) >= cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(3,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -8762,7 +8762,7 @@ SELECT cast(1 as decimal(5, 0)) >= cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(5,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -8771,7 +8771,7 @@ SELECT cast(1 as decimal(10, 0)) >= cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -8780,7 +8780,7 @@ SELECT cast(1 as decimal(20, 0)) >= cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(20,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -9013,7 +9013,7 @@ SELECT cast('1' as binary) <> cast(1 as decimal(3, 0)) FROM t
binary) <> cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -9022,7 +9022,7 @@ SELECT cast('1' as binary) <> cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -9031,7 +9031,7 @@ SELECT cast('1' as binary) <> cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -9040,7 +9040,7 @@ SELECT cast('1' as binary) <> cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -9049,7 +9049,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <> cast(1 as decimal(3, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -9058,7 +9058,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <> cast(1 as decimal(5, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -9067,7 +9067,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <> cast(1 as decimal(10, 0)) F struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -9076,7 +9076,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <> cast(1 as decimal(20, 0)) F struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; 
line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -9085,7 +9085,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <> cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -9094,7 +9094,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <> cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -9103,7 +9103,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <> cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -9112,7 +9112,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <> cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -9377,7 +9377,7 @@ SELECT cast(1 as decimal(3, 0)) <> cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DECIMAL(3,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) = CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -9386,7 +9386,7 @@ SELECT cast(1 as decimal(5, 0)) <> cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DECIMAL(5,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) = CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -9395,7 +9395,7 @@ SELECT cast(1 as decimal(10, 0)) <> cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DECIMAL(10,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) = CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -9404,7 +9404,7 @@ SELECT cast(1 as decimal(20, 0)) <> cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DECIMAL(20,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) = CAST('1' AS 
BINARY))' (decimal(20,0) and binary).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -9445,7 +9445,7 @@ SELECT cast(1 as decimal(3, 0)) <> cast('2017-12-11 09:30:00.0' as timestamp) F struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -9454,7 +9454,7 @@ SELECT cast(1 as decimal(5, 0)) <> cast('2017-12-11 09:30:00.0' as timestamp) F struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -9463,7 +9463,7 @@ SELECT cast(1 as decimal(10, 0)) <> cast('2017-12-11 09:30:00.0' as timestamp) F struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -9472,7 +9472,7 @@ SELECT cast(1 as decimal(20, 0)) <> cast('2017-12-11 09:30:00.0' as timestamp) F struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -9481,7 +9481,7 @@ SELECT cast(1 as decimal(3, 0)) <> cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -9490,7 +9490,7 @@ SELECT cast(1 as decimal(5, 0)) <> cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -9499,7 +9499,7 @@ SELECT cast(1 as decimal(10, 0)) <> cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -9508,4 +9508,4 @@ SELECT cast(1 as 
decimal(20, 0)) <> cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/division.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/division.sql.out index f08d2255f1840..f434b5201f33d 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/division.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/division.sql.out @@ -77,7 +77,7 @@ SELECT cast(1 as tinyint) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS TINYINT) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS TINYINT) / CAST('1' AS BINARY))' (tinyint and binary).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -86,7 +86,7 @@ SELECT cast(1 as tinyint) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS TINYINT) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS TINYINT) / CAST(1 AS BOOLEAN))' (tinyint and boolean).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -95,7 +95,7 @@ SELECT cast(1 as tinyint) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS TINYINT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS TINYINT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (tinyint and timestamp).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -104,7 +104,7 @@ SELECT cast(1 as tinyint) / cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS TINYINT) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS TINYINT) / CAST('2017-12-11 09:30:00' AS DATE))' (tinyint and date).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -177,7 +177,7 @@ SELECT cast(1 as smallint) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS SMALLINT) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS SMALLINT) / CAST('1' AS BINARY))' (smallint and binary).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -186,7 +186,7 @@ SELECT cast(1 as smallint) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS SMALLINT) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS SMALLINT) / CAST(1 AS BOOLEAN))' (smallint and boolean).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -195,7 +195,7 @@ SELECT cast(1 as smallint) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS SMALLINT) / 
CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS SMALLINT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (smallint and timestamp).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -204,7 +204,7 @@ SELECT cast(1 as smallint) / cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS SMALLINT) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS SMALLINT) / CAST('2017-12-11 09:30:00' AS DATE))' (smallint and date).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -277,7 +277,7 @@ SELECT cast(1 as int) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS INT) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS INT) / CAST('1' AS BINARY))' (int and binary).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -286,7 +286,7 @@ SELECT cast(1 as int) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS INT) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS INT) / CAST(1 AS BOOLEAN))' (int and boolean).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -295,7 +295,7 @@ SELECT cast(1 as int) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS INT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS INT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (int and timestamp).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -304,7 +304,7 @@ SELECT cast(1 as int) / cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS INT) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS INT) / CAST('2017-12-11 09:30:00' AS DATE))' (int and date).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -377,7 +377,7 @@ SELECT cast(1 as bigint) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS BIGINT) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS BIGINT) / CAST('1' AS BINARY))' (bigint and binary).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -386,7 +386,7 @@ SELECT cast(1 as bigint) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS BIGINT) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS BIGINT) / CAST(1 AS BOOLEAN))' (bigint and boolean).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -395,7 +395,7 @@ SELECT cast(1 as bigint) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS BIGINT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS BIGINT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (bigint and timestamp).; line 1 
pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -404,7 +404,7 @@ SELECT cast(1 as bigint) / cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS BIGINT) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS BIGINT) / CAST('2017-12-11 09:30:00' AS DATE))' (bigint and date).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -477,7 +477,7 @@ SELECT cast(1 as float) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS FLOAT) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS FLOAT) / CAST('1' AS BINARY))' (float and binary).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -486,7 +486,7 @@ SELECT cast(1 as float) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS FLOAT) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS FLOAT) / CAST(1 AS BOOLEAN))' (float and boolean).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -495,7 +495,7 @@ SELECT cast(1 as float) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS FLOAT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS FLOAT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (float and timestamp).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -504,7 +504,7 @@ SELECT cast(1 as float) / cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS FLOAT) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS FLOAT) / CAST('2017-12-11 09:30:00' AS DATE))' (float and date).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -577,7 +577,7 @@ SELECT cast(1 as double) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DOUBLE) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DOUBLE) / CAST('1' AS BINARY))' (double and binary).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -586,7 +586,7 @@ SELECT cast(1 as double) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DOUBLE) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DOUBLE) / CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -595,7 +595,7 @@ SELECT cast(1 as double) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DOUBLE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DOUBLE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -604,7 +604,7 @@ SELECT cast(1 as double) / cast('2017-12-11 09:30:00' as date) FROM t 
struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DOUBLE) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DOUBLE) / CAST('2017-12-11 09:30:00' AS DATE))' (double and date).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -677,7 +677,7 @@ SELECT cast(1 as decimal(10, 0)) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -686,7 +686,7 @@ SELECT cast(1 as decimal(10, 0)) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -695,7 +695,7 @@ SELECT cast(1 as decimal(10, 0)) / cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -704,7 +704,7 @@ SELECT cast(1 as decimal(10, 0)) / cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -777,7 +777,7 @@ SELECT cast(1 as string) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST('1' AS BINARY))' (double and binary).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -786,7 +786,7 @@ SELECT cast(1 as string) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -795,7 +795,7 @@ SELECT cast(1 as string) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -804,7 +804,7 @@ SELECT cast(1 as 
string) / cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST('2017-12-11 09:30:00' AS DATE))' (double and date).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -813,7 +813,7 @@ SELECT cast('1' as binary) / cast(1 as tinyint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS TINYINT))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS TINYINT))' (binary and tinyint).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -822,7 +822,7 @@ SELECT cast('1' as binary) / cast(1 as smallint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS SMALLINT))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS SMALLINT))' (binary and smallint).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -831,7 +831,7 @@ SELECT cast('1' as binary) / cast(1 as int) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS INT))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS INT))' (binary and int).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -840,7 +840,7 @@ SELECT cast('1' as binary) / cast(1 as bigint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS BIGINT))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS BIGINT))' (binary and bigint).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -849,7 +849,7 @@ SELECT cast('1' as binary) / cast(1 as float) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS FLOAT))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS FLOAT))' (binary and float).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -858,7 +858,7 @@ SELECT cast('1' as binary) / cast(1 as double) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS DOUBLE))' (binary and double).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -867,7 +867,7 @@ SELECT cast('1' as binary) / cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -876,7 +876,7 @@ SELECT cast('1' as binary) / cast(1 as string) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS BINARY) / CAST(CAST(1 AS STRING) AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(CAST(1 AS STRING) AS DOUBLE))' 
(binary and double).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -885,7 +885,7 @@ SELECT cast('1' as binary) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS BINARY) / CAST('1' AS BINARY))' due to data type mismatch: '(CAST('1' AS BINARY) / CAST('1' AS BINARY))' requires (double or decimal) type, not binary; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -894,7 +894,7 @@ SELECT cast('1' as binary) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS BOOLEAN))' (binary and boolean).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -903,7 +903,7 @@ SELECT cast('1' as binary) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS BINARY) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (binary and timestamp).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -912,7 +912,7 @@ SELECT cast('1' as binary) / cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS BINARY) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST('2017-12-11 09:30:00' AS DATE))' (binary and date).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -921,7 +921,7 @@ SELECT cast(1 as boolean) / cast(1 as tinyint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS TINYINT))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS TINYINT))' (boolean and tinyint).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -930,7 +930,7 @@ SELECT cast(1 as boolean) / cast(1 as smallint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS SMALLINT))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS SMALLINT))' (boolean and smallint).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -939,7 +939,7 @@ SELECT cast(1 as boolean) / cast(1 as int) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS INT))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS INT))' (boolean and int).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -948,7 +948,7 @@ SELECT cast(1 as boolean) / cast(1 as bigint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS BIGINT))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS BIGINT))' (boolean and bigint).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -957,7 +957,7 @@ SELECT cast(1 as boolean) / cast(1 as float) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException 
-cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS FLOAT))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS FLOAT))' (boolean and float).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -966,7 +966,7 @@ SELECT cast(1 as boolean) / cast(1 as double) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS DOUBLE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS DOUBLE))' (boolean and double).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -975,7 +975,7 @@ SELECT cast(1 as boolean) / cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS DECIMAL(10,0)))' (boolean and decimal(10,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -984,7 +984,7 @@ SELECT cast(1 as boolean) / cast(1 as string) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS BOOLEAN) / CAST(CAST(1 AS STRING) AS DOUBLE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(CAST(1 AS STRING) AS DOUBLE))' (boolean and double).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -993,7 +993,7 @@ SELECT cast(1 as boolean) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS BOOLEAN) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST('1' AS BINARY))' (boolean and binary).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1002,7 +1002,7 @@ SELECT cast(1 as boolean) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS BOOLEAN))' due to data type mismatch: '(CAST(1 AS BOOLEAN) / CAST(1 AS BOOLEAN))' requires (double or decimal) type, not boolean; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1011,7 +1011,7 @@ SELECT cast(1 as boolean) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS BOOLEAN) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (boolean and timestamp).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1020,7 +1020,7 @@ SELECT cast(1 as boolean) / cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS BOOLEAN) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST('2017-12-11 09:30:00' AS DATE))' (boolean and date).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1029,7 +1029,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as tinyint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS TINYINT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS 
TIMESTAMP) / CAST(1 AS TINYINT))' (timestamp and tinyint).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1038,7 +1038,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as smallint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS SMALLINT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS SMALLINT))' (timestamp and smallint).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1047,7 +1047,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as int) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS INT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS INT))' (timestamp and int).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1056,7 +1056,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as bigint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS BIGINT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS BIGINT))' (timestamp and bigint).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1065,7 +1065,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as float) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS FLOAT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS FLOAT))' (timestamp and float).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1074,7 +1074,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as double) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DOUBLE))' (timestamp and double).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1083,7 +1083,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as decimal(10, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1092,7 +1092,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as string) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(CAST(1 AS STRING) AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(CAST(1 AS STRING) AS DOUBLE))' (timestamp and double).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1101,7 +1101,7 @@ SELECT cast('2017-12-11 09:30:00.0' as 
timestamp) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('1' AS BINARY))' (timestamp and binary).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1110,7 +1110,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS BOOLEAN))' (timestamp and boolean).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1119,7 +1119,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast('2017-12-11 09:30:00.0' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' requires (double or decimal) type, not timestamp; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1128,7 +1128,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast('2017-12-11 09:30:00' a struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('2017-12-11 09:30:00' AS DATE))' (timestamp and date).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1137,7 +1137,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as tinyint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS TINYINT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS TINYINT))' (date and tinyint).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1146,7 +1146,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as smallint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS SMALLINT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS SMALLINT))' (date and smallint).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1155,7 +1155,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as int) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS INT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS INT))' (date and int).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1164,7 +1164,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as bigint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS BIGINT))' due to data type mismatch: differing types in 
'(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS BIGINT))' (date and bigint).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1173,7 +1173,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as float) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS FLOAT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS FLOAT))' (date and float).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1182,7 +1182,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as double) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS DOUBLE))' (date and double).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1191,7 +1191,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1200,7 +1200,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as string) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(CAST(1 AS STRING) AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(CAST(1 AS STRING) AS DOUBLE))' (date and double).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1209,7 +1209,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('1' AS BINARY))' (date and binary).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1218,7 +1218,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS BOOLEAN))' (date and boolean).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1227,7 +1227,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast('2017-12-11 09:30:00.0' as tim struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (date and timestamp).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1236,4 +1236,4 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast('2017-12-11 09:30:00' as date) struct<> -- !query output 
org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('2017-12-11 09:30:00' AS DATE))' requires (double or decimal) type, not date; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/ifCoercion.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/ifCoercion.sql.out index 2ef149f5f379c..d1f0c32c1e16f 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/ifCoercion.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/ifCoercion.sql.out @@ -77,7 +77,7 @@ SELECT IF(true, cast(1 as tinyint), cast('2' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST(1 AS TINYINT), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS TINYINT), CAST('2' AS BINARY)))' (tinyint and binary).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -86,7 +86,7 @@ SELECT IF(true, cast(1 as tinyint), cast(2 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST(1 AS TINYINT), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS TINYINT), CAST(2 AS BOOLEAN)))' (tinyint and boolean).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -95,7 +95,7 @@ SELECT IF(true, cast(1 as tinyint), cast('2017-12-11 09:30:00.0' as timestamp)) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST(1 AS TINYINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS TINYINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (tinyint and timestamp).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -104,7 +104,7 @@ SELECT IF(true, cast(1 as tinyint), cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST(1 AS TINYINT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS TINYINT), CAST('2017-12-11 09:30:00' AS DATE)))' (tinyint and date).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -177,7 +177,7 @@ SELECT IF(true, cast(1 as smallint), cast('2' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST(1 AS SMALLINT), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS SMALLINT), CAST('2' AS BINARY)))' (smallint and binary).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -186,7 +186,7 @@ SELECT IF(true, cast(1 as smallint), cast(2 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST(1 AS SMALLINT), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS SMALLINT), CAST(2 AS BOOLEAN)))' (smallint and boolean).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -195,7 +195,7 @@ SELECT IF(true, cast(1 as smallint), cast('2017-12-11 09:30:00.0' as timestamp)) struct<> -- 
!query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST(1 AS SMALLINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS SMALLINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (smallint and timestamp).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -204,7 +204,7 @@ SELECT IF(true, cast(1 as smallint), cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST(1 AS SMALLINT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS SMALLINT), CAST('2017-12-11 09:30:00' AS DATE)))' (smallint and date).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -277,7 +277,7 @@ SELECT IF(true, cast(1 as int), cast('2' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST(1 AS INT), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS INT), CAST('2' AS BINARY)))' (int and binary).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -286,7 +286,7 @@ SELECT IF(true, cast(1 as int), cast(2 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST(1 AS INT), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS INT), CAST(2 AS BOOLEAN)))' (int and boolean).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -295,7 +295,7 @@ SELECT IF(true, cast(1 as int), cast('2017-12-11 09:30:00.0' as timestamp)) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST(1 AS INT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS INT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (int and timestamp).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -304,7 +304,7 @@ SELECT IF(true, cast(1 as int), cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST(1 AS INT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS INT), CAST('2017-12-11 09:30:00' AS DATE)))' (int and date).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -377,7 +377,7 @@ SELECT IF(true, cast(1 as bigint), cast('2' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST(1 AS BIGINT), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BIGINT), CAST('2' AS BINARY)))' (bigint and binary).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -386,7 +386,7 @@ SELECT IF(true, cast(1 as bigint), cast(2 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST(1 AS BIGINT), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BIGINT), CAST(2 AS BOOLEAN)))' (bigint and boolean).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -395,7 +395,7 @@ SELECT IF(true, cast(1 as bigint), cast('2017-12-11 09:30:00.0' as 
timestamp)) F struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST(1 AS BIGINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BIGINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (bigint and timestamp).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -404,7 +404,7 @@ SELECT IF(true, cast(1 as bigint), cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST(1 AS BIGINT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BIGINT), CAST('2017-12-11 09:30:00' AS DATE)))' (bigint and date).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -477,7 +477,7 @@ SELECT IF(true, cast(1 as float), cast('2' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST(1 AS FLOAT), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS FLOAT), CAST('2' AS BINARY)))' (float and binary).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -486,7 +486,7 @@ SELECT IF(true, cast(1 as float), cast(2 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST(1 AS FLOAT), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS FLOAT), CAST(2 AS BOOLEAN)))' (float and boolean).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -495,7 +495,7 @@ SELECT IF(true, cast(1 as float), cast('2017-12-11 09:30:00.0' as timestamp)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST(1 AS FLOAT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS FLOAT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (float and timestamp).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -504,7 +504,7 @@ SELECT IF(true, cast(1 as float), cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST(1 AS FLOAT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS FLOAT), CAST('2017-12-11 09:30:00' AS DATE)))' (float and date).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -577,7 +577,7 @@ SELECT IF(true, cast(1 as double), cast('2' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST(1 AS DOUBLE), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DOUBLE), CAST('2' AS BINARY)))' (double and binary).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -586,7 +586,7 @@ SELECT IF(true, cast(1 as double), cast(2 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST(1 AS DOUBLE), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DOUBLE), CAST(2 AS BOOLEAN)))' (double and boolean).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -595,7 +595,7 @@ SELECT IF(true, cast(1 as 
double), cast('2017-12-11 09:30:00.0' as timestamp)) F struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST(1 AS DOUBLE), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DOUBLE), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (double and timestamp).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -604,7 +604,7 @@ SELECT IF(true, cast(1 as double), cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST(1 AS DOUBLE), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DOUBLE), CAST('2017-12-11 09:30:00' AS DATE)))' (double and date).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -677,7 +677,7 @@ SELECT IF(true, cast(1 as decimal(10, 0)), cast('2' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST('2' AS BINARY)))' (decimal(10,0) and binary).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -686,7 +686,7 @@ SELECT IF(true, cast(1 as decimal(10, 0)), cast(2 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST(2 AS BOOLEAN)))' (decimal(10,0) and boolean).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -695,7 +695,7 @@ SELECT IF(true, cast(1 as decimal(10, 0)), cast('2017-12-11 09:30:00.0' as times struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (decimal(10,0) and timestamp).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -704,7 +704,7 @@ SELECT IF(true, cast(1 as decimal(10, 0)), cast('2017-12-11 09:30:00' as date)) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00' AS DATE)))' (decimal(10,0) and date).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -777,7 +777,7 @@ SELECT IF(true, cast(1 as string), cast('2' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST(1 AS STRING), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS STRING), CAST('2' AS BINARY)))' (string and binary).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -786,7 +786,7 @@ SELECT IF(true, cast(1 as string), cast(2 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST(1 AS STRING), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS STRING), CAST(2 AS BOOLEAN)))' 
(string and boolean).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -811,7 +811,7 @@ SELECT IF(true, cast('1' as binary), cast(2 as tinyint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS TINYINT)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS TINYINT)))' (binary and tinyint).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -820,7 +820,7 @@ SELECT IF(true, cast('1' as binary), cast(2 as smallint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS SMALLINT)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS SMALLINT)))' (binary and smallint).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -829,7 +829,7 @@ SELECT IF(true, cast('1' as binary), cast(2 as int)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS INT)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS INT)))' (binary and int).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -838,7 +838,7 @@ SELECT IF(true, cast('1' as binary), cast(2 as bigint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS BIGINT)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS BIGINT)))' (binary and bigint).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -847,7 +847,7 @@ SELECT IF(true, cast('1' as binary), cast(2 as float)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS FLOAT)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS FLOAT)))' (binary and float).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -856,7 +856,7 @@ SELECT IF(true, cast('1' as binary), cast(2 as double)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS DOUBLE)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS DOUBLE)))' (binary and double).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -865,7 +865,7 @@ SELECT IF(true, cast('1' as binary), cast(2 as decimal(10, 0))) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS DECIMAL(10,0))))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS DECIMAL(10,0))))' (binary and decimal(10,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -874,7 +874,7 @@ SELECT IF(true, cast('1' as binary), cast(2 as string)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS STRING)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS STRING)))' (binary and string).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -891,7 +891,7 @@ SELECT IF(true, 
cast('1' as binary), cast(2 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS BOOLEAN)))' (binary and boolean).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -900,7 +900,7 @@ SELECT IF(true, cast('1' as binary), cast('2017-12-11 09:30:00.0' as timestamp)) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST('1' AS BINARY), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (binary and timestamp).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -909,7 +909,7 @@ SELECT IF(true, cast('1' as binary), cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST('1' AS BINARY), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST('2017-12-11 09:30:00' AS DATE)))' (binary and date).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -918,7 +918,7 @@ SELECT IF(true, cast(1 as boolean), cast(2 as tinyint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS TINYINT)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS TINYINT)))' (boolean and tinyint).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -927,7 +927,7 @@ SELECT IF(true, cast(1 as boolean), cast(2 as smallint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS SMALLINT)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS SMALLINT)))' (boolean and smallint).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -936,7 +936,7 @@ SELECT IF(true, cast(1 as boolean), cast(2 as int)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS INT)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS INT)))' (boolean and int).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -945,7 +945,7 @@ SELECT IF(true, cast(1 as boolean), cast(2 as bigint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS BIGINT)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS BIGINT)))' (boolean and bigint).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -954,7 +954,7 @@ SELECT IF(true, cast(1 as boolean), cast(2 as float)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS FLOAT)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS FLOAT)))' (boolean and float).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -963,7 +963,7 @@ SELECT IF(true, cast(1 as boolean), cast(2 as double)) FROM t struct<> -- 
!query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS DOUBLE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS DOUBLE)))' (boolean and double).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -972,7 +972,7 @@ SELECT IF(true, cast(1 as boolean), cast(2 as decimal(10, 0))) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS DECIMAL(10,0))))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS DECIMAL(10,0))))' (boolean and decimal(10,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -981,7 +981,7 @@ SELECT IF(true, cast(1 as boolean), cast(2 as string)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS STRING)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS STRING)))' (boolean and string).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -990,7 +990,7 @@ SELECT IF(true, cast(1 as boolean), cast('2' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST('2' AS BINARY)))' (boolean and binary).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1007,7 +1007,7 @@ SELECT IF(true, cast(1 as boolean), cast('2017-12-11 09:30:00.0' as timestamp)) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (boolean and timestamp).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1016,7 +1016,7 @@ SELECT IF(true, cast(1 as boolean), cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST('2017-12-11 09:30:00' AS DATE)))' (boolean and date).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1025,7 +1025,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00.0' as timestamp), cast(2 as tinyint)) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS TINYINT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS TINYINT)))' (timestamp and tinyint).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1034,7 +1034,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00.0' as timestamp), cast(2 as smallint)) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS SMALLINT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS SMALLINT)))' (timestamp and smallint).; line 1 pos 7 
+{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1043,7 +1043,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00.0' as timestamp), cast(2 as int)) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS INT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS INT)))' (timestamp and int).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1052,7 +1052,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00.0' as timestamp), cast(2 as bigint)) F struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS BIGINT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS BIGINT)))' (timestamp and bigint).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1061,7 +1061,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00.0' as timestamp), cast(2 as float)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS FLOAT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS FLOAT)))' (timestamp and float).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1070,7 +1070,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00.0' as timestamp), cast(2 as double)) F struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS DOUBLE)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS DOUBLE)))' (timestamp and double).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1079,7 +1079,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00.0' as timestamp), cast(2 as decimal(10 struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS DECIMAL(10,0))))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS DECIMAL(10,0))))' (timestamp and decimal(10,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1096,7 +1096,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00.0' as timestamp), cast('2' as binary)) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST('2' AS BINARY)))' (timestamp and binary).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1105,7 +1105,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00.0' as timestamp), cast(2 as boolean)) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS BOOLEAN)))' (timestamp and boolean).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1130,7 +1130,7 @@ 
SELECT IF(true, cast('2017-12-12 09:30:00' as date), cast(2 as tinyint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS TINYINT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS TINYINT)))' (date and tinyint).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1139,7 +1139,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00' as date), cast(2 as smallint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS SMALLINT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS SMALLINT)))' (date and smallint).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1148,7 +1148,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00' as date), cast(2 as int)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS INT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS INT)))' (date and int).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1157,7 +1157,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00' as date), cast(2 as bigint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS BIGINT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS BIGINT)))' (date and bigint).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1166,7 +1166,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00' as date), cast(2 as float)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS FLOAT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS FLOAT)))' (date and float).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1175,7 +1175,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00' as date), cast(2 as double)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS DOUBLE)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS DOUBLE)))' (date and double).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1184,7 +1184,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00' as date), cast(2 as decimal(10, 0))) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS DECIMAL(10,0))))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS DECIMAL(10,0))))' (date and decimal(10,0)).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1201,7 +1201,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00' as date), cast('2' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST('2' AS BINARY)))' due to 
data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST('2' AS BINARY)))' (date and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query


@@ -1210,7 +1210,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00' as date), cast(2 as boolean)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS BOOLEAN)))' (date and boolean).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/inConversion.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/inConversion.sql.out
index d81ae31f1b815..89db8e95cc593 100644
--- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/inConversion.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/inConversion.sql.out
@@ -77,7 +77,7 @@ SELECT cast(1 as tinyint) in (cast('1' as binary)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS TINYINT) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: tinyint != binary; line 1 pos 26
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query


@@ -86,7 +86,7 @@ SELECT cast(1 as tinyint) in (cast(1 as boolean)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS TINYINT) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: tinyint != boolean; line 1 pos 26
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query


@@ -95,7 +95,7 @@ SELECT cast(1 as tinyint) in (cast('2017-12-11 09:30:00.0' as timestamp)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS TINYINT) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: tinyint != timestamp; line 1 pos 26
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query


@@ -104,7 +104,7 @@ SELECT cast(1 as tinyint) in (cast('2017-12-11 09:30:00' as date)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS TINYINT) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: tinyint != date; line 1 pos 26
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query


@@ -177,7 +177,7 @@ SELECT cast(1 as smallint) in (cast('1' as binary)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS SMALLINT) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: smallint != binary; line 1 pos 27
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query


@@ -186,7 +186,7 @@ SELECT cast(1 as smallint) in (cast(1 as boolean)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS SMALLINT) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: smallint != boolean; line 1 pos 27
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query


@@ -195,7 +195,7 @@ SELECT cast(1 as smallint) in (cast('2017-12-11 09:30:00.0' as timestamp)) FROM
 struct<>
-- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS SMALLINT) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: smallint != timestamp; line 1 pos 27 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -204,7 +204,7 @@ SELECT cast(1 as smallint) in (cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS SMALLINT) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: smallint != date; line 1 pos 27 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -277,7 +277,7 @@ SELECT cast(1 as int) in (cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS INT) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: int != binary; line 1 pos 22 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -286,7 +286,7 @@ SELECT cast(1 as int) in (cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS INT) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: int != boolean; line 1 pos 22 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -295,7 +295,7 @@ SELECT cast(1 as int) in (cast('2017-12-11 09:30:00.0' as timestamp)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS INT) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: int != timestamp; line 1 pos 22 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -304,7 +304,7 @@ SELECT cast(1 as int) in (cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS INT) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: int != date; line 1 pos 22 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -377,7 +377,7 @@ SELECT cast(1 as bigint) in (cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS BIGINT) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: bigint != binary; line 1 pos 25 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -386,7 +386,7 @@ SELECT cast(1 as bigint) in (cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS BIGINT) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: bigint != boolean; line 1 pos 25 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -395,7 +395,7 @@ SELECT cast(1 as bigint) in (cast('2017-12-11 09:30:00.0' as timestamp)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS BIGINT) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: bigint != timestamp; line 1 pos 25 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -404,7 +404,7 @@ SELECT cast(1 as bigint) in (cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output 
org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS BIGINT) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: bigint != date; line 1 pos 25 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -477,7 +477,7 @@ SELECT cast(1 as float) in (cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS FLOAT) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: float != binary; line 1 pos 24 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -486,7 +486,7 @@ SELECT cast(1 as float) in (cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS FLOAT) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: float != boolean; line 1 pos 24 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -495,7 +495,7 @@ SELECT cast(1 as float) in (cast('2017-12-11 09:30:00.0' as timestamp)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS FLOAT) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: float != timestamp; line 1 pos 24 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -504,7 +504,7 @@ SELECT cast(1 as float) in (cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS FLOAT) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: float != date; line 1 pos 24 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -577,7 +577,7 @@ SELECT cast(1 as double) in (cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DOUBLE) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: double != binary; line 1 pos 25 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -586,7 +586,7 @@ SELECT cast(1 as double) in (cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DOUBLE) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: double != boolean; line 1 pos 25 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -595,7 +595,7 @@ SELECT cast(1 as double) in (cast('2017-12-11 09:30:00.0' as timestamp)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DOUBLE) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: double != timestamp; line 1 pos 25 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -604,7 +604,7 @@ SELECT cast(1 as double) in (cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DOUBLE) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: double != date; line 1 pos 25 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -677,7 +677,7 @@ SELECT cast(1 as decimal(10, 0)) in (cast('1' as binary)) FROM t struct<> -- !query output 
org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != binary; line 1 pos 33 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -686,7 +686,7 @@ SELECT cast(1 as decimal(10, 0)) in (cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != boolean; line 1 pos 33 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -695,7 +695,7 @@ SELECT cast(1 as decimal(10, 0)) in (cast('2017-12-11 09:30:00.0' as timestamp)) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != timestamp; line 1 pos 33 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -704,7 +704,7 @@ SELECT cast(1 as decimal(10, 0)) in (cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != date; line 1 pos 33 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -777,7 +777,7 @@ SELECT cast(1 as string) in (cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS STRING) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: string != binary; line 1 pos 25 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -786,7 +786,7 @@ SELECT cast(1 as string) in (cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS STRING) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: string != boolean; line 1 pos 25 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -811,7 +811,7 @@ SELECT cast('1' as binary) in (cast(1 as tinyint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: binary != tinyint; line 1 pos 27 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -820,7 +820,7 @@ SELECT cast('1' as binary) in (cast(1 as smallint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: binary != smallint; line 1 pos 27 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -829,7 +829,7 @@ SELECT cast('1' as binary) in (cast(1 as int)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS INT)))' due to data type mismatch: Arguments must be same type but were: binary != int; line 1 pos 27 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -838,7 +838,7 @@ SELECT cast('1' as binary) in (cast(1 as bigint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException 
-cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: binary != bigint; line 1 pos 27 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -847,7 +847,7 @@ SELECT cast('1' as binary) in (cast(1 as float)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: binary != float; line 1 pos 27 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -856,7 +856,7 @@ SELECT cast('1' as binary) in (cast(1 as double)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: binary != double; line 1 pos 27 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -865,7 +865,7 @@ SELECT cast('1' as binary) in (cast(1 as decimal(10, 0))) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: binary != decimal(10,0); line 1 pos 27 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -874,7 +874,7 @@ SELECT cast('1' as binary) in (cast(1 as string)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS STRING)))' due to data type mismatch: Arguments must be same type but were: binary != string; line 1 pos 27 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -891,7 +891,7 @@ SELECT cast('1' as binary) in (cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: binary != boolean; line 1 pos 27 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -900,7 +900,7 @@ SELECT cast('1' as binary) in (cast('2017-12-11 09:30:00.0' as timestamp)) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS BINARY) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: binary != timestamp; line 1 pos 27 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -909,7 +909,7 @@ SELECT cast('1' as binary) in (cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS BINARY) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: binary != date; line 1 pos 27 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -918,7 +918,7 @@ SELECT true in (cast(1 as tinyint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(true IN (CAST(1 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: boolean != tinyint; line 1 pos 12 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -927,7 +927,7 @@ SELECT true in (cast(1 as smallint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(true IN (CAST(1 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: 
boolean != smallint; line 1 pos 12 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -936,7 +936,7 @@ SELECT true in (cast(1 as int)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(true IN (CAST(1 AS INT)))' due to data type mismatch: Arguments must be same type but were: boolean != int; line 1 pos 12 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -945,7 +945,7 @@ SELECT true in (cast(1 as bigint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(true IN (CAST(1 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: boolean != bigint; line 1 pos 12 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -954,7 +954,7 @@ SELECT true in (cast(1 as float)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(true IN (CAST(1 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: boolean != float; line 1 pos 12 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -963,7 +963,7 @@ SELECT true in (cast(1 as double)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(true IN (CAST(1 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: boolean != double; line 1 pos 12 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -972,7 +972,7 @@ SELECT true in (cast(1 as decimal(10, 0))) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(true IN (CAST(1 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: boolean != decimal(10,0); line 1 pos 12 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -981,7 +981,7 @@ SELECT true in (cast(1 as string)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(true IN (CAST(1 AS STRING)))' due to data type mismatch: Arguments must be same type but were: boolean != string; line 1 pos 12 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -990,7 +990,7 @@ SELECT true in (cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(true IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: boolean != binary; line 1 pos 12 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1007,7 +1007,7 @@ SELECT true in (cast('2017-12-11 09:30:00.0' as timestamp)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(true IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: boolean != timestamp; line 1 pos 12 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1016,7 +1016,7 @@ SELECT true in (cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(true IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: boolean != date; line 1 pos 12 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1025,7 +1025,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast(2 as tinyint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN 
(CAST(2 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: timestamp != tinyint; line 1 pos 50 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1034,7 +1034,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast(2 as smallint)) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: timestamp != smallint; line 1 pos 50 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1043,7 +1043,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast(2 as int)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS INT)))' due to data type mismatch: Arguments must be same type but were: timestamp != int; line 1 pos 50 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1052,7 +1052,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast(2 as bigint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: timestamp != bigint; line 1 pos 50 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1061,7 +1061,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast(2 as float)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: timestamp != float; line 1 pos 50 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1070,7 +1070,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast(2 as double)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: timestamp != double; line 1 pos 50 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1079,7 +1079,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast(2 as decimal(10, 0))) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: timestamp != decimal(10,0); line 1 pos 50 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1096,7 +1096,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast('2' as binary)) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: timestamp != binary; line 1 pos 50 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1105,7 +1105,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast(2 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: timestamp != boolean; line 1 pos 50 
+{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1130,7 +1130,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast(2 as tinyint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: date != tinyint; line 1 pos 43 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1139,7 +1139,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast(2 as smallint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: date != smallint; line 1 pos 43 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1148,7 +1148,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast(2 as int)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS INT)))' due to data type mismatch: Arguments must be same type but were: date != int; line 1 pos 43 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1157,7 +1157,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast(2 as bigint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: date != bigint; line 1 pos 43 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1166,7 +1166,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast(2 as float)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: date != float; line 1 pos 43 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1175,7 +1175,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast(2 as double)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: date != double; line 1 pos 43 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1184,7 +1184,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast(2 as decimal(10, 0))) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: date != decimal(10,0); line 1 pos 43 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1201,7 +1201,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast('2' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: date != binary; line 1 pos 43 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1210,7 +1210,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast(2 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-12 09:30:00' AS 
DATE) IN (CAST(2 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: date != boolean; line 1 pos 43 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1299,7 +1299,7 @@ SELECT cast(1 as tinyint) in (cast(1 as tinyint), cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS TINYINT) IN (CAST(1 AS TINYINT), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: tinyint != binary; line 1 pos 26 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1308,7 +1308,7 @@ SELECT cast(1 as tinyint) in (cast(1 as tinyint), cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS TINYINT) IN (CAST(1 AS TINYINT), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: tinyint != boolean; line 1 pos 26 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1317,7 +1317,7 @@ SELECT cast(1 as tinyint) in (cast(1 as tinyint), cast('2017-12-11 09:30:00.0' a struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS TINYINT) IN (CAST(1 AS TINYINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: tinyint != timestamp; line 1 pos 26 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1326,7 +1326,7 @@ SELECT cast(1 as tinyint) in (cast(1 as tinyint), cast('2017-12-11 09:30:00' as struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS TINYINT) IN (CAST(1 AS TINYINT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: tinyint != date; line 1 pos 26 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1399,7 +1399,7 @@ SELECT cast(1 as smallint) in (cast(1 as smallint), cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS SMALLINT) IN (CAST(1 AS SMALLINT), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: smallint != binary; line 1 pos 27 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1408,7 +1408,7 @@ SELECT cast(1 as smallint) in (cast(1 as smallint), cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS SMALLINT) IN (CAST(1 AS SMALLINT), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: smallint != boolean; line 1 pos 27 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1417,7 +1417,7 @@ SELECT cast(1 as smallint) in (cast(1 as smallint), cast('2017-12-11 09:30:00.0' struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS SMALLINT) IN (CAST(1 AS SMALLINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: smallint != timestamp; line 1 pos 27 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1426,7 +1426,7 @@ SELECT cast(1 as smallint) in (cast(1 as smallint), cast('2017-12-11 09:30:00' a struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS SMALLINT) IN (CAST(1 AS SMALLINT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: 
smallint != date; line 1 pos 27 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1499,7 +1499,7 @@ SELECT cast(1 as int) in (cast(1 as int), cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS INT) IN (CAST(1 AS INT), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: int != binary; line 1 pos 22 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1508,7 +1508,7 @@ SELECT cast(1 as int) in (cast(1 as int), cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS INT) IN (CAST(1 AS INT), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: int != boolean; line 1 pos 22 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1517,7 +1517,7 @@ SELECT cast(1 as int) in (cast(1 as int), cast('2017-12-11 09:30:00.0' as timest struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS INT) IN (CAST(1 AS INT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: int != timestamp; line 1 pos 22 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1526,7 +1526,7 @@ SELECT cast(1 as int) in (cast(1 as int), cast('2017-12-11 09:30:00' as date)) F struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS INT) IN (CAST(1 AS INT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: int != date; line 1 pos 22 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1599,7 +1599,7 @@ SELECT cast(1 as bigint) in (cast(1 as bigint), cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS BIGINT) IN (CAST(1 AS BIGINT), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: bigint != binary; line 1 pos 25 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1608,7 +1608,7 @@ SELECT cast(1 as bigint) in (cast(1 as bigint), cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS BIGINT) IN (CAST(1 AS BIGINT), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: bigint != boolean; line 1 pos 25 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1617,7 +1617,7 @@ SELECT cast(1 as bigint) in (cast(1 as bigint), cast('2017-12-11 09:30:00.0' as struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS BIGINT) IN (CAST(1 AS BIGINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: bigint != timestamp; line 1 pos 25 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1626,7 +1626,7 @@ SELECT cast(1 as bigint) in (cast(1 as bigint), cast('2017-12-11 09:30:00' as da struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS BIGINT) IN (CAST(1 AS BIGINT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: bigint != date; line 1 pos 25 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1699,7 +1699,7 @@ SELECT cast(1 as float) in (cast(1 as float), cast('1' as binary)) 
FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS FLOAT) IN (CAST(1 AS FLOAT), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: float != binary; line 1 pos 24
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1708,7 +1708,7 @@ SELECT cast(1 as float) in (cast(1 as float), cast(1 as boolean)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS FLOAT) IN (CAST(1 AS FLOAT), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: float != boolean; line 1 pos 24
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1717,7 +1717,7 @@ SELECT cast(1 as float) in (cast(1 as float), cast('2017-12-11 09:30:00.0' as ti
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS FLOAT) IN (CAST(1 AS FLOAT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: float != timestamp; line 1 pos 24
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1726,7 +1726,7 @@ SELECT cast(1 as float) in (cast(1 as float), cast('2017-12-11 09:30:00' as date
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS FLOAT) IN (CAST(1 AS FLOAT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: float != date; line 1 pos 24
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1799,7 +1799,7 @@ SELECT cast(1 as double) in (cast(1 as double), cast('1' as binary)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DOUBLE) IN (CAST(1 AS DOUBLE), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: double != binary; line 1 pos 25
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1808,7 +1808,7 @@ SELECT cast(1 as double) in (cast(1 as double), cast(1 as boolean)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DOUBLE) IN (CAST(1 AS DOUBLE), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: double != boolean; line 1 pos 25
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1817,7 +1817,7 @@ SELECT cast(1 as double) in (cast(1 as double), cast('2017-12-11 09:30:00.0' as
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DOUBLE) IN (CAST(1 AS DOUBLE), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: double != timestamp; line 1 pos 25
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1826,7 +1826,7 @@ SELECT cast(1 as double) in (cast(1 as double), cast('2017-12-11 09:30:00' as da
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DOUBLE) IN (CAST(1 AS DOUBLE), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: double != date; line 1 pos 25
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1899,7 +1899,7 @@ SELECT cast(1 as decimal(10, 0)) in (cast(1 as decimal(10, 0)), cast('1' as bina
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST(1 AS DECIMAL(10,0)), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != binary; line 1 pos 33
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1908,7 +1908,7 @@ SELECT cast(1 as decimal(10, 0)) in (cast(1 as decimal(10, 0)), cast(1 as boolea
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST(1 AS DECIMAL(10,0)), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != boolean; line 1 pos 33
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1917,7 +1917,7 @@ SELECT cast(1 as decimal(10, 0)) in (cast(1 as decimal(10, 0)), cast('2017-12-11
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != timestamp; line 1 pos 33
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1926,7 +1926,7 @@ SELECT cast(1 as decimal(10, 0)) in (cast(1 as decimal(10, 0)), cast('2017-12-11
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != date; line 1 pos 33
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -1999,7 +1999,7 @@ SELECT cast(1 as string) in (cast(1 as string), cast('1' as binary)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS STRING) IN (CAST(1 AS STRING), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: string != binary; line 1 pos 25
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2008,7 +2008,7 @@ SELECT cast(1 as string) in (cast(1 as string), cast(1 as boolean)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS STRING) IN (CAST(1 AS STRING), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: string != boolean; line 1 pos 25
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2033,7 +2033,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast(1 as tinyint)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: binary != tinyint; line 1 pos 27
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2042,7 +2042,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast(1 as smallint)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: binary != smallint; line 1 pos 27
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2051,7 +2051,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast(1 as int)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS INT)))' due to data type mismatch: Arguments must be same type but were: binary != int; line 1 pos 27
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2060,7 +2060,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast(1 as bigint)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: binary != bigint; line 1 pos 27
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2069,7 +2069,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast(1 as float)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: binary != float; line 1 pos 27
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2078,7 +2078,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast(1 as double)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: binary != double; line 1 pos 27
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2087,7 +2087,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast(1 as decimal(10, 0))) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: binary != decimal(10,0); line 1 pos 27
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2096,7 +2096,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast(1 as string)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS STRING)))' due to data type mismatch: Arguments must be same type but were: binary != string; line 1 pos 27
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2113,7 +2113,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast(1 as boolean)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: binary != boolean; line 1 pos 27
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2122,7 +2122,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast('2017-12-11 09:30:00.0'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: binary != timestamp; line 1 pos 27
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2131,7 +2131,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast('2017-12-11 09:30:00' a
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: binary != date; line 1 pos 27
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2140,7 +2140,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast(1 as tinyint)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: boolean != tinyint; line 1 pos 28
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2149,7 +2149,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast(1 as smallint)) FROM
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: boolean != smallint; line 1 pos 28
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2158,7 +2158,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast(1 as int)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS INT)))' due to data type mismatch: Arguments must be same type but were: boolean != int; line 1 pos 28
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2167,7 +2167,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast(1 as bigint)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: boolean != bigint; line 1 pos 28
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2176,7 +2176,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast(1 as float)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: boolean != float; line 1 pos 28
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2185,7 +2185,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast(1 as double)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: boolean != double; line 1 pos 28
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2194,7 +2194,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast(1 as decimal(10, 0)))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: boolean != decimal(10,0); line 1 pos 28
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2203,7 +2203,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast(1 as string)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS STRING)))' due to data type mismatch: Arguments must be same type but were: boolean != string; line 1 pos 28
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2212,7 +2212,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast('1' as binary)) FROM
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: boolean != binary; line 1 pos 28
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2229,7 +2229,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast('2017-12-11 09:30:00.
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: boolean != timestamp; line 1 pos 28
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2238,7 +2238,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast('2017-12-11 09:30:00'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: boolean != date; line 1 pos 28
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2247,7 +2247,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast('2017-12-12 09:30:00.
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: timestamp != tinyint; line 1 pos 50
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2256,7 +2256,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast('2017-12-12 09:30:00.
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: timestamp != smallint; line 1 pos 50
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2265,7 +2265,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast('2017-12-12 09:30:00.
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS INT)))' due to data type mismatch: Arguments must be same type but were: timestamp != int; line 1 pos 50
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2274,7 +2274,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast('2017-12-12 09:30:00.
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: timestamp != bigint; line 1 pos 50
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2283,7 +2283,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast('2017-12-12 09:30:00.
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: timestamp != float; line 1 pos 50
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2292,7 +2292,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast('2017-12-12 09:30:00.
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: timestamp != double; line 1 pos 50
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2301,7 +2301,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast('2017-12-12 09:30:00.
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: timestamp != decimal(10,0); line 1 pos 50
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2318,7 +2318,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast('2017-12-12 09:30:00.
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: timestamp != binary; line 1 pos 50
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2327,7 +2327,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast('2017-12-12 09:30:00.
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: timestamp != boolean; line 1 pos 50
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2352,7 +2352,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast('2017-12-12 09:30:00' as dat
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: date != tinyint; line 1 pos 43
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2361,7 +2361,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast('2017-12-12 09:30:00' as dat
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: date != smallint; line 1 pos 43
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2370,7 +2370,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast('2017-12-12 09:30:00' as dat
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS INT)))' due to data type mismatch: Arguments must be same type but were: date != int; line 1 pos 43
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -2379,7 +2379,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast('2017-12-12 09:30:00' as dat
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: date != bigint; line 1 pos 43
+{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2388,7 +2388,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast('2017-12-12 09:30:00' as dat struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: date != float; line 1 pos 43 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2397,7 +2397,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast('2017-12-12 09:30:00' as dat struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: date != double; line 1 pos 43 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2406,7 +2406,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast('2017-12-12 09:30:00' as dat struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: date != decimal(10,0); line 1 pos 43 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2423,7 +2423,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast('2017-12-12 09:30:00' as dat struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: date != binary; line 1 pos 43 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -2432,7 +2432,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast('2017-12-12 09:30:00' as dat struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: date != boolean; line 1 pos 43 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapZipWith.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapZipWith.sql.out index 2f176951df840..4bd8da674c52c 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapZipWith.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapZipWith.sql.out @@ -82,7 +82,7 @@ FROM various_maps struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'map_zip_with(various_maps.decimal_map1, various_maps.decimal_map2, lambdafunction(struct(k, v1, v2), k, v1, v2))' due to argument data type mismatch: The input to function map_zip_with should have been two maps with compatible key types, but the key types are [decimal(36,0), decimal(36,35)].; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -110,7 +110,7 @@ FROM various_maps struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'map_zip_with(various_maps.decimal_map2, various_maps.int_map, lambdafunction(struct(k, v1, v2), k, v1, v2))' due to argument data type mismatch: The input to function map_zip_with 
should have been two maps with compatible key types, but the key types are [decimal(36,35), int].; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapconcat.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapconcat.sql.out index 916d32c5e35c7..b67ed4e751776 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapconcat.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapconcat.sql.out @@ -91,7 +91,7 @@ FROM various_maps struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'map_concat(various_maps.tinyint_map1, various_maps.array_map1)' due to data type mismatch: input to function map_concat should all be the same type, but it's [map, map,array>]; line 2 pos 4 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -102,7 +102,7 @@ FROM various_maps struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'map_concat(various_maps.boolean_map1, various_maps.int_map2)' due to data type mismatch: input to function map_concat should all be the same type, but it's [map, map]; line 2 pos 4 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -113,7 +113,7 @@ FROM various_maps struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'map_concat(various_maps.int_map1, various_maps.struct_map2)' due to data type mismatch: input to function map_concat should all be the same type, but it's [map, map,struct>]; line 2 pos 4 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -124,7 +124,7 @@ FROM various_maps struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'map_concat(various_maps.struct_map1, various_maps.array_map2)' due to data type mismatch: input to function map_concat should all be the same type, but it's [map,struct>, map,array>]; line 2 pos 4 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -135,4 +135,4 @@ FROM various_maps struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'map_concat(various_maps.int_map1, various_maps.array_map2)' due to data type mismatch: input to function map_concat should all be the same type, but it's [map, map,array>]; line 2 pos 4 +{"errorClass":null,"messageParameters":[],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out index c24b06768e9fc..35ca14b6f3381 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out @@ -77,7 +77,7 @@ SELECT '1' + cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS DOUBLE) + CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) + CAST('1' AS BINARY))' (double and binary).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -86,7 +86,7 @@ SELECT '1' + cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS DOUBLE) + CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) + CAST(1 AS 
BOOLEAN))' (double and boolean).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -95,7 +95,7 @@ SELECT '1' + cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS DOUBLE) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -104,7 +104,7 @@ SELECT '1' + cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -177,7 +177,7 @@ SELECT '1' - cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS DOUBLE) - CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) - CAST('1' AS BINARY))' (double and binary).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -186,7 +186,7 @@ SELECT '1' - cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS DOUBLE) - CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) - CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -195,7 +195,7 @@ SELECT '1' - cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '('1' - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, ''1'' is of string type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -276,7 +276,7 @@ SELECT '1' * cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS DOUBLE) * CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) * CAST('1' AS BINARY))' (double and binary).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -285,7 +285,7 @@ SELECT '1' * cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS DOUBLE) * CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) * CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -294,7 +294,7 @@ SELECT '1' * cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS DOUBLE) * CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) * CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -303,7 +303,7 @@ SELECT '1' * cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output 
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS DOUBLE) * CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) * CAST('2017-12-11 09:30:00' AS DATE))' (double and date).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -376,7 +376,7 @@ SELECT '1' / cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS DOUBLE) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) / CAST('1' AS BINARY))' (double and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -385,7 +385,7 @@ SELECT '1' / cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS DOUBLE) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) / CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -394,7 +394,7 @@ SELECT '1' / cast('2017-12-11 09:30:00.0' as timestamp) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS DOUBLE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -403,7 +403,7 @@ SELECT '1' / cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS DOUBLE) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) / CAST('2017-12-11 09:30:00' AS DATE))' (double and date).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -476,7 +476,7 @@ SELECT '1' % cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS DOUBLE) % CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) % CAST('1' AS BINARY))' (double and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -485,7 +485,7 @@ SELECT '1' % cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS DOUBLE) % CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) % CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -494,7 +494,7 @@ SELECT '1' % cast('2017-12-11 09:30:00.0' as timestamp) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS DOUBLE) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -503,7 +503,7 @@ SELECT '1' % cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS DOUBLE) % CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) % CAST('2017-12-11 09:30:00' AS DATE))' (double and date).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -576,7 +576,7 @@ SELECT pmod('1', cast('1' as binary)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'pmod(CAST('1' AS DOUBLE), CAST('1' AS BINARY))' due to data type mismatch: differing types in 'pmod(CAST('1' AS DOUBLE), CAST('1' AS BINARY))' (double and binary).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -585,7 +585,7 @@ SELECT pmod('1', cast(1 as boolean)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'pmod(CAST('1' AS DOUBLE), CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in 'pmod(CAST('1' AS DOUBLE), CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -594,7 +594,7 @@ SELECT pmod('1', cast('2017-12-11 09:30:00.0' as timestamp)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'pmod(CAST('1' AS DOUBLE), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in 'pmod(CAST('1' AS DOUBLE), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -603,7 +603,7 @@ SELECT pmod('1', cast('2017-12-11 09:30:00' as date)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'pmod(CAST('1' AS DOUBLE), CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in 'pmod(CAST('1' AS DOUBLE), CAST('2017-12-11 09:30:00' AS DATE))' (double and date).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -668,7 +668,7 @@ SELECT cast('1' as binary) + '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) + CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) + CAST('1' AS DOUBLE))' (binary and double).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -677,7 +677,7 @@ SELECT cast(1 as boolean) + '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS BOOLEAN) + CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) + CAST('1' AS DOUBLE))' (boolean and double).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -686,7 +686,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) + '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -695,7 +695,7 @@ SELECT cast('2017-12-11 09:30:00' as date) + '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -760,7 +760,7 @@ SELECT cast('1' as binary) - '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) - CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) - CAST('1' AS DOUBLE))' (binary and double).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -769,7 +769,7 @@ SELECT cast(1 as boolean) - '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS BOOLEAN) - CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) - CAST('1' AS DOUBLE))' (boolean and double).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -778,7 +778,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) - '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - '1')' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, ''1'' is of string type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -787,7 +787,7 @@ SELECT cast('2017-12-11 09:30:00' as date) - '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'date_sub(CAST('2017-12-11 09:30:00' AS DATE), CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -852,7 +852,7 @@ SELECT cast('1' as binary) * '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) * CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) * CAST('1' AS DOUBLE))' (binary and double).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -861,7 +861,7 @@ SELECT cast(1 as boolean) * '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST(1 AS BOOLEAN) * CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) * CAST('1' AS DOUBLE))' (boolean and double).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -870,7 +870,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) * '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) * CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) * CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -879,7 +879,7 @@ SELECT cast('2017-12-11 09:30:00' as date) * '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) * CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) * CAST('1' AS DOUBLE))' (date and double).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]}


 -- !query
@@ -944,7 +944,7 @@ SELECT cast('1' as binary) / '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(CAST('1' AS BINARY) / CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST('1' AS DOUBLE))' (binary and double).; line 1 pos 7
+{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -953,7 +953,7 @@ SELECT cast(1 as boolean) / '1' FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS BOOLEAN) / CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST('1' AS DOUBLE))' (boolean and double).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -962,7 +962,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / '1' FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -971,7 +971,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / '1' FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('1' AS DOUBLE))' (date and double).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1036,7 +1036,7 @@ SELECT cast('1' as binary) % '1' FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('1' AS BINARY) % CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) % CAST('1' AS DOUBLE))' (binary and double).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1045,7 +1045,7 @@ SELECT cast(1 as boolean) % '1' FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST(1 AS BOOLEAN) % CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) % CAST('1' AS DOUBLE))' (boolean and double).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1054,7 +1054,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) % '1' FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1063,7 +1063,7 @@ SELECT cast('2017-12-11 09:30:00' as date) % '1' FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) % CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) % CAST('1' AS DOUBLE))' (date and double).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1128,7 +1128,7 @@ SELECT pmod(cast('1' as binary), '1') FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'pmod(CAST('1' AS BINARY), CAST('1' AS DOUBLE))' due to data type mismatch: differing types in 'pmod(CAST('1' AS BINARY), CAST('1' AS DOUBLE))' (binary and double).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1137,7 +1137,7 @@ SELECT pmod(cast(1 as boolean), '1') FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot 
resolve 'pmod(CAST(1 AS BOOLEAN), CAST('1' AS DOUBLE))' due to data type mismatch: differing types in 'pmod(CAST(1 AS BOOLEAN), CAST('1' AS DOUBLE))' (boolean and double).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1146,7 +1146,7 @@ SELECT pmod(cast('2017-12-11 09:30:00.0' as timestamp), '1') FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST('1' AS DOUBLE))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1155,7 +1155,7 @@ SELECT pmod(cast('2017-12-11 09:30:00' as date), '1') FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST('1' AS DOUBLE))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST('1' AS DOUBLE))' (date and double).; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/stringCastAndExpressions.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/stringCastAndExpressions.sql.out index e987cf4f71c09..b5055eaed94fd 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/stringCastAndExpressions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/stringCastAndExpressions.sql.out @@ -101,7 +101,7 @@ select cast(a as array) from t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 't.a' due to data type mismatch: cannot cast string to array; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -110,7 +110,7 @@ select cast(a as struct) from t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 't.a' due to data type mismatch: cannot cast string to struct; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -119,7 +119,7 @@ select cast(a as map) from t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 't.a' due to data type mismatch: cannot cast string to map; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -136,8 +136,7 @@ select to_timestamp('2018-01-01', a) from t struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'aa' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 
2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'aa'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -154,8 +153,7 @@ select to_unix_timestamp('2018-01-01', a) from t struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'aa' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'aa'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query @@ -172,8 +170,7 @@ select unix_timestamp('2018-01-01', a) from t struct<> -- !query output org.apache.spark.SparkUpgradeException -[INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0: -Fail to recognize 'aa' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'aa'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/widenSetOperationTypes.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/widenSetOperationTypes.sql.out index f830797212a76..602c86025febd 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/widenSetOperationTypes.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/widenSetOperationTypes.sql.out @@ -85,7 +85,7 @@ SELECT cast(1 as tinyint) FROM t UNION SELECT cast('2' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with tinyint at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -94,7 +94,7 @@ SELECT cast(1 as tinyint) FROM t UNION SELECT cast(2 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with tinyint at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -103,7 +103,7 @@ SELECT cast(1 as tinyint) FROM t UNION SELECT cast('2017-12-11 09:30:00.0' as ti struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. 
The first column of the second table is timestamp type which is not compatible with tinyint at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -112,7 +112,7 @@ SELECT cast(1 as tinyint) FROM t UNION SELECT cast('2017-12-11 09:30:00' as date struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with tinyint at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -193,7 +193,7 @@ SELECT cast(1 as smallint) FROM t UNION SELECT cast('2' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with smallint at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -202,7 +202,7 @@ SELECT cast(1 as smallint) FROM t UNION SELECT cast(2 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with smallint at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -211,7 +211,7 @@ SELECT cast(1 as smallint) FROM t UNION SELECT cast('2017-12-11 09:30:00.0' as t struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with smallint at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -220,7 +220,7 @@ SELECT cast(1 as smallint) FROM t UNION SELECT cast('2017-12-11 09:30:00' as dat struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with smallint at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -301,7 +301,7 @@ SELECT cast(1 as int) FROM t UNION SELECT cast('2' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with int at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -310,7 +310,7 @@ SELECT cast(1 as int) FROM t UNION SELECT cast(2 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with int at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -319,7 +319,7 @@ SELECT cast(1 as int) FROM t UNION SELECT cast('2017-12-11 09:30:00.0' as timest struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. 
The first column of the second table is timestamp type which is not compatible with int at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -328,7 +328,7 @@ SELECT cast(1 as int) FROM t UNION SELECT cast('2017-12-11 09:30:00' as date) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with int at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -409,7 +409,7 @@ SELECT cast(1 as bigint) FROM t UNION SELECT cast('2' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with bigint at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -418,7 +418,7 @@ SELECT cast(1 as bigint) FROM t UNION SELECT cast(2 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with bigint at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -427,7 +427,7 @@ SELECT cast(1 as bigint) FROM t UNION SELECT cast('2017-12-11 09:30:00.0' as tim struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with bigint at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -436,7 +436,7 @@ SELECT cast(1 as bigint) FROM t UNION SELECT cast('2017-12-11 09:30:00' as date) struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with bigint at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -517,7 +517,7 @@ SELECT cast(1 as float) FROM t UNION SELECT cast('2' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with float at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -526,7 +526,7 @@ SELECT cast(1 as float) FROM t UNION SELECT cast(2 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with float at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -535,7 +535,7 @@ SELECT cast(1 as float) FROM t UNION SELECT cast('2017-12-11 09:30:00.0' as time struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. 
The first column of the second table is timestamp type which is not compatible with float at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -544,7 +544,7 @@ SELECT cast(1 as float) FROM t UNION SELECT cast('2017-12-11 09:30:00' as date) struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with float at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -625,7 +625,7 @@ SELECT cast(1 as double) FROM t UNION SELECT cast('2' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with double at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -634,7 +634,7 @@ SELECT cast(1 as double) FROM t UNION SELECT cast(2 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with double at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -643,7 +643,7 @@ SELECT cast(1 as double) FROM t UNION SELECT cast('2017-12-11 09:30:00.0' as tim struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with double at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -652,7 +652,7 @@ SELECT cast(1 as double) FROM t UNION SELECT cast('2017-12-11 09:30:00' as date) struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with double at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -733,7 +733,7 @@ SELECT cast(1 as decimal(10, 0)) FROM t UNION SELECT cast('2' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with decimal(10,0) at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -742,7 +742,7 @@ SELECT cast(1 as decimal(10, 0)) FROM t UNION SELECT cast(2 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with decimal(10,0) at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -751,7 +751,7 @@ SELECT cast(1 as decimal(10, 0)) FROM t UNION SELECT cast('2017-12-11 09:30:00.0 struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. 
The first column of the second table is timestamp type which is not compatible with decimal(10,0) at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -760,7 +760,7 @@ SELECT cast(1 as decimal(10, 0)) FROM t UNION SELECT cast('2017-12-11 09:30:00' struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with decimal(10,0) at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -841,7 +841,7 @@ SELECT cast(1 as string) FROM t UNION SELECT cast('2' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with string at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -850,7 +850,7 @@ SELECT cast(1 as string) FROM t UNION SELECT cast(2 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with string at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -877,7 +877,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast(2 as tinyint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is tinyint type which is not compatible with binary at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -886,7 +886,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast(2 as smallint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is smallint type which is not compatible with binary at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -895,7 +895,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast(2 as int) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is int type which is not compatible with binary at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -904,7 +904,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast(2 as bigint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is bigint type which is not compatible with binary at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -913,7 +913,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast(2 as float) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. 
The first column of the second table is float type which is not compatible with binary at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -922,7 +922,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast(2 as double) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is double type which is not compatible with binary at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -931,7 +931,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast(2 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is decimal(10,0) type which is not compatible with binary at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -940,7 +940,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast(2 as string) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is string type which is not compatible with binary at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -958,7 +958,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast(2 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with binary at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -967,7 +967,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast('2017-12-11 09:30:00.0' as t struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with binary at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -976,7 +976,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast('2017-12-11 09:30:00' as dat struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with binary at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -985,7 +985,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast(2 as tinyint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is tinyint type which is not compatible with boolean at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -994,7 +994,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast(2 as smallint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. 
The first column of the second table is smallint type which is not compatible with boolean at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1003,7 +1003,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast(2 as int) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is int type which is not compatible with boolean at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1012,7 +1012,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast(2 as bigint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is bigint type which is not compatible with boolean at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1021,7 +1021,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast(2 as float) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is float type which is not compatible with boolean at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1030,7 +1030,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast(2 as double) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is double type which is not compatible with boolean at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1039,7 +1039,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast(2 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is decimal(10,0) type which is not compatible with boolean at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1048,7 +1048,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast(2 as string) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is string type which is not compatible with boolean at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1057,7 +1057,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast('2' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with boolean at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1074,7 +1074,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast('2017-12-11 09:30:00.0' as ti struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. 
The first column of the second table is timestamp type which is not compatible with boolean at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1083,7 +1083,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast('2017-12-11 09:30:00' as date struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with boolean at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1092,7 +1092,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) FROM t UNION SELECT cast(2 as struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is tinyint type which is not compatible with timestamp at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1101,7 +1101,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) FROM t UNION SELECT cast(2 as struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is smallint type which is not compatible with timestamp at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1110,7 +1110,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) FROM t UNION SELECT cast(2 as struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is int type which is not compatible with timestamp at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1119,7 +1119,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) FROM t UNION SELECT cast(2 as struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is bigint type which is not compatible with timestamp at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1128,7 +1128,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) FROM t UNION SELECT cast(2 as struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is float type which is not compatible with timestamp at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1137,7 +1137,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) FROM t UNION SELECT cast(2 as struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is double type which is not compatible with timestamp at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1146,7 +1146,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) FROM t UNION SELECT cast(2 as struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. 
The first column of the second table is decimal(10,0) type which is not compatible with timestamp at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1164,7 +1164,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) FROM t UNION SELECT cast('2' a struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with timestamp at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1173,7 +1173,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) FROM t UNION SELECT cast(2 as struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with timestamp at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1200,7 +1200,7 @@ SELECT cast('2017-12-12 09:30:00' as date) FROM t UNION SELECT cast(2 as tinyint struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is tinyint type which is not compatible with date at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1209,7 +1209,7 @@ SELECT cast('2017-12-12 09:30:00' as date) FROM t UNION SELECT cast(2 as smallin struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is smallint type which is not compatible with date at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1218,7 +1218,7 @@ SELECT cast('2017-12-12 09:30:00' as date) FROM t UNION SELECT cast(2 as int) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is int type which is not compatible with date at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1227,7 +1227,7 @@ SELECT cast('2017-12-12 09:30:00' as date) FROM t UNION SELECT cast(2 as bigint) struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is bigint type which is not compatible with date at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1236,7 +1236,7 @@ SELECT cast('2017-12-12 09:30:00' as date) FROM t UNION SELECT cast(2 as float) struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is float type which is not compatible with date at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1245,7 +1245,7 @@ SELECT cast('2017-12-12 09:30:00' as date) FROM t UNION SELECT cast(2 as double) struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. 
The first column of the second table is double type which is not compatible with date at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1254,7 +1254,7 @@ SELECT cast('2017-12-12 09:30:00' as date) FROM t UNION SELECT cast(2 as decimal struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is decimal(10,0) type which is not compatible with date at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1272,7 +1272,7 @@ SELECT cast('2017-12-12 09:30:00' as date) FROM t UNION SELECT cast('2' as binar struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with date at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1281,7 +1281,7 @@ SELECT cast('2017-12-12 09:30:00' as date) FROM t UNION SELECT cast(2 as boolean struct<> -- !query output org.apache.spark.sql.AnalysisException -Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with date at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/windowFrameCoercion.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/windowFrameCoercion.sql.out index 103465004829d..ff11f2a3b4b31 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/windowFrameCoercion.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/windowFrameCoercion.sql.out @@ -165,7 +165,7 @@ SELECT COUNT(*) OVER (PARTITION BY 1 ORDER BY cast(1 as string) DESC RANGE BETWE struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'RANGE BETWEEN CURRENT ROW AND CAST(1 AS STRING) FOLLOWING' due to data type mismatch: The data type of the upper bound 'string' does not match the expected data type '(numeric or interval day to second or interval year to month or interval)'.; line 1 pos 21 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -174,7 +174,7 @@ SELECT COUNT(*) OVER (PARTITION BY 1 ORDER BY cast('1' as binary) DESC RANGE BET struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'RANGE BETWEEN CURRENT ROW AND CAST(1 AS BINARY) FOLLOWING' due to data type mismatch: The data type of the upper bound 'binary' does not match the expected data type '(numeric or interval day to second or interval year to month or interval)'.; line 1 pos 21 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -183,7 +183,7 @@ SELECT COUNT(*) OVER (PARTITION BY 1 ORDER BY cast(1 as boolean) DESC RANGE BETW struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'RANGE BETWEEN CURRENT ROW AND CAST(1 AS BOOLEAN) FOLLOWING' due to data type mismatch: The data type of the upper bound 'boolean' does not match the expected data type '(numeric or interval day to second or interval year to month or interval)'.; line 1 pos 21 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -192,7 +192,7 @@ SELECT COUNT(*) OVER (PARTITION BY 1 ORDER BY cast('2017-12-11 
09:30:00.0' as ti struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(PARTITION BY 1 ORDER BY CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) DESC NULLS LAST RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: The data type 'timestamp' used in the order specification does not match the data type 'int' which is used in the range frame.; line 1 pos 21 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udaf.sql.out b/sql/core/src/test/resources/sql-tests/results/udaf.sql.out index 6f674e1166c07..9d948f6ee1290 100644 --- a/sql/core/src/test/resources/sql-tests/results/udaf.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udaf.sql.out @@ -31,7 +31,7 @@ SELECT default.myDoubleAvg(int_col1, 3) as my_avg from t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -Invalid number of arguments for function spark_catalog.default.myDoubleAvg. Expected: 1; Found: 2; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -48,7 +48,7 @@ SELECT default.udaf1(int_col1) as udaf1 from t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -Can not load class 'test.non.existent.udaf' when registering the function 'spark_catalog.default.udaf1', please make sure it is on the classpath; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-aggregates_part1.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-aggregates_part1.sql.out index c646e99935475..c18dce09f73a9 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-aggregates_part1.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-aggregates_part1.sql.out @@ -475,7 +475,7 @@ having exists (select 1 from onek b struct<> -- !query output org.apache.spark.sql.AnalysisException -Found an aggregate function in a correlated predicate that has both outer and local references, which is not supported: sum(DISTINCT (outer(a.four) + b.four)) +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -486,4 +486,4 @@ from tenk1 o struct<> -- !query output org.apache.spark.sql.AnalysisException -[UNRESOLVED_COLUMN] A column or function parameter with name `o`.`unique1` cannot be resolved. Did you mean one of the following? 
[`i`.`unique1`, `i`.`unique2`, `i`.`hundred`, `i`.`even`, `i`.`four`, `i`.`stringu1`, `i`.`ten`, `i`.`odd`, `i`.`string4`, `i`.`stringu2`, `i`.`tenthous`, `i`.`twenty`, `i`.`two`, `i`.`thousand`, `i`.`fivethous`, `i`.`twothousand`]; line 2 pos 67 +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`o`.`unique1`","`i`.`unique1`, `i`.`unique2`, `i`.`hundred`, `i`.`even`, `i`.`four`, `i`.`stringu1`, `i`.`ten`, `i`.`odd`, `i`.`string4`, `i`.`stringu2`, `i`.`tenthous`, `i`.`twenty`, `i`.`two`, `i`.`thousand`, `i`.`fivethous`, `i`.`twothousand`"],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-aggregates_part3.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-aggregates_part3.sql.out index f44b69eaadcb6..d9691fd6f45ba 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-aggregates_part3.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-aggregates_part3.sql.out @@ -5,7 +5,7 @@ select udf(max(min(unique1))) from tenk1 struct<> -- !query output org.apache.spark.sql.AnalysisException -It is not allowed to use an aggregate function in the argument of another aggregate function. Please use the inner aggregate function in a sub-query. +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-join.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-join.sql.out index b95cbc00f35ca..5d5ffa1311b74 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-join.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-join.sql.out @@ -546,7 +546,7 @@ SELECT udf('') AS `xxx`, udf(i) AS i, udf(k), udf(t) AS t struct<> -- !query output org.apache.spark.sql.AnalysisException -Reference 'i' is ambiguous, could be: spark_catalog.default.j1_tbl.i, spark_catalog.default.j2_tbl.i.; line 1 pos 29 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -3263,7 +3263,7 @@ select * from struct<> -- !query output org.apache.spark.sql.AnalysisException -Reference 'f1' is ambiguous, could be: j.f1, j.f1.; line 2 pos 72 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -3273,7 +3273,7 @@ select * from struct<> -- !query output org.apache.spark.sql.AnalysisException -[UNRESOLVED_COLUMN] A column or function parameter with name `y`.`f1` cannot be resolved. Did you mean one of the following? [`j`.`f1`, `j`.`f1`, `x`.`q1`, `x`.`q2`]; line 2 pos 72 +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`y`.`f1`","`j`.`f1`, `j`.`f1`, `x`.`q1`, `x`.`q2`"],"queryContext":[]} -- !query @@ -3292,7 +3292,7 @@ select udf(t1.uunique1) from struct<> -- !query output org.apache.spark.sql.AnalysisException -[UNRESOLVED_COLUMN] A column or function parameter with name `t1`.`uunique1` cannot be resolved. Did you mean one of the following? 
[`t1`.`unique1`, `t2`.`unique1`, `t1`.`unique2`, `t2`.`unique2`, `t1`.`hundred`, `t2`.`hundred`, `t1`.`stringu1`, `t1`.`even`, `t1`.`four`, `t1`.`string4`, `t2`.`stringu1`, `t1`.`stringu2`, `t1`.`ten`, `t1`.`tenthous`, `t2`.`even`, `t2`.`four`, `t1`.`odd`, `t2`.`string4`, `t2`.`stringu2`, `t2`.`ten`, `t2`.`tenthous`, `t1`.`thousand`, `t1`.`twenty`, `t1`.`two`, `t1`.`fivethous`, `t2`.`odd`, `t2`.`thousand`, `t2`.`twenty`, `t2`.`two`, `t2`.`fivethous`, `t1`.`twothousand`, `t2`.`twothousand`]; line 1 pos 11 +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`t1`.`uunique1`","`t1`.`unique1`, `t2`.`unique1`, `t1`.`unique2`, `t2`.`unique2`, `t1`.`hundred`, `t2`.`hundred`, `t1`.`stringu1`, `t1`.`even`, `t1`.`four`, `t1`.`string4`, `t2`.`stringu1`, `t1`.`stringu2`, `t1`.`ten`, `t1`.`tenthous`, `t2`.`even`, `t2`.`four`, `t1`.`odd`, `t2`.`string4`, `t2`.`stringu2`, `t2`.`ten`, `t2`.`tenthous`, `t1`.`thousand`, `t1`.`twenty`, `t1`.`two`, `t1`.`fivethous`, `t2`.`odd`, `t2`.`thousand`, `t2`.`twenty`, `t2`.`two`, `t2`.`fivethous`, `t1`.`twothousand`, `t2`.`twothousand`"],"queryContext":[]} -- !query @@ -3302,7 +3302,7 @@ select udf(udf(t2.uunique1)) from struct<> -- !query output org.apache.spark.sql.AnalysisException -[UNRESOLVED_COLUMN] A column or function parameter with name `t2`.`uunique1` cannot be resolved. Did you mean one of the following? [`t2`.`unique1`, `t1`.`unique1`, `t2`.`unique2`, `t1`.`unique2`, `t2`.`hundred`, `t1`.`hundred`, `t2`.`stringu1`, `t2`.`even`, `t2`.`four`, `t2`.`string4`, `t1`.`stringu1`, `t2`.`stringu2`, `t2`.`ten`, `t2`.`tenthous`, `t1`.`even`, `t1`.`four`, `t2`.`odd`, `t1`.`string4`, `t1`.`stringu2`, `t1`.`ten`, `t1`.`tenthous`, `t2`.`thousand`, `t2`.`twenty`, `t2`.`two`, `t2`.`fivethous`, `t1`.`odd`, `t1`.`thousand`, `t1`.`twenty`, `t1`.`two`, `t1`.`fivethous`, `t2`.`twothousand`, `t1`.`twothousand`]; line 1 pos 15 +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`t2`.`uunique1`","`t2`.`unique1`, `t1`.`unique1`, `t2`.`unique2`, `t1`.`unique2`, `t2`.`hundred`, `t1`.`hundred`, `t2`.`stringu1`, `t2`.`even`, `t2`.`four`, `t2`.`string4`, `t1`.`stringu1`, `t2`.`stringu2`, `t2`.`ten`, `t2`.`tenthous`, `t1`.`even`, `t1`.`four`, `t2`.`odd`, `t1`.`string4`, `t1`.`stringu2`, `t1`.`ten`, `t1`.`tenthous`, `t2`.`thousand`, `t2`.`twenty`, `t2`.`two`, `t2`.`fivethous`, `t1`.`odd`, `t1`.`thousand`, `t1`.`twenty`, `t1`.`two`, `t1`.`fivethous`, `t2`.`twothousand`, `t1`.`twothousand`"],"queryContext":[]} -- !query @@ -3312,7 +3312,7 @@ select udf(uunique1) from struct<> -- !query output org.apache.spark.sql.AnalysisException -[UNRESOLVED_COLUMN] A column or function parameter with name `uunique1` cannot be resolved. Did you mean one of the following? 
[`t1`.`unique1`, `t2`.`unique1`, `t1`.`unique2`, `t2`.`unique2`, `t1`.`even`, `t2`.`even`, `t1`.`four`, `t2`.`four`, `t1`.`ten`, `t2`.`ten`, `t1`.`hundred`, `t2`.`hundred`, `t1`.`odd`, `t2`.`odd`, `t1`.`two`, `t2`.`two`, `t1`.`stringu1`, `t2`.`stringu1`, `t1`.`twenty`, `t2`.`twenty`, `t1`.`string4`, `t2`.`string4`, `t1`.`stringu2`, `t2`.`stringu2`, `t1`.`tenthous`, `t2`.`tenthous`, `t1`.`thousand`, `t2`.`thousand`, `t1`.`fivethous`, `t2`.`fivethous`, `t1`.`twothousand`, `t2`.`twothousand`]; line 1 pos 11 +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`uunique1`","`t1`.`unique1`, `t2`.`unique1`, `t1`.`unique2`, `t2`.`unique2`, `t1`.`even`, `t2`.`even`, `t1`.`four`, `t2`.`four`, `t1`.`ten`, `t2`.`ten`, `t1`.`hundred`, `t2`.`hundred`, `t1`.`odd`, `t2`.`odd`, `t1`.`two`, `t2`.`two`, `t1`.`stringu1`, `t2`.`stringu1`, `t1`.`twenty`, `t2`.`twenty`, `t1`.`string4`, `t2`.`string4`, `t1`.`stringu2`, `t2`.`stringu2`, `t1`.`tenthous`, `t2`.`tenthous`, `t1`.`thousand`, `t2`.`thousand`, `t1`.`fivethous`, `t2`.`fivethous`, `t1`.`twothousand`, `t2`.`twothousand`"],"queryContext":[]} -- !query @@ -3512,7 +3512,7 @@ select udf(udf(f1,g)) from int4_tbl a, (select udf(udf(f1)) as g) ss struct<> -- !query output org.apache.spark.sql.AnalysisException -[UNRESOLVED_COLUMN] A column or function parameter with name `f1` cannot be resolved. Did you mean one of the following? []; line 1 pos 55 +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`f1`",""],"queryContext":[]} -- !query @@ -3521,7 +3521,7 @@ select udf(f1,g) from int4_tbl a, (select a.f1 as g) ss struct<> -- !query output org.apache.spark.sql.AnalysisException -[UNRESOLVED_COLUMN] A column or function parameter with name `a`.`f1` cannot be resolved. Did you mean one of the following? []; line 1 pos 42 +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`a`.`f1`",""],"queryContext":[]} -- !query @@ -3530,7 +3530,7 @@ select udf(udf(f1,g)) from int4_tbl a cross join (select udf(f1) as g) ss struct<> -- !query output org.apache.spark.sql.AnalysisException -[UNRESOLVED_COLUMN] A column or function parameter with name `f1` cannot be resolved. Did you mean one of the following? []; line 1 pos 61 +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`f1`",""],"queryContext":[]} -- !query @@ -3539,7 +3539,7 @@ select udf(f1,g) from int4_tbl a cross join (select udf(udf(a.f1)) as g) ss struct<> -- !query output org.apache.spark.sql.AnalysisException -[UNRESOLVED_COLUMN] A column or function parameter with name `a`.`f1` cannot be resolved. Did you mean one of the following? []; line 1 pos 60 +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`a`.`f1`",""],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_having.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_having.sql.out index a16887457c9df..8589d74709bcd 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_having.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_having.sql.out @@ -140,7 +140,7 @@ SELECT udf(a) FROM test_having HAVING udf(min(a)) < udf(max(a)) struct<> -- !query output org.apache.spark.sql.AnalysisException -grouping expressions sequence is empty, and 'spark_catalog.default.test_having.a' is not an aggregate function. 
Wrap '(min(spark_catalog.default.test_having.a) AS `min(a#x)`, max(spark_catalog.default.test_having.a) AS `max(a#x)`)' in windowing function(s) or wrap 'spark_catalog.default.test_having.a' in first() (or first_value) if you don't care which value you get. +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -149,7 +149,7 @@ SELECT 1 AS one FROM test_having HAVING udf(a) > 1 struct<> -- !query output org.apache.spark.sql.AnalysisException -[UNRESOLVED_COLUMN] A column or function parameter with name `a` cannot be resolved. Did you mean one of the following? [`one`]; line 1 pos 44 +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`a`","`one`"],"queryContext":[]} -- !query @@ -173,11 +173,8 @@ SELECT 1 AS one FROM test_having WHERE 1/udf(a) = 1 HAVING 1 < 2 -- !query schema struct<> -- !query output -org.apache.spark.SparkArithmeticException -[DIVIDE_BY_ZERO] Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -== SQL(line 1, position 40) == -...1 AS one FROM test_having WHERE 1/udf(a) = 1 HAVING 1 < 2 - ^^^^^^^^ +org.apache.spark.SparkException +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_implicit.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_implicit.sql.out index 13295adab15bb..5e1a31205f6ab 100755 --- a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_implicit.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_implicit.sql.out @@ -122,7 +122,7 @@ SELECT udf(count(*)) FROM test_missing_target GROUP BY udf(a) ORDER BY udf(b) struct<> -- !query output org.apache.spark.sql.AnalysisException -[UNRESOLVED_COLUMN] A column or function parameter with name `b` cannot be resolved. Did you mean one of the following? [`udf(count(1))`]; line 1 pos 75 +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`b`","`udf(count(1))`"],"queryContext":[]} -- !query @@ -203,7 +203,7 @@ SELECT udf(c), udf(count(*)) FROM test_missing_target GROUP BY 3 struct<> -- !query output org.apache.spark.sql.AnalysisException -GROUP BY position 3 is not in select list (valid range is [1, 2]); line 1 pos 63 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -214,7 +214,7 @@ SELECT udf(count(*)) FROM test_missing_target x, test_missing_target y struct<> -- !query output org.apache.spark.sql.AnalysisException -Reference 'b' is ambiguous, could be: x.b, y.b.; line 3 pos 14 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -327,7 +327,7 @@ SELECT udf(count(udf(a))) FROM test_missing_target GROUP BY udf(a) ORDER BY udf( struct<> -- !query output org.apache.spark.sql.AnalysisException -[UNRESOLVED_COLUMN] A column or function parameter with name `b` cannot be resolved. Did you mean one of the following? 
[`udf(count(udf(a)))`]; line 1 pos 80 +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`b`","`udf(count(udf(a)))`"],"queryContext":[]} -- !query @@ -390,7 +390,7 @@ SELECT udf(count(udf(x.a))) FROM test_missing_target x, test_missing_target y struct<> -- !query output org.apache.spark.sql.AnalysisException -Reference 'b' is ambiguous, could be: x.b, y.b.; line 3 pos 14 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -415,7 +415,7 @@ SELECT udf(count(udf(b))) FROM test_missing_target x, test_missing_target y struct<> -- !query output org.apache.spark.sql.AnalysisException -Reference 'b' is ambiguous, could be: x.b, y.b.; line 1 pos 21 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-except-all.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-except-all.sql.out index 997308bdbf67a..6394e6cf0ab9c 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/udf-except-all.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-except-all.sql.out @@ -138,7 +138,7 @@ SELECT array(1) struct<> -- !query output org.apache.spark.sql.AnalysisException -ExceptAll can only be performed on tables with the compatible column types. The first column of the second table is array type which is not compatible with int at same column of first table +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -210,7 +210,7 @@ SELECT k, v FROM tab4 struct<> -- !query output org.apache.spark.sql.AnalysisException -ExceptAll can only be performed on tables with the same number of columns, but the first table has 1 columns and the second table has 2 columns +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-except.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-except.sql.out index 0134ad74f5239..bc8536c67e350 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/udf-except.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-except.sql.out @@ -100,12 +100,4 @@ WHERE udf(t1.v) >= (SELECT min(udf(t2.v)) struct<> -- !query output org.apache.spark.sql.AnalysisException -Correlated column is not allowed in predicate (CAST(udf(cast(k as string)) AS STRING) = CAST(udf(cast(outer(k#x) as string)) AS STRING)): -Aggregate [cast(udf(cast(max(cast(udf(cast(v#x as string)) as int)) as string)) as int) AS udf(max(udf(v)))#x] -+- Filter (cast(udf(cast(k#x as string)) as string) = cast(udf(cast(outer(k#x) as string)) as string)) - +- SubqueryAlias t2 - +- View (`t2`, [k#x,v#x]) - +- Project [cast(k#x as string) AS k#x, cast(v#x as int) AS v#x] - +- Project [k#x, v#x] - +- SubqueryAlias t2 - +- LocalRelation [k#x, v#x] +{"errorClass":null,"messageParameters":[],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-group-analytics.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-group-analytics.sql.out index 7038cfc8a8b86..490a1f56ff6bf 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/udf-group-analytics.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-group-analytics.sql.out @@ -207,7 +207,7 @@ SELECT course, udf(year), GROUPING(course) FROM courseSales GROUP BY course, udf struct<> -- !query output org.apache.spark.sql.AnalysisException -grouping() can only be used with GroupingSets/Cube/Rollup 
+{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -216,7 +216,7 @@ SELECT course, udf(year), GROUPING_ID(course, year) FROM courseSales GROUP BY ud struct<> -- !query output org.apache.spark.sql.AnalysisException -grouping_id() can only be used with GroupingSets/Cube/Rollup +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -252,7 +252,7 @@ SELECT course, udf(year) FROM courseSales GROUP BY udf(course), year HAVING GROU struct<> -- !query output org.apache.spark.sql.AnalysisException -[UNSUPPORTED_GROUPING_EXPRESSION] grouping()/grouping_id() can only be used with GroupingSets/Cube/Rollup +{"errorClass":"UNSUPPORTED_GROUPING_EXPRESSION","messageParameters":[],"queryContext":[]} -- !query @@ -261,7 +261,7 @@ SELECT course, udf(udf(year)) FROM courseSales GROUP BY course, year HAVING GROU struct<> -- !query output org.apache.spark.sql.AnalysisException -[UNSUPPORTED_GROUPING_EXPRESSION] grouping()/grouping_id() can only be used with GroupingSets/Cube/Rollup +{"errorClass":"UNSUPPORTED_GROUPING_EXPRESSION","messageParameters":[],"queryContext":[]} -- !query @@ -316,7 +316,7 @@ SELECT course, udf(year) FROM courseSales GROUP BY course, udf(year) ORDER BY GR struct<> -- !query output org.apache.spark.sql.AnalysisException -[UNSUPPORTED_GROUPING_EXPRESSION] grouping()/grouping_id() can only be used with GroupingSets/Cube/Rollup +{"errorClass":"UNSUPPORTED_GROUPING_EXPRESSION","messageParameters":[],"queryContext":[]} -- !query @@ -325,7 +325,7 @@ SELECT course, udf(year) FROM courseSales GROUP BY course, udf(year) ORDER BY GR struct<> -- !query output org.apache.spark.sql.AnalysisException -[UNSUPPORTED_GROUPING_EXPRESSION] grouping()/grouping_id() can only be used with GroupingSets/Cube/Rollup +{"errorClass":"UNSUPPORTED_GROUPING_EXPRESSION","messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-group-by.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-group-by.sql.out index 4ffefd1b6a861..fb31cf222071d 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/udf-group-by.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-group-by.sql.out @@ -15,7 +15,7 @@ SELECT udf(a), udf(COUNT(b)) FROM testData struct<> -- !query output org.apache.spark.sql.AnalysisException -grouping expressions sequence is empty, and 'testdata.a' is not an aggregate function. Wrap '(CAST(udf(cast(count(b) as string)) AS BIGINT) AS `udf(count(b))`)' in windowing function(s) or wrap 'testdata.a' in first() (or first_value) if you don't care which value you get. +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -43,7 +43,7 @@ SELECT udf(a), udf(COUNT(udf(b))) FROM testData GROUP BY b struct<> -- !query output org.apache.spark.sql.AnalysisException -expression 'testdata.a' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get. +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -107,7 +107,7 @@ SELECT udf(a + 2), udf(COUNT(b)) FROM testData GROUP BY a + 1 struct<> -- !query output org.apache.spark.sql.AnalysisException -expression 'testdata.a' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get. 
+{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -164,7 +164,7 @@ SELECT udf(COUNT(b)) AS k FROM testData GROUP BY k struct<> -- !query output org.apache.spark.sql.AnalysisException -aggregate functions are not allowed in GROUP BY, but found CAST(udf(cast(count(b) as string)) AS BIGINT) +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -182,7 +182,7 @@ SELECT k AS a, udf(COUNT(udf(v))) FROM testDataHasSameNameWithAlias GROUP BY udf struct<> -- !query output org.apache.spark.sql.AnalysisException -expression 'testdatahassamenamewithalias.k' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get. +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -199,7 +199,7 @@ SELECT a AS k, udf(COUNT(udf(b))) FROM testData GROUP BY k struct<> -- !query output org.apache.spark.sql.AnalysisException -[UNRESOLVED_COLUMN] A column or function parameter with name `k` cannot be resolved. Did you mean one of the following? [`testdata`.`a`, `testdata`.`b`]; line 1 pos 57 +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`k`","`testdata`.`a`, `testdata`.`b`"],"queryContext":[]} -- !query @@ -271,7 +271,7 @@ SELECT udf(id) FROM range(10) HAVING id > 0 struct<> -- !query output org.apache.spark.sql.AnalysisException -grouping expressions sequence is empty, and 'id' is not an aggregate function. Wrap '()' in windowing function(s) or wrap 'id' in first() (or first_value) if you don't care which value you get. +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -377,7 +377,7 @@ SELECT every(udf(1)) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'every(CAST(udf(cast(1 as string)) AS INT))' due to data type mismatch: argument 1 requires boolean type, however, 'CAST(udf(cast(1 as string)) AS INT)' is of int type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -386,7 +386,7 @@ SELECT some(udf(1S)) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'some(CAST(udf(cast(1 as string)) AS SMALLINT))' due to data type mismatch: argument 1 requires boolean type, however, 'CAST(udf(cast(1 as string)) AS SMALLINT)' is of smallint type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -395,7 +395,7 @@ SELECT any(udf(1L)) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'any(CAST(udf(cast(1 as string)) AS BIGINT))' due to data type mismatch: argument 1 requires boolean type, however, 'CAST(udf(cast(1 as string)) AS BIGINT)' is of bigint type.; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -404,7 +404,7 @@ SELECT udf(every("true")) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'every('true')' due to data type mismatch: argument 1 requires boolean type, however, ''true'' is of string type.; line 1 pos 11 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -490,10 +490,7 @@ SELECT udf(count(*)) FROM test_agg WHERE count(*) > 1L struct<> -- !query output org.apache.spark.sql.AnalysisException - -Aggregate/Window/Generate expressions are not valid in where clause of the query. 
-Expression in where clause: [(count(1) > 1L)]
-Invalid expressions: [count(1)]
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -502,10 +499,7 @@ SELECT udf(count(*)) FROM test_agg WHERE count(*) + 1L > 1L
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-
-Aggregate/Window/Generate expressions are not valid in where clause of the query.
-Expression in where clause: [((count(1) + 1L) > 1L)]
-Invalid expressions: [count(1)]
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -514,7 +508,4 @@ SELECT udf(count(*)) FROM test_agg WHERE k = 1 or k = 2 or count(*) + 1L > 1L or
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-
-Aggregate/Window/Generate expressions are not valid in where clause of the query.
-Expression in where clause: [(((test_agg.k = 1) OR (test_agg.k = 2)) OR (((count(1) + 1L) > 1L) OR (max(test_agg.k) > 1)))]
-Invalid expressions: [count(1), max(test_agg.k)]
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-inline-table.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-inline-table.sql.out
index b6c017118a695..2218f2082b04b 100644
--- a/sql/core/src/test/resources/sql-tests/results/udf/udf-inline-table.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-inline-table.sql.out
@@ -94,7 +94,7 @@ select udf(a), b from values ("one", rand(5)), ("two", 3.0D) as data(a, b)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot evaluate expression rand(5) in inline table definition; line 1 pos 37
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -103,7 +103,7 @@ select udf(a), udf(b) from values ("one", 2.0), ("two") as data(a, b)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-expected 2 columns but found 1 columns in row 1; line 1 pos 27
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -112,7 +112,7 @@ select udf(a), udf(b) from values ("one", array(0, 1)), ("two", struct(1, 2)) as
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-incompatible types found in column b for inline table; line 1 pos 27
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -121,7 +121,7 @@ select udf(a), udf(b) from values ("one"), ("two") as data(a, b)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-expected 2 columns but found 1 columns in row 0; line 1 pos 27
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -130,7 +130,7 @@ select udf(a), udf(b) from values ("one", random_not_exist_func(1)), ("two", 2)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Undefined function: random_not_exist_func. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.random_not_exist_func.; line 1 pos 42
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -139,7 +139,7 @@ select udf(a), udf(b) from values ("one", count(1)), ("two", 2) as data(a, b)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot evaluate expression count(1) in inline table definition; line 1 pos 42
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-intersect-all.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-intersect-all.sql.out
index 29febc747ea13..5b241c5e8cb1b 100644
--- a/sql/core/src/test/resources/sql-tests/results/udf/udf-intersect-all.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-intersect-all.sql.out
@@ -95,7 +95,7 @@ SELECT array(1), udf(2)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-IntersectAll can only be performed on tables with the compatible column types. The first column of the second table is array type which is not compatible with int at same column of first table
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -106,7 +106,7 @@ SELECT udf(k), udf(v) FROM tab2
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-IntersectAll can only be performed on tables with the same number of columns, but the first table has 1 columns and the second table has 2 columns
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-pivot.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-pivot.sql.out
index 875e14a25df25..99a6602306bb7 100644
--- a/sql/core/src/test/resources/sql-tests/results/udf/udf-pivot.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-pivot.sql.out
@@ -199,7 +199,7 @@ PIVOT (
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Aggregate expression required for pivot, but 'coursesales.earnings' did not appear in any aggregate function.
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -214,7 +214,7 @@ PIVOT (
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Aggregate expression required for pivot, but '__auto_generated_subquery_name.year' did not appear in any aggregate function.
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -229,7 +229,7 @@ PIVOT (
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-[UNRESOLVED_COLUMN] A column or function parameter with name `year` cannot be resolved. Did you mean one of the following? [`__auto_generated_subquery_name`.`course`, `__auto_generated_subquery_name`.`earnings`]; line 4 pos 0
+{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`year`","`__auto_generated_subquery_name`.`course`, `__auto_generated_subquery_name`.`earnings`"],"queryContext":[]}
 
 
 -- !query
@@ -259,7 +259,7 @@ PIVOT (
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-It is not allowed to use an aggregate function in the argument of another aggregate function. Please use the inner aggregate function in a sub-query.
+{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -310,7 +310,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -[PIVOT_VALUE_DATA_TYPE_MISMATCH] Invalid pivot value 'dotNET': value data type string does not match pivot column data type struct +{"errorClass":"PIVOT_VALUE_DATA_TYPE_MISMATCH","sqlState":"42000","messageParameters":["dotNET","string","struct"],"queryContext":[]} -- !query @@ -323,7 +323,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -[UNRESOLVED_COLUMN] A column or function parameter with name `s` cannot be resolved. Did you mean one of the following? [`coursesales`.`year`, `coursesales`.`course`, `coursesales`.`earnings`]; line 4 pos 15 +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`s`","`coursesales`.`year`, `coursesales`.`course`, `coursesales`.`earnings`"],"queryContext":[]} -- !query @@ -336,7 +336,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -[NON_LITERAL_PIVOT_VALUES] Literal expressions required for pivot values, found "course". +{"errorClass":"NON_LITERAL_PIVOT_VALUES","sqlState":"42000","messageParameters":["\"course\""],"queryContext":[]} -- !query @@ -421,7 +421,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -[INCOMPARABLE_PIVOT_COLUMN] Invalid pivot column `__auto_generated_subquery_name`.`m`. Pivot columns must be comparable. +{"errorClass":"INCOMPARABLE_PIVOT_COLUMN","sqlState":"42000","messageParameters":["`__auto_generated_subquery_name`.`m`"],"queryContext":[]} -- !query @@ -438,7 +438,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -[INCOMPARABLE_PIVOT_COLUMN] Invalid pivot column `named_struct('course', __auto_generated_subquery_name`.`course, 'm', __auto_generated_subquery_name`.`m)`. Pivot columns must be comparable. +{"errorClass":"INCOMPARABLE_PIVOT_COLUMN","sqlState":"42000","messageParameters":["`named_struct('course', __auto_generated_subquery_name`.`course, 'm', __auto_generated_subquery_name`.`m)`"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-udaf.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-udaf.sql.out index e7c9cd016c388..6a4ff02d5efc7 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/udf-udaf.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-udaf.sql.out @@ -31,7 +31,7 @@ SELECT default.myDoubleAvg(udf(int_col1), udf(3)) as my_avg from t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -Invalid number of arguments for function spark_catalog.default.myDoubleAvg. 
Expected: 1; Found: 2; line 1 pos 7 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -48,7 +48,7 @@ SELECT default.udaf1(udf(int_col1)) as udaf1, udf(default.udaf1(udf(int_col1))) struct<> -- !query output org.apache.spark.sql.AnalysisException -Can not load class 'test.non.existent.udaf' when registering the function 'spark_catalog.default.udaf1', please make sure it is on the classpath; line 1 pos 94 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-window.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-window.sql.out index 076c7dd6a1562..5b47d3633e22a 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/udf-window.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-window.sql.out @@ -58,7 +58,7 @@ ROWS BETWEEN CURRENT ROW AND 2147483648 FOLLOWING) FROM testData ORDER BY udf(ca struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'ROWS BETWEEN CURRENT ROW AND 2147483648L FOLLOWING' due to data type mismatch: The data type of the upper bound 'bigint' does not match the expected data type 'int'.; line 1 pos 46 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -188,7 +188,7 @@ ROWS BETWEEN UNBOUNDED FOLLOWING AND 1 FOLLOWING) FROM testData ORDER BY cate, u struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'ROWS BETWEEN UNBOUNDED FOLLOWING AND 1 FOLLOWING' due to data type mismatch: Window frame upper bound '1' does not follow the lower bound 'unboundedfollowing$()'.; line 1 pos 38 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -198,7 +198,7 @@ RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING) FROM testData ORDER BY cate, udf(val) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(PARTITION BY CAST(udf(cast(cate as string)) AS STRING) RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: A range window frame cannot be used in an unordered window specification.; line 1 pos 38 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -208,7 +208,7 @@ RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING) FROM testData ORDER BY cate, udf(val) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(PARTITION BY CAST(udf(cast(cate as string)) AS STRING) ORDER BY CAST(udf(cast(val as string)) AS INT) ASC NULLS FIRST, testdata.cate ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: A range window frame with value boundaries cannot be used in a window specification with multiple order by expressions: cast(udf(cast(val#x as string)) as int) ASC NULLS FIRST,cate#x ASC NULLS FIRST; line 1 pos 38 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -218,7 +218,7 @@ RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING) FROM testData ORDER BY cate, udf(val) struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve '(PARTITION BY CAST(udf(cast(cate as string)) AS STRING) ORDER BY current_timestamp() ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: The data type 'timestamp' used in the order specification does not match the data type 'int' which is used in the range frame.; line 1 pos 38 +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -228,7 +228,7 @@ RANGE BETWEEN 1 FOLLOWING AND 1 PRECEDING) FROM testData ORDER BY udf(cate), val 
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'RANGE BETWEEN 1 FOLLOWING AND 1 PRECEDING' due to data type mismatch: The lower bound of a window frame must be less than or equal to the upper bound; line 1 pos 38
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -238,13 +238,7 @@ RANGE BETWEEN CURRENT ROW AND current_date PRECEDING) FROM testData ORDER BY cat
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-Frame bound value must be a literal.(line 2, pos 30)
-
-== SQL ==
-SELECT udf(val), cate, count(val) OVER(PARTITION BY udf(cate) ORDER BY udf(val)
-RANGE BETWEEN CURRENT ROW AND current_date PRECEDING) FROM testData ORDER BY cate, val(val)
-------------------------------^^^
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -321,7 +315,7 @@ SELECT udf(val), cate, row_number() OVER(PARTITION BY cate) FROM testData ORDER
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Window function row_number() requires window to be ordered, please add ORDER BY clause. For example SELECT row_number()(value_expr) OVER (PARTITION BY window_partition ORDER BY window_ordering) from table
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/window.sql.out b/sql/core/src/test/resources/sql-tests/results/window.sql.out
index 57725a2d3e943..dd1f3cdd36a87 100644
--- a/sql/core/src/test/resources/sql-tests/results/window.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/window.sql.out
@@ -102,7 +102,7 @@ ROWS BETWEEN CURRENT ROW AND 2147483648 FOLLOWING) FROM testData ORDER BY cate,
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'ROWS BETWEEN CURRENT ROW AND 2147483648L FOLLOWING' due to data type mismatch: The data type of the upper bound 'bigint' does not match the expected data type 'int'.; line 1 pos 41
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -324,7 +324,7 @@ ORDER BY cate, val_date
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(PARTITION BY testdata.cate ORDER BY testdata.val_date ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND INTERVAL '1 02:03:04.001' DAY TO SECOND FOLLOWING)' due to data type mismatch: The data type 'date' used in the order specification does not match the data type 'interval day to second' which is used in the range frame.; line 1 pos 46
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -351,7 +351,7 @@ ROWS BETWEEN UNBOUNDED FOLLOWING AND 1 FOLLOWING) FROM testData ORDER BY cate, v
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'ROWS BETWEEN UNBOUNDED FOLLOWING AND 1 FOLLOWING' due to data type mismatch: Window frame upper bound '1' does not follow the lower bound 'unboundedfollowing$()'.; line 1 pos 33
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -361,7 +361,7 @@ RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING) FROM testData ORDER BY cate, val
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(PARTITION BY testdata.cate RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: A range window frame cannot be used in an unordered window specification.; line 1 pos 33
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -371,7 +371,7 @@ RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING) FROM testData ORDER BY cate, val
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(PARTITION BY testdata.cate ORDER BY testdata.val ASC NULLS FIRST, testdata.cate ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: A range window frame with value boundaries cannot be used in a window specification with multiple order by expressions: val#x ASC NULLS FIRST,cate#x ASC NULLS FIRST; line 1 pos 33
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -381,7 +381,7 @@ RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING) FROM testData ORDER BY cate, val
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve '(PARTITION BY testdata.cate ORDER BY current_timestamp() ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: The data type 'timestamp' used in the order specification does not match the data type 'int' which is used in the range frame.; line 1 pos 33
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -391,7 +391,7 @@ RANGE BETWEEN 1 FOLLOWING AND 1 PRECEDING) FROM testData ORDER BY cate, val
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'RANGE BETWEEN 1 FOLLOWING AND 1 PRECEDING' due to data type mismatch: The lower bound of a window frame must be less than or equal to the upper bound; line 1 pos 33
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -401,13 +401,7 @@ RANGE BETWEEN CURRENT ROW AND current_date PRECEDING) FROM testData ORDER BY cat
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-
-Frame bound value must be a literal.(line 2, pos 30)
-
-== SQL ==
-SELECT val, cate, count(val) OVER(PARTITION BY cate ORDER BY val
-RANGE BETWEEN CURRENT ROW AND current_date PRECEDING) FROM testData ORDER BY cate, val
-------------------------------^^^
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -484,7 +478,7 @@ SELECT val, cate, row_number() OVER(PARTITION BY cate) FROM testData ORDER BY ca
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Window function row_number() requires window to be ordered, please add ORDER BY clause. For example SELECT row_number()(value_expr) OVER (PARTITION BY window_partition ORDER BY window_ordering) from table
+{"errorClass":null,"messageParameters":[],"queryContext":[]}
 
 
 -- !query
@@ -556,7 +550,7 @@ FROM testData ORDER BY cate, val
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-window aggregate function with filter predicate is not supported yet.
+{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -910,23 +904,7 @@ ORDER BY salary DESC struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException - -[INVALID_SQL_SYNTAX] Invalid SQL syntax: The definition of window `w` is repetitive.(line 9, pos 0) - -== SQL == -SELECT - employee_name, - salary, - first_value(employee_name) OVER w highest_salary, - any_value(employee_name) OVER w highest_salary, - nth_value(employee_name, 2) OVER w second_highest_salary -FROM - basic_pays -WINDOW -^^^ - w AS (ORDER BY salary DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 1 FOLLOWING), - w AS (ORDER BY salary DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 2 FOLLOWING) -ORDER BY salary DESC +{"errorClass":"INVALID_SQL_SYNTAX","sqlState":"42000","messageParameters":["The definition of window `w` is repetitive."],"queryContext":[]} -- !query @@ -1207,7 +1185,7 @@ FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -Window specification w is not defined in the WINDOW clause. +{"errorClass":null,"messageParameters":[],"queryContext":[]} -- !query @@ -1219,4 +1197,4 @@ FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -Window specification w is not defined in the WINDOW clause. +{"errorClass":null,"messageParameters":[],"queryContext":[]} diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestHelper.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestHelper.scala index 987e09adb168e..b40e7ffafbaae 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestHelper.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestHelper.scala @@ -19,7 +19,12 @@ package org.apache.spark.sql import scala.util.control.NonFatal -import org.apache.spark.SparkException +import org.json4s.{JInt, JString} +import org.json4s.JsonAST.{JArray, JObject} +import org.json4s.JsonDSL._ +import org.json4s.jackson.JsonMethods.{compact, render} + +import org.apache.spark.SparkThrowable import org.apache.spark.sql.catalyst.planning.PhysicalOperation import org.apache.spark.sql.catalyst.plans.logical._ import org.apache.spark.sql.execution.HiveResult.hiveResultString @@ -71,6 +76,23 @@ trait SQLQueryTestHelper { if (isSorted(df.queryExecution.analyzed)) (schema, answer) else (schema, answer.sorted) } + private def toJson(e: SparkThrowable): String = { + val jValue = ("errorClass" -> e.getErrorClass) ~ + ("errorSubClass" -> Option(e.getErrorSubClass)) ~ + ("sqlState" -> Option(e.getSqlState)) ~ + ("messageParameters" -> + JArray(e.getMessageParameters.map(JString(_)).toList)) ~ + ("queryContext" -> JArray( + e.getQueryContext.map(c => JObject( + "objectType" -> JString(c.objectType()), + "objectName" -> JString(c.objectName()), + "startIndex" -> JInt(c.startIndex()), + "stopIndex" -> JInt(c.stopIndex()), + "fragment" -> JString(c.fragment()))).toList) + ) + compact(render(jValue)) + } + /** * This method handles exceptions occurred during query execution as they may need special care * to become comparable to the expected output. @@ -81,18 +103,11 @@ trait SQLQueryTestHelper { try { result } catch { - case a: AnalysisException => + case e: SparkThrowable => // Do not output the logical plan tree which contains expression IDs. // Also implement a crude way of masking expression IDs in the error message // with a generic pattern "###". 
- val msg = if (a.plan.nonEmpty) a.getSimpleMessage else a.getMessage - (emptySchema, Seq(a.getClass.getName, msg.replaceAll("#\\d+", "#x"))) - case s: SparkException if s.getCause != null => - // For a runtime exception, it is hard to match because its message contains - // information of stage, task ID, etc. - // To make result matching simpler, here we match the cause of the exception if it exists. - val cause = s.getCause - (emptySchema, Seq(cause.getClass.getName, cause.getMessage)) + (emptySchema, Seq(e.getClass.getName, toJson(e).replaceAll("#\\d+", "#x"))) case NonFatal(e) => // If there is an exception, put the exception class followed by the message. (emptySchema, Seq(e.getClass.getName, e.getMessage)) From 7936e2ec97c5de5b504f39ac9d069dbcb7a07f01 Mon Sep 17 00:00:00 2001 From: Max Gekk Date: Wed, 10 Aug 2022 12:08:26 +0500 Subject: [PATCH 2/4] Set the legacy error class by default --- .../sql-tests/results/ansi/array.sql.out | 30 +- .../sql-tests/results/ansi/cast.sql.out | 18 +- .../sql-tests/results/ansi/date.sql.out | 30 +- .../ansi/datetime-parsing-invalid.sql.out | 2 +- .../ansi/higher-order-functions.sql.out | 2 +- .../sql-tests/results/ansi/interval.sql.out | 212 +++--- .../sql-tests/results/ansi/literals.sql.out | 42 +- .../sql-tests/results/ansi/map.sql.out | 4 +- .../results/ansi/string-functions.sql.out | 16 +- .../sql-tests/results/ansi/timestamp.sql.out | 30 +- .../results/ansi/try_arithmetic.sql.out | 2 +- .../resources/sql-tests/results/array.sql.out | 6 +- .../sql-tests/results/bitwise.sql.out | 8 +- .../resources/sql-tests/results/cast.sql.out | 2 +- .../ceil-floor-with-scale-param.sql.out | 12 +- .../sql-tests/results/change-column.sql.out | 16 +- .../sql-tests/results/charvarchar.sql.out | 4 +- .../results/columnresolution-negative.sql.out | 20 +- .../sql-tests/results/comments.sql.out | 4 +- .../resources/sql-tests/results/count.sql.out | 4 +- .../sql-tests/results/csv-functions.sql.out | 18 +- .../sql-tests/results/cte-legacy.sql.out | 2 +- .../sql-tests/results/cte-nested.sql.out | 16 +- .../resources/sql-tests/results/cte.sql.out | 8 +- .../resources/sql-tests/results/date.sql.out | 32 +- .../datetime-formatting-invalid.sql.out | 30 +- .../datetime-formatting-legacy.sql.out | 12 +- .../sql-tests/results/datetime-legacy.sql.out | 64 +- .../results/datetime-parsing-invalid.sql.out | 2 +- .../sql-tests/results/describe.sql.out | 4 +- .../sql-tests/results/except-all.sql.out | 4 +- .../sql-tests/results/extract.sql.out | 22 +- .../sql-tests/results/group-analytics.sql.out | 8 +- .../sql-tests/results/group-by-filter.sql.out | 6 +- .../results/group-by-ordinal.sql.out | 20 +- .../sql-tests/results/group-by.sql.out | 32 +- .../sql-tests/results/grouping_set.sql.out | 2 +- .../sql-tests/results/having.sql.out | 2 +- .../results/higher-order-functions.sql.out | 2 +- .../sql-tests/results/ilike-all.sql.out | 2 +- .../sql-tests/results/ilike-any.sql.out | 2 +- .../sql-tests/results/inline-table.sql.out | 12 +- .../sql-tests/results/intersect-all.sql.out | 4 +- .../sql-tests/results/interval.sql.out | 212 +++--- .../sql-tests/results/join-lateral.sql.out | 8 +- .../sql-tests/results/json-functions.sql.out | 28 +- .../sql-tests/results/like-all.sql.out | 2 +- .../sql-tests/results/like-any.sql.out | 2 +- .../resources/sql-tests/results/limit.sql.out | 12 +- .../sql-tests/results/literals.sql.out | 42 +- .../resources/sql-tests/results/map.sql.out | 4 +- .../sql-tests/results/misc-functions.sql.out | 28 +- .../results/order-by-ordinal.sql.out | 6 +- 
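This second patch flips the default visible in the golden outputs below: instead of "errorClass":null, an exception without an error class is reported as "errorClass":"legacy" with its original message preserved as the single message parameter, so no diagnostic text is lost from the golden files. A sketch of one way such a fallback could be computed (illustrative helper, not the patch's actual code; assumes spark-core on the classpath):

import org.apache.spark.SparkThrowable

// Illustrative fallback: structured errors keep their class and parameters;
// everything else is wrapped as "legacy" with the message as sole parameter.
def errorFields(e: Throwable): (String, Seq[String]) = e match {
  case st: SparkThrowable if st.getErrorClass != null =>
    (st.getErrorClass, st.getMessageParameters.toSeq)
  case _ =>
    ("legacy", Seq(e.getMessage))
}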
.../sql-tests/results/percentiles.sql.out | 24 +- .../resources/sql-tests/results/pivot.sql.out | 6 +- .../postgreSQL/aggregates_part1.sql.out | 2 +- .../postgreSQL/aggregates_part3.sql.out | 2 +- .../results/postgreSQL/boolean.sql.out | 2 +- .../results/postgreSQL/create_view.sql.out | 34 +- .../sql-tests/results/postgreSQL/date.sql.out | 30 +- .../sql-tests/results/postgreSQL/int4.sql.out | 24 +- .../sql-tests/results/postgreSQL/int8.sql.out | 14 +- .../results/postgreSQL/interval.sql.out | 20 +- .../sql-tests/results/postgreSQL/join.sql.out | 4 +- .../results/postgreSQL/limit.sql.out | 4 +- .../results/postgreSQL/numeric.sql.out | 12 +- .../results/postgreSQL/select_having.sql.out | 6 +- .../postgreSQL/select_implicit.sql.out | 8 +- .../results/postgreSQL/strings.sql.out | 16 +- .../sql-tests/results/postgreSQL/text.sql.out | 8 +- .../results/postgreSQL/window_part2.sql.out | 8 +- .../results/postgreSQL/window_part3.sql.out | 20 +- .../results/postgreSQL/window_part4.sql.out | 2 +- .../sql-tests/results/postgreSQL/with.sql.out | 8 +- .../sql-tests/results/random.sql.out | 4 +- .../results/regexp-functions.sql.out | 36 +- .../sql-tests/results/show-tables.sql.out | 8 +- .../sql-tests/results/show-views.sql.out | 2 +- .../sql-tests/results/show_columns.sql.out | 10 +- .../sql-compatibility-functions.sql.out | 2 +- .../results/string-functions.sql.out | 16 +- .../subquery/in-subquery/in-basic.sql.out | 2 +- .../invalid-correlation.sql.out | 10 +- .../subq-input-typecheck.sql.out | 10 +- .../sql-tests/results/table-aliases.sql.out | 4 +- .../results/table-valued-functions.sql.out | 10 +- .../results/tablesample-negative.sql.out | 4 +- .../sql-tests/results/timestamp-ntz.sql.out | 2 +- .../sql-tests/results/timestamp.sql.out | 32 +- .../timestampNTZ/timestamp-ansi.sql.out | 36 +- .../results/timestampNTZ/timestamp.sql.out | 38 +- .../sql-tests/results/timezone.sql.out | 14 +- .../sql-tests/results/transform.sql.out | 2 +- .../sql-tests/results/try_arithmetic.sql.out | 2 +- .../native/booleanEquality.sql.out | 48 +- .../native/caseWhenCoercion.sql.out | 140 ++-- .../native/dateTimeOperations.sql.out | 54 +- .../native/decimalPrecision.sql.out | 704 +++++++++--------- .../typeCoercion/native/division.sql.out | 160 ++-- .../typeCoercion/native/ifCoercion.sql.out | 140 ++-- .../typeCoercion/native/inConversion.sql.out | 280 +++---- .../typeCoercion/native/mapZipWith.sql.out | 4 +- .../typeCoercion/native/mapconcat.sql.out | 10 +- .../native/promoteStrings.sql.out | 94 +-- .../native/stringCastAndExpressions.sql.out | 6 +- .../native/widenSetOperationTypes.sql.out | 140 ++-- .../native/windowFrameCoercion.sql.out | 8 +- .../resources/sql-tests/results/udaf.sql.out | 4 +- .../postgreSQL/udf-aggregates_part1.sql.out | 2 +- .../postgreSQL/udf-aggregates_part3.sql.out | 2 +- .../results/udf/postgreSQL/udf-join.sql.out | 4 +- .../udf/postgreSQL/udf-select_having.sql.out | 6 +- .../postgreSQL/udf-select_implicit.sql.out | 8 +- .../results/udf/udf-except-all.sql.out | 4 +- .../sql-tests/results/udf/udf-except.sql.out | 2 +- .../results/udf/udf-group-analytics.sql.out | 4 +- .../results/udf/udf-group-by.sql.out | 26 +- .../results/udf/udf-inline-table.sql.out | 12 +- .../results/udf/udf-intersect-all.sql.out | 4 +- .../sql-tests/results/udf/udf-pivot.sql.out | 6 +- .../sql-tests/results/udf/udf-udaf.sql.out | 4 +- .../sql-tests/results/udf/udf-window.sql.out | 16 +- .../sql-tests/results/window.sql.out | 24 +- .../apache/spark/sql/SQLQueryTestHelper.scala | 28 +- 124 files changed, 1788 insertions(+), 
1768 deletions(-) diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/array.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/array.sql.out index 0337f7421073d..18ac5bfbd618d 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/array.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/array.sql.out @@ -128,7 +128,7 @@ select sort_array(array('b', 'd'), '1') struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'sort_array(array('b', 'd'), '1')' due to data type mismatch: Sort order in second argument requires a boolean literal.; line 1 pos 7"],"queryContext":[]} -- !query @@ -137,7 +137,7 @@ select sort_array(array('b', 'd'), cast(NULL as boolean)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'sort_array(array('b', 'd'), CAST(NULL AS BOOLEAN))' due to data type mismatch: Sort order in second argument requires a boolean literal.; line 1 pos 7"],"queryContext":[]} -- !query @@ -191,8 +191,8 @@ select elt(4, '123', '456') -- !query schema struct<> -- !query output -org.apache.spark.SparkException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +org.apache.spark.SparkArrayIndexOutOfBoundsException +{"errorClass":"INVALID_ARRAY_INDEX","messageParameters":["4","2","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":26,"fragment":"elt(4, '123', '456'"}]} -- !query @@ -200,8 +200,8 @@ select elt(0, '123', '456') -- !query schema struct<> -- !query output -org.apache.spark.SparkException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +org.apache.spark.SparkArrayIndexOutOfBoundsException +{"errorClass":"INVALID_ARRAY_INDEX","messageParameters":["0","2","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":26,"fragment":"elt(0, '123', '456'"}]} -- !query @@ -209,8 +209,8 @@ select elt(-1, '123', '456') -- !query schema struct<> -- !query output -org.apache.spark.SparkException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +org.apache.spark.SparkArrayIndexOutOfBoundsException +{"errorClass":"INVALID_ARRAY_INDEX","messageParameters":["-1","2","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":27,"fragment":"elt(-1, '123', '456'"}]} -- !query @@ -301,7 +301,7 @@ select array_size(map('a', 1, 'b', 2)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'array_size(map('a', 1, 'b', 2))' due to data type mismatch: argument 1 requires array type, however, 'map('a', 1, 'b', 2)' is of map type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -360,8 +360,8 @@ select elt(4, '123', '456') -- !query schema struct<> -- !query output -org.apache.spark.SparkException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +org.apache.spark.SparkArrayIndexOutOfBoundsException +{"errorClass":"INVALID_ARRAY_INDEX","messageParameters":["4","2","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":26,"fragment":"elt(4, '123', '456'"}]} -- !query @@ -369,8 +369,8 @@ select elt(0, '123', '456') -- !query schema 
struct<> -- !query output -org.apache.spark.SparkException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +org.apache.spark.SparkArrayIndexOutOfBoundsException +{"errorClass":"INVALID_ARRAY_INDEX","messageParameters":["0","2","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":26,"fragment":"elt(0, '123', '456'"}]} -- !query @@ -378,5 +378,5 @@ select elt(-1, '123', '456') -- !query schema struct<> -- !query output -org.apache.spark.SparkException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +org.apache.spark.SparkArrayIndexOutOfBoundsException +{"errorClass":"INVALID_ARRAY_INDEX","messageParameters":["-1","2","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":27,"fragment":"elt(-1, '123', '456'"}]} diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out index 469f775932e3c..2ea4fededd3d2 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out @@ -259,7 +259,7 @@ SELECT HEX(CAST(CAST(123 AS byte) AS binary)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(CAST(123 AS TINYINT) AS BINARY)' due to data type mismatch: \n cannot cast tinyint to binary with ANSI mode on.\n If you have to cast tinyint to binary, you can set spark.sql.ansi.enabled as false.\n; line 1 pos 11"],"queryContext":[]} -- !query @@ -268,7 +268,7 @@ SELECT HEX(CAST(CAST(-123 AS byte) AS binary)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(CAST(-123 AS TINYINT) AS BINARY)' due to data type mismatch: \n cannot cast tinyint to binary with ANSI mode on.\n If you have to cast tinyint to binary, you can set spark.sql.ansi.enabled as false.\n; line 1 pos 11"],"queryContext":[]} -- !query @@ -277,7 +277,7 @@ SELECT HEX(CAST(123S AS binary)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(123S AS BINARY)' due to data type mismatch: \n cannot cast smallint to binary with ANSI mode on.\n If you have to cast smallint to binary, you can set spark.sql.ansi.enabled as false.\n; line 1 pos 11"],"queryContext":[]} -- !query @@ -286,7 +286,7 @@ SELECT HEX(CAST(-123S AS binary)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(-123S AS BINARY)' due to data type mismatch: \n cannot cast smallint to binary with ANSI mode on.\n If you have to cast smallint to binary, you can set spark.sql.ansi.enabled as false.\n; line 1 pos 11"],"queryContext":[]} -- !query @@ -295,7 +295,7 @@ SELECT HEX(CAST(123 AS binary)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(123 AS BINARY)' due to data type mismatch: \n cannot cast int to binary with ANSI mode on.\n If you have to cast int to binary, you can set 
spark.sql.ansi.enabled as false.\n; line 1 pos 11"],"queryContext":[]} -- !query @@ -304,7 +304,7 @@ SELECT HEX(CAST(-123 AS binary)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(-123 AS BINARY)' due to data type mismatch: \n cannot cast int to binary with ANSI mode on.\n If you have to cast int to binary, you can set spark.sql.ansi.enabled as false.\n; line 1 pos 11"],"queryContext":[]} -- !query @@ -313,7 +313,7 @@ SELECT HEX(CAST(123L AS binary)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(123L AS BINARY)' due to data type mismatch: \n cannot cast bigint to binary with ANSI mode on.\n If you have to cast bigint to binary, you can set spark.sql.ansi.enabled as false.\n; line 1 pos 11"],"queryContext":[]} -- !query @@ -322,7 +322,7 @@ SELECT HEX(CAST(-123L AS binary)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(-123L AS BINARY)' due to data type mismatch: \n cannot cast bigint to binary with ANSI mode on.\n If you have to cast bigint to binary, you can set spark.sql.ansi.enabled as false.\n; line 1 pos 11"],"queryContext":[]} -- !query @@ -380,7 +380,7 @@ SELECT CAST(interval 3 month 1 hour AS string) struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot mix year-month and day-time fields: interval 3 month 1 hour(line 1, pos 12)\n\n== SQL ==\nSELECT CAST(interval 3 month 1 hour AS string)\n------------^^^\n"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out index c746fa1cc1627..924f2244d40b3 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out @@ -21,7 +21,7 @@ select date '2020-01-01中文' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 2020-01-01中文(line 1, pos 7)\n\n== SQL ==\nselect date '2020-01-01中文'\n-------^^^\n"],"queryContext":[]} -- !query @@ -38,7 +38,7 @@ select make_date(2000, 13, 1) struct<> -- !query output java.time.DateTimeException -Invalid value for MonthOfYear (valid values 1 - 12): 13. If necessary set spark.sql.ansi.enabled to false to bypass this error. +{"errorClass":"legacy","messageParameters":["Invalid value for MonthOfYear (valid values 1 - 12): 13. If necessary set spark.sql.ansi.enabled to false to bypass this error."],"queryContext":[]} -- !query @@ -47,7 +47,7 @@ select make_date(2000, 1, 33) struct<> -- !query output java.time.DateTimeException -Invalid value for DayOfMonth (valid values 1 - 28/31): 33. If necessary set spark.sql.ansi.enabled to false to bypass this error. +{"errorClass":"legacy","messageParameters":["Invalid value for DayOfMonth (valid values 1 - 28/31): 33. 
If necessary set spark.sql.ansi.enabled to false to bypass this error."],"queryContext":[]} -- !query @@ -56,7 +56,7 @@ select date'015' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 015(line 1, pos 7)\n\n== SQL ==\nselect date'015'\n-------^^^\n"],"queryContext":[]} -- !query @@ -65,7 +65,7 @@ select date'2021-4294967297-11' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 2021-4294967297-11(line 1, pos 7)\n\n== SQL ==\nselect date'2021-4294967297-11'\n-------^^^\n"],"queryContext":[]} -- !query @@ -181,7 +181,7 @@ select next_day("2015-07-23", "xx") struct<> -- !query output java.lang.IllegalArgumentException -Illegal input for day of week: xx. If necessary set spark.sql.ansi.enabled to false to bypass this error. +{"errorClass":"legacy","messageParameters":["Illegal input for day of week: xx. If necessary set spark.sql.ansi.enabled to false to bypass this error."],"queryContext":[]} -- !query @@ -271,7 +271,7 @@ select date_add('2011-11-11', 1L) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1L)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1L' is of bigint type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -280,7 +280,7 @@ select date_add('2011-11-11', 1.0) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1.0BD)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -289,7 +289,7 @@ select date_add('2011-11-11', 1E1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -379,7 +379,7 @@ select date_sub('2011-11-11', 1L) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 1L)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1L' is of bigint type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -388,7 +388,7 @@ select date_sub('2011-11-11', 1.0) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 1.0BD)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -397,7 +397,7 @@ select 
date_sub('2011-11-11', 1E1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -487,7 +487,7 @@ select date '2011-11-11' + 1E1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(DATE '2011-11-11', 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -576,7 +576,7 @@ select date'2011-11-11' + '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(DATE '2011-11-11', CAST('1' AS DATE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DATE)' is of date type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -585,7 +585,7 @@ select '1' + date'2011-11-11' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('1' AS DATE), DATE '2011-11-11')' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'DATE '2011-11-11'' is of date type.; line 1 pos 7"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out index e554d681cdd2a..333663a2308f3 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out @@ -5,7 +5,7 @@ select to_timestamp('294248', 'y') struct<> -- !query output java.lang.ArithmeticException -long overflow +{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/higher-order-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/higher-order-functions.sql.out index 2d1d9aedc6d17..ba2603a50e443 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/higher-order-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/higher-order-functions.sql.out @@ -17,7 +17,7 @@ select upper(x -> x) as v struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["A lambda function should only be used in a higher order function. 
However, its class is org.apache.spark.sql.catalyst.expressions.Upper, which is not a higher order function.; line 1 pos 7"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out index d4da6adab44a2..7593d47bf931a 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out @@ -13,7 +13,7 @@ select interval 4 month 2 weeks 3 microseconds * 1.5 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot mix year-month and day-time fields: interval 4 month 2 weeks 3 microseconds(line 1, pos 7)\n\n== SQL ==\nselect interval 4 month 2 weeks 3 microseconds * 1.5\n-------^^^\n"],"queryContext":[]} -- !query @@ -46,7 +46,7 @@ select interval 2147483647 month * 2 struct<> -- !query output java.lang.ArithmeticException -integer overflow +{"errorClass":"legacy","messageParameters":["integer overflow"],"queryContext":[]} -- !query @@ -55,7 +55,7 @@ select interval 2147483647 month / 0.5 struct<> -- !query output java.lang.ArithmeticException -Overflow +{"errorClass":"legacy","messageParameters":["Overflow"],"queryContext":[]} -- !query @@ -64,7 +64,7 @@ select interval 2147483647 day * 2 struct<> -- !query output java.lang.ArithmeticException -long overflow +{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} -- !query @@ -73,7 +73,7 @@ select interval 2147483647 day / 0.5 struct<> -- !query output java.lang.ArithmeticException -long overflow +{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} -- !query @@ -184,7 +184,7 @@ select '2' / interval 2 second struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '('2' / INTERVAL '02' SECOND)' due to data type mismatch: differing types in '('2' / INTERVAL '02' SECOND)' (string and interval second).; line 1 pos 7"],"queryContext":[]} -- !query @@ -193,7 +193,7 @@ select '2' / interval 2 year struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '('2' / INTERVAL '2' YEAR)' due to data type mismatch: differing types in '('2' / INTERVAL '2' YEAR)' (string and interval year).; line 1 pos 7"],"queryContext":[]} -- !query @@ -268,7 +268,7 @@ select 2 / interval '2' year struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(2 / INTERVAL '2' YEAR)' due to data type mismatch: differing types in '(2 / INTERVAL '2' YEAR)' (int and interval year).; line 1 pos 7"],"queryContext":[]} -- !query @@ -277,7 +277,7 @@ select 2 / interval '2' hour struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(2 / INTERVAL '02' HOUR)' due to data type mismatch: differing types in '(2 / INTERVAL '02' HOUR)' (int and interval hour).; line 1 pos 7"],"queryContext":[]} -- !query @@ -286,7 +286,7 @@ select null / interval '2' year struct<> -- !query output 
org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(NULL / INTERVAL '2' YEAR)' due to data type mismatch: differing types in '(NULL / INTERVAL '2' YEAR)' (void and interval year).; line 1 pos 7"],"queryContext":[]} -- !query @@ -295,7 +295,7 @@ select null / interval '2' hour struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(NULL / INTERVAL '02' HOUR)' due to data type mismatch: differing types in '(NULL / INTERVAL '02' HOUR)' (void and interval hour).; line 1 pos 7"],"queryContext":[]} -- !query @@ -304,7 +304,7 @@ select -interval '-1 month 1 day -1 second' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot mix year-month and day-time fields: interval '-1 month 1 day -1 second'(line 1, pos 8)\n\n== SQL ==\nselect -interval '-1 month 1 day -1 second'\n--------^^^\n"],"queryContext":[]} -- !query @@ -329,7 +329,7 @@ select -interval -1 month 1 day -1 second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot mix year-month and day-time fields: interval -1 month 1 day -1 second(line 1, pos 8)\n\n== SQL ==\nselect -interval -1 month 1 day -1 second\n--------^^^\n"],"queryContext":[]} -- !query @@ -354,7 +354,7 @@ select +interval '-1 month 1 day -1 second' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot mix year-month and day-time fields: interval '-1 month 1 day -1 second'(line 1, pos 8)\n\n== SQL ==\nselect +interval '-1 month 1 day -1 second'\n--------^^^\n"],"queryContext":[]} -- !query @@ -379,7 +379,7 @@ select +interval -1 month 1 day -1 second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot mix year-month and day-time fields: interval -1 month 1 day -1 second(line 1, pos 8)\n\n== SQL ==\nselect +interval -1 month 1 day -1 second\n--------^^^\n"],"queryContext":[]} -- !query @@ -661,7 +661,7 @@ select make_dt_interval(2147483647) struct<> -- !query output java.lang.ArithmeticException -long overflow +{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} -- !query @@ -702,7 +702,7 @@ select make_ym_interval(178956970, 8) struct<> -- !query output java.lang.ArithmeticException -integer overflow +{"errorClass":"legacy","messageParameters":["integer overflow"],"queryContext":[]} -- !query @@ -719,7 +719,7 @@ select make_ym_interval(-178956970, -9) struct<> -- !query output java.lang.ArithmeticException -integer overflow +{"errorClass":"legacy","messageParameters":["integer overflow"],"queryContext":[]} -- !query @@ -792,7 +792,7 @@ select interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 millisec struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot mix year-month and day-time fields: interval 
1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 millisecond 9 microsecond(line 1, pos 7)\n\n== SQL ==\nselect interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 millisecond 9 microsecond\n-------^^^\n"],"queryContext":[]} -- !query @@ -825,7 +825,7 @@ select interval '30' year '25' month '-100' day '40' hour '80' minute '299.88998 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot mix year-month and day-time fields: interval '30' year '25' month '-100' day '40' hour '80' minute '299.889987299' second(line 1, pos 7)\n\n== SQL ==\nselect interval '30' year '25' month '-100' day '40' hour '80' minute '299.889987299' second\n-------^^^\n"],"queryContext":[]} -- !query @@ -954,7 +954,7 @@ select interval '20 15:40:32.99899999' day to hour struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval day to hour: 20 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '20 15:40:32.99899999' day to hour\n----------------^^^\n"],"queryContext":[]} -- !query @@ -963,7 +963,7 @@ select interval '20 15:40:32.99899999' day to minute struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]d h:m`, `INTERVAL [+|-]'[+|-]d h:m' DAY TO MINUTE` when cast to interval day to minute: 20 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '20 15:40:32.99899999' day to minute\n----------------^^^\n"],"queryContext":[]} -- !query @@ -972,7 +972,7 @@ select interval '15:40:32.99899999' hour to minute struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]h:m`, `INTERVAL [+|-]'[+|-]h:m' HOUR TO MINUTE` when cast to interval hour to minute: 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '15:40:32.99899999' hour to minute\n----------------^^^\n"],"queryContext":[]} -- !query @@ -981,7 +981,7 @@ select interval '15:40.99899999' hour to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 15:40.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '15:40.99899999' hour to second\n----------------^^^\n"],"queryContext":[]} -- !query @@ -990,7 +990,7 @@ select interval '15:40' hour to second struct<> -- !query output 
org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 15:40, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '15:40' hour to second\n----------------^^^\n"],"queryContext":[]} -- !query @@ -999,7 +999,7 @@ select interval '20 40:32.99899999' minute to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]m:s.n`, `INTERVAL [+|-]'[+|-]m:s.n' MINUTE TO SECOND` when cast to interval minute to second: 20 40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '20 40:32.99899999' minute to second\n----------------^^^\n"],"queryContext":[]} -- !query @@ -1008,7 +1008,7 @@ select interval 10 nanoseconds struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nError parsing ' 10 nanoseconds' to interval, invalid unit 'nanoseconds'(line 1, pos 16)\n\n== SQL ==\nselect interval 10 nanoseconds\n----------------^^^\n"],"queryContext":[]} -- !query @@ -1097,7 +1097,7 @@ select interval struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nat least one time unit should be given for interval literal(line 1, pos 7)\n\n== SQL ==\nselect interval\n-------^^^\n"],"queryContext":[]} -- !query @@ -1106,7 +1106,7 @@ select interval 1 fake_unit struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nError parsing ' 1 fake_unit' to interval, invalid unit 'fake_unit'(line 1, pos 16)\n\n== SQL ==\nselect interval 1 fake_unit\n----------------^^^\n"],"queryContext":[]} -- !query @@ -1115,7 +1115,7 @@ select interval 1 year to month struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nThe value of from-to unit must be a string(line 1, pos 16)\n\n== SQL ==\nselect interval 1 year to month\n----------------^^^\n"],"queryContext":[]} -- !query @@ -1124,7 +1124,7 @@ select interval '1' year to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nIntervals FROM year TO second are not supported.(line 1, pos 16)\n\n== SQL ==\nselect interval '1' year to second\n----------------^^^\n"],"queryContext":[]} -- !query @@ -1133,7 +1133,7 @@ select interval '10-9' year to month '2-1' year to month struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCan only have a single from-to unit in the 
interval literal syntax(line 1, pos 37)\n\n== SQL ==\nselect interval '10-9' year to month '2-1' year to month\n-------------------------------------^^^\n"],"queryContext":[]} -- !query @@ -1142,7 +1142,7 @@ select interval '10-9' year to month '12:11:10' hour to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 37)\n\n== SQL ==\nselect interval '10-9' year to month '12:11:10' hour to second\n-------------------------------------^^^\n"],"queryContext":[]} -- !query @@ -1151,7 +1151,7 @@ select interval '1 15:11' day to minute '12:11:10' hour to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 40)\n\n== SQL ==\nselect interval '1 15:11' day to minute '12:11:10' hour to second\n----------------------------------------^^^\n"],"queryContext":[]} -- !query @@ -1160,7 +1160,7 @@ select interval 1 year '2-1' year to month struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 23)\n\n== SQL ==\nselect interval 1 year '2-1' year to month\n-----------------------^^^\n"],"queryContext":[]} -- !query @@ -1169,7 +1169,7 @@ select interval 1 year '12:11:10' hour to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 23)\n\n== SQL ==\nselect interval 1 year '12:11:10' hour to second\n-----------------------^^^\n"],"queryContext":[]} -- !query @@ -1178,7 +1178,7 @@ select interval '10-9' year to month '1' year struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 37)\n\n== SQL ==\nselect interval '10-9' year to month '1' year\n-------------------------------------^^^\n"],"queryContext":[]} -- !query @@ -1187,7 +1187,7 @@ select interval '12:11:10' hour to second '1' year struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 42)\n\n== SQL ==\nselect interval '12:11:10' hour to second '1' year\n------------------------------------------^^^\n"],"queryContext":[]} -- !query @@ -1196,7 +1196,7 @@ select interval (-30) day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Undefined function: interval. 
This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7"],"queryContext":[]} -- !query @@ -1205,7 +1205,7 @@ select interval (a + 1) day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7"],"queryContext":[]} -- !query @@ -1223,7 +1223,7 @@ select interval (-30) days struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7"],"queryContext":[]} -- !query @@ -1232,7 +1232,7 @@ select interval (a + 1) days struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7"],"queryContext":[]} -- !query @@ -1258,7 +1258,7 @@ SELECT INTERVAL '178956970-8' YEAR TO MONTH struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nError parsing interval year-month string: integer overflow(line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '178956970-8' YEAR TO MONTH\n----------------^^^\n"],"queryContext":[]} -- !query @@ -1303,7 +1303,7 @@ select struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '2' YEAR + '3-3 year to month')' due to data type mismatch: differing types in '(INTERVAL '2' YEAR + '3-3 year to month')' (interval year and string).; line 2 pos 2"],"queryContext":[]} -- !query @@ -1328,7 +1328,7 @@ select interval '2' year + '3-3' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '2' YEAR + '3-3')' due to data type mismatch: differing types in '(INTERVAL '2' YEAR + '3-3')' (interval year and string).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1337,7 +1337,7 @@ select interval '2' year - '4' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '2' YEAR - '4')' due to data type mismatch: differing types in '(INTERVAL '2' YEAR - '4')' (interval year and string).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1372,7 +1372,7 @@ select interval '2' year + str from interval_view struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '2' YEAR + interval_view.str)' due to data type mismatch: differing types in '(INTERVAL '2' YEAR + 
interval_view.str)' (interval year and string).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -1381,7 +1381,7 @@ select interval '2' year - str from interval_view
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '2' YEAR - interval_view.str)' due to data type mismatch: differing types in '(INTERVAL '2' YEAR - interval_view.str)' (interval year and string).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -1408,7 +1408,7 @@ select interval '2-2' year to month + interval '3' day
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'INTERVAL '2-2' YEAR TO MONTH + INTERVAL '3' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2-2' YEAR TO MONTH' is of interval year to month type.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -1417,7 +1417,7 @@ select interval '3' day + interval '2-2' year to month
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'INTERVAL '2-2' YEAR TO MONTH + INTERVAL '3' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2-2' YEAR TO MONTH' is of interval year to month type.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -1426,7 +1426,7 @@ select interval '2-2' year to month - interval '3' day
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'INTERVAL '2-2' YEAR TO MONTH + (- INTERVAL '3' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2-2' YEAR TO MONTH' is of interval year to month type.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -1435,7 +1435,7 @@ select interval '3' day - interval '2-2' year to month
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '3' DAY - INTERVAL '2-2' YEAR TO MONTH)' due to data type mismatch: differing types in '(INTERVAL '3' DAY - INTERVAL '2-2' YEAR TO MONTH)' (interval day and interval year to month).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -1444,7 +1444,7 @@ select 1 - interval '2' second
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '1 + (- INTERVAL '02' SECOND)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, '1' is of int type.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -1453,7 +1453,7 @@ select 1 + interval '2' month
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(1 + INTERVAL '2' MONTH)' due to data type mismatch: differing types in '(1 + INTERVAL '2' MONTH)' (int and interval month).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -1462,7 +1462,7 @@ select interval '2' second + 1
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '1 + INTERVAL '02' SECOND' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, '1' is of int type.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -1471,7 +1471,7 @@ select interval '2' month - 1
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '2' MONTH - 1)' due to data type mismatch: differing types in '(INTERVAL '2' MONTH - 1)' (interval month and int).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -1528,7 +1528,7 @@ select interval '-\t2-2\t' year to month
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nInterval string does not match year-month format of `[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval year to month: -\t2-2\t(line 1, pos 16)\n\n== SQL ==\nselect interval '-\\t2-2\\t' year to month\n----------------^^^\n"],"queryContext":[]}


 -- !query
@@ -1545,7 +1545,7 @@ select interval '\n-\t10\t 12:34:46.789\t' day to second
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]d h:m:s.n`, `INTERVAL [+|-]'[+|-]d h:m:s.n' DAY TO SECOND` when cast to interval day to second: \n-\t10\t 12:34:46.789\t, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '\\n-\\t10\\t 12:34:46.789\\t' day to second\n----------------^^^\n"],"queryContext":[]}


 -- !query
@@ -1554,7 +1554,7 @@ select interval '中文 interval 1 day'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nCannot parse the INTERVAL value: 中文 interval 1 day(line 1, pos 7)\n\n== SQL ==\nselect interval '中文 interval 1 day'\n-------^^^\n"],"queryContext":[]}


 -- !query
@@ -1563,7 +1563,7 @@ select interval 'interval中文 1 day'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nCannot parse the INTERVAL value: interval中文 1 day(line 1, pos 7)\n\n== SQL ==\nselect interval 'interval中文 1 day'\n-------^^^\n"],"queryContext":[]}


 -- !query
@@ -1572,7 +1572,7 @@ select interval 'interval 1中文day'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nCannot parse the INTERVAL value: interval 1中文day(line 1, pos 7)\n\n== SQL ==\nselect interval 'interval 1中文day'\n-------^^^\n"],"queryContext":[]}


 -- !query
@@ -1580,8 +1580,8 @@ select -(a) from values (interval '-2147483648 months', interval '2147483647 mon
 -- !query schema
 struct<>
 -- !query output
-org.apache.spark.SparkException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+org.apache.spark.SparkArithmeticException
+{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["integer overflow",""],"queryContext":[]}


 -- !query
@@ -1589,8 +1589,8 @@ select a - b from values (interval '-2147483648 months', interval '2147483647 mo
 -- !query schema
 struct<>
 -- !query output
-org.apache.spark.SparkException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+org.apache.spark.SparkArithmeticException
+{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["integer overflow"," Use 'try_subtract' to tolerate overflow and return NULL instead."],"queryContext":[]}


 -- !query
@@ -1598,8 +1598,8 @@ select b + interval '1 month' from values (interval '-2147483648 months', interv
 -- !query schema
 struct<>
 -- !query output
-org.apache.spark.SparkException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+org.apache.spark.SparkArithmeticException
+{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["integer overflow"," Use 'try_add' to tolerate overflow and return NULL instead."],"queryContext":[]}


 -- !query
@@ -1607,8 +1607,8 @@ select a * 1.1 from values (interval '-2147483648 months', interval '2147483647
 -- !query schema
 struct<>
 -- !query output
-org.apache.spark.SparkException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+java.lang.ArithmeticException
+{"errorClass":"legacy","messageParameters":["Overflow"],"queryContext":[]}


 -- !query
@@ -1616,8 +1616,8 @@ select a / 0.5 from values (interval '-2147483648 months', interval '2147483647
 -- !query schema
 struct<>
 -- !query output
-org.apache.spark.SparkException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+java.lang.ArithmeticException
+{"errorClass":"legacy","messageParameters":["Overflow"],"queryContext":[]}


 -- !query
@@ -1678,7 +1678,7 @@ select interval '+'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nCannot parse the INTERVAL value: +(line 1, pos 7)\n\n== SQL ==\nselect interval '+'\n-------^^^\n"],"queryContext":[]}


 -- !query
@@ -1687,7 +1687,7 @@ select interval '+.'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nCannot parse the INTERVAL value: +.(line 1, pos 7)\n\n== SQL ==\nselect interval '+.'\n-------^^^\n"],"queryContext":[]}


 -- !query
@@ -1696,7 +1696,7 @@ select interval '1'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nCannot parse the INTERVAL value: 1(line 1, pos 7)\n\n== SQL ==\nselect interval '1'\n-------^^^\n"],"queryContext":[]}


 -- !query
@@ -1705,7 +1705,7 @@ select interval '1.2'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nCannot parse the INTERVAL value: 1.2(line 1, pos 7)\n\n== SQL ==\nselect interval '1.2'\n-------^^^\n"],"queryContext":[]}


 -- !query
@@ -1714,7 +1714,7 @@ select interval '- 2'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nCannot parse the INTERVAL value: - 2(line 1, pos 7)\n\n== SQL ==\nselect interval '- 2'\n-------^^^\n"],"queryContext":[]}


 -- !query
@@ -1723,7 +1723,7 @@ select interval '1 day -'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nCannot parse the INTERVAL value: 1 day -(line 1, pos 7)\n\n== SQL ==\nselect interval '1 day -'\n-------^^^\n"],"queryContext":[]}


 -- !query
@@ -1732,7 +1732,7 @@ select interval '1 day 1'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nCannot parse the INTERVAL value: 1 day 1(line 1, pos 7)\n\n== SQL ==\nselect interval '1 day 1'\n-------^^^\n"],"queryContext":[]}


 -- !query
@@ -1741,7 +1741,7 @@ select interval '1 day 2' day
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nCan only use numbers in the interval value part for multiple unit value pairs interval form, but got invalid value: 1 day 2(line 1, pos 16)\n\n== SQL ==\nselect interval '1 day 2' day\n----------------^^^\n"],"queryContext":[]}


 -- !query
@@ -1750,7 +1750,7 @@ select interval 'interval 1' day
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nCan only use numbers in the interval value part for multiple unit value pairs interval form, but got invalid value: interval 1(line 1, pos 16)\n\n== SQL ==\nselect interval 'interval 1' day\n----------------^^^\n"],"queryContext":[]}


 -- !query
@@ -1801,7 +1801,7 @@ SELECT (INTERVAL '-178956970-8' YEAR TO MONTH) / -1.0
 struct<>
 -- !query output
 java.lang.ArithmeticException
-Overflow
+{"errorClass":"legacy","messageParameters":["Overflow"],"queryContext":[]}


 -- !query
@@ -1810,7 +1810,7 @@ SELECT (INTERVAL '-178956970-8' YEAR TO MONTH) / -1.0D
 struct<>
 -- !query output
 java.lang.ArithmeticException
-not in range
+{"errorClass":"legacy","messageParameters":["not in range"],"queryContext":[]}


 -- !query
@@ -1853,7 +1853,7 @@ SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1.0
 struct<>
 -- !query output
 java.lang.ArithmeticException
-Overflow
+{"errorClass":"legacy","messageParameters":["Overflow"],"queryContext":[]}


 -- !query
@@ -1862,7 +1862,7 @@ SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1.0D
 struct<>
 -- !query output
 java.lang.ArithmeticException
-not in range
+{"errorClass":"legacy","messageParameters":["not in range"],"queryContext":[]}


 -- !query
@@ -1967,7 +1967,7 @@ SELECT INTERVAL '106751992 04' DAY TO HOUR
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nrequirement failed: day 106751992 outside range [0, 106751991](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '106751992 04' DAY TO HOUR\n----------------^^^\n"],"queryContext":[]}


 -- !query
@@ -1976,7 +1976,7 @@ SELECT INTERVAL '-106751992 04' DAY TO HOUR
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nrequirement failed: day 106751992 outside range [0, 106751991](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '-106751992 04' DAY TO HOUR\n----------------^^^\n"],"queryContext":[]}


 -- !query
@@ -1985,7 +1985,7 @@ SELECT INTERVAL '2562047789:00' HOUR TO MINUTE
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nrequirement failed: hour 2562047789 outside range [0, 2562047788](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '2562047789:00' HOUR TO MINUTE\n----------------^^^\n"],"queryContext":[]}


 -- !query
@@ -1994,7 +1994,7 @@ SELECT INTERVAL '-2562047789:00' HOUR TO MINUTE
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nrequirement failed: hour 2562047789 outside range [0, 2562047788](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '-2562047789:00' HOUR TO MINUTE\n----------------^^^\n"],"queryContext":[]}


 -- !query
@@ -2003,7 +2003,7 @@ SELECT INTERVAL '153722867281:54.775808' MINUTE TO SECOND
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nrequirement failed: minute 153722867281 outside range [0, 153722867280](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '153722867281:54.775808' MINUTE TO SECOND\n----------------^^^\n"],"queryContext":[]}


 -- !query
@@ -2012,7 +2012,7 @@ SELECT INTERVAL '-153722867281:54.775808' MINUTE TO SECOND
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nrequirement failed: minute 153722867281 outside range [0, 153722867280](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '-153722867281:54.775808' MINUTE TO SECOND\n----------------^^^\n"],"queryContext":[]}


 -- !query
@@ -2149,7 +2149,7 @@ SELECT INTERVAL 1 MONTH > INTERVAL 20 DAYS
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '1' MONTH > INTERVAL '20' DAY)' due to data type mismatch: differing types in '(INTERVAL '1' MONTH > INTERVAL '20' DAY)' (interval month and interval day).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -2158,7 +2158,7 @@ SELECT INTERVAL '1' DAY < '1'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '1' DAY < '1')' due to data type mismatch: differing types in '(INTERVAL '1' DAY < '1')' (interval day and string).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -2167,7 +2167,7 @@ SELECT INTERVAL '1' DAY = '1'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '1' DAY = '1')' due to data type mismatch: differing types in '(INTERVAL '1' DAY = '1')' (interval day and string).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -2176,7 +2176,7 @@ SELECT INTERVAL '1' DAY > '1'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '1' DAY > '1')' due to data type mismatch: differing types in '(INTERVAL '1' DAY > '1')' (interval day and string).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -2185,7 +2185,7 @@ SELECT '1' < INTERVAL '1' DAY
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '('1' < INTERVAL '1' DAY)' due to data type mismatch: differing types in '('1' < INTERVAL '1' DAY)' (string and interval day).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -2194,7 +2194,7 @@ SELECT '1' = INTERVAL '1' DAY
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '('1' = INTERVAL '1' DAY)' due to data type mismatch: differing types in '('1' = INTERVAL '1' DAY)' (string and interval day).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -2203,7 +2203,7 @@ SELECT '1' > INTERVAL '1' DAY
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '('1' > INTERVAL '1' DAY)' due to data type mismatch: differing types in '('1' > INTERVAL '1' DAY)' (string and interval day).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -2212,7 +2212,7 @@ SELECT INTERVAL '1' YEAR < '1'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '1' YEAR < '1')' due to data type mismatch: differing types in '(INTERVAL '1' YEAR < '1')' (interval year and string).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -2221,7 +2221,7 @@ SELECT INTERVAL '1' YEAR = '1'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '1' YEAR = '1')' due to data type mismatch: differing types in '(INTERVAL '1' YEAR = '1')' (interval year and string).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -2230,7 +2230,7 @@ SELECT INTERVAL '1' YEAR > '1'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '1' YEAR > '1')' due to data type mismatch: differing types in '(INTERVAL '1' YEAR > '1')' (interval year and string).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -2239,7 +2239,7 @@ SELECT '1' < INTERVAL '1' YEAR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '('1' < INTERVAL '1' YEAR)' due to data type mismatch: differing types in '('1' < INTERVAL '1' YEAR)' (string and interval year).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -2248,7 +2248,7 @@ SELECT '1' = INTERVAL '1' YEAR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '('1' = INTERVAL '1' YEAR)' due to data type mismatch: differing types in '('1' = INTERVAL '1' YEAR)' (string and interval year).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -2257,7 +2257,7 @@ SELECT '1' > INTERVAL '1' YEAR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '('1' > INTERVAL '1' YEAR)' due to data type mismatch: differing types in '('1' > INTERVAL '1' YEAR)' (string and interval year).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -2282,7 +2282,7 @@ SELECT array(INTERVAL 1 MONTH, INTERVAL 20 DAYS)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'array(INTERVAL '1' MONTH, INTERVAL '20' DAY)' due to data type mismatch: input to function array should all be the same type, but it's [interval month, interval day]; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -2307,7 +2307,7 @@ SELECT coalesce(INTERVAL 1 MONTH, INTERVAL 20 DAYS)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'coalesce(INTERVAL '1' MONTH, INTERVAL '20' DAY)' due to data type mismatch: input to function coalesce should all be the same type, but it's [interval month, interval day]; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -2364,7 +2364,7 @@ SELECT div(INTERVAL '1' MONTH, INTERVAL '-1' DAY)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '1' MONTH div INTERVAL '-1' DAY)' due to data type mismatch: differing types in '(INTERVAL '1' MONTH div INTERVAL '-1' DAY)' (interval month and interval day).; line 1 pos 7"],"queryContext":[]}


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/literals.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/literals.sql.out
index b40cf817fb328..3085fdaeac7b1 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/literals.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/literals.sql.out
@@ -37,7 +37,7 @@ select 128Y
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nNumeric literal 128 does not fit in range [-128, 127] for type tinyint(line 1, pos 7)\n\n== SQL ==\nselect 128Y\n-------^^^\n"],"queryContext":[]}


 -- !query
@@ -62,7 +62,7 @@ select 32768S
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nNumeric literal 32768 does not fit in range [-32768, 32767] for type smallint(line 1, pos 7)\n\n== SQL ==\nselect 32768S\n-------^^^\n"],"queryContext":[]}


 -- !query
@@ -87,7 +87,7 @@ select 9223372036854775808L
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nNumeric literal 9223372036854775808 does not fit in range [-9223372036854775808, 9223372036854775807] for type bigint(line 1, pos 7)\n\n== SQL ==\nselect 9223372036854775808L\n-------^^^\n"],"queryContext":[]}


 -- !query
@@ -128,7 +128,7 @@ select 1234567890123456789012345678901234567890
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\ndecimal can only support precision up to 38\n== SQL ==\nselect 1234567890123456789012345678901234567890"],"queryContext":[]}


 -- !query
@@ -137,7 +137,7 @@ select 1234567890123456789012345678901234567890.0
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\ndecimal can only support precision up to 38\n== SQL ==\nselect 1234567890123456789012345678901234567890.0"],"queryContext":[]}


 -- !query
@@ -162,7 +162,7 @@ select -3.4028235E39f
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nNumeric literal -3.4028235E39 does not fit in range [-3.4028234663852886E+38, 3.4028234663852886E+38] for type float(line 1, pos 7)\n\n== SQL ==\nselect -3.4028235E39f\n-------^^^\n"],"queryContext":[]}


 -- !query
@@ -196,7 +196,7 @@ select 1E309, -1E309
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nNumeric literal 1E309 does not fit in range [-1.7976931348623157E+308, 1.7976931348623157E+308] for type double(line 1, pos 7)\n\n== SQL ==\nselect 1E309, -1E309\n-------^^^\n"],"queryContext":[]}


 -- !query
@@ -295,7 +295,7 @@ select date 'mar 11 2016'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: mar 11 2016(line 1, pos 7)\n\n== SQL ==\nselect date 'mar 11 2016'\n-------^^^\n"],"queryContext":[]}


 -- !query
@@ -312,7 +312,7 @@ select timestamp '2016-33-11 20:54:00.000'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nCannot parse the TIMESTAMP value: 2016-33-11 20:54:00.000(line 1, pos 7)\n\n== SQL ==\nselect timestamp '2016-33-11 20:54:00.000'\n-------^^^\n"],"queryContext":[]}


 -- !query
@@ -321,7 +321,7 @@ select GEO '(10,-6)'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nLiterals of type 'GEO' are currently not supported.(line 1, pos 7)\n\n== SQL ==\nselect GEO '(10,-6)'\n-------^^^\n"],"queryContext":[]}


 -- !query
@@ -338,7 +338,7 @@ select 1.20E-38BD
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\ndecimal can only support precision up to 38(line 1, pos 7)\n\n== SQL ==\nselect 1.20E-38BD\n-------^^^\n"],"queryContext":[]}


 -- !query
@@ -355,7 +355,7 @@ select X'XuZ'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\ncontains illegal character for hexBinary: 0XuZ(line 1, pos 7)\n\n== SQL ==\nselect X'XuZ'\n-------^^^\n"],"queryContext":[]}


 -- !query
@@ -372,7 +372,7 @@ select +date '1999-01-01'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(+ DATE '1999-01-01')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'DATE '1999-01-01'' is of date type.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -381,7 +381,7 @@ select +timestamp '1999-01-01'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(+ TIMESTAMP '1999-01-01 00:00:00')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'TIMESTAMP '1999-01-01 00:00:00'' is of timestamp type.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -398,7 +398,7 @@ select +map(1, 2)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(+ map(1, 2))' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'map(1, 2)' is of map type.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -407,7 +407,7 @@ select +array(1,2)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(+ array(1, 2))' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'array(1, 2)' is of array type.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -416,7 +416,7 @@ select +named_struct('a', 1, 'b', 'spark')
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(+ named_struct('a', 1, 'b', 'spark'))' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'named_struct('a', 1, 'b', 'spark')' is of struct type.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -425,7 +425,7 @@ select +X'1'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(+ X'01')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'X'01'' is of binary type.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -434,7 +434,7 @@ select -date '1999-01-01'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(- DATE '1999-01-01')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'DATE '1999-01-01'' is of date type.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -443,7 +443,7 @@ select -timestamp '1999-01-01'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(- TIMESTAMP '1999-01-01 00:00:00')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'TIMESTAMP '1999-01-01 00:00:00'' is of timestamp type.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -452,4 +452,4 @@ select -x'2379ACFe'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(- X'2379ACFE')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'X'2379ACFE'' is of binary type.; line 1 pos 7"],"queryContext":[]}
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/map.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/map.sql.out
index 0b2d09734dab1..41ab62980d8f0 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/map.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/map.sql.out
@@ -71,7 +71,7 @@ select map_contains_key(map('1', 'a', '2', 'b'), 1)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'map_contains_key(map('1', 'a', '2', 'b'), 1)' due to data type mismatch: Input to function map_contains_key should have been map followed by a value with same key type, but it's [map<string,string>, int].; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -80,7 +80,7 @@ select map_contains_key(map(1, 'a', 2, 'b'), '1')
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'map_contains_key(map(1, 'a', 2, 'b'), '1')' due to data type mismatch: Input to function map_contains_key should have been map followed by a value with same key type, but it's [map<int,string>, string].; line 1 pos 7"],"queryContext":[]}


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out
index c4f09a811da7e..b4f57d5f23123 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out
@@ -5,7 +5,7 @@ select concat_ws()
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["requirement failed: concat_ws requires at least one argument.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -14,7 +14,7 @@ select format_string()
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["requirement failed: format_string() should take at least 1 argument; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -652,7 +652,7 @@ select decode()
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Invalid number of arguments for function decode. Expected: 2; Found: 0; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -661,7 +661,7 @@ select decode(encode('abc', 'utf-8'))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Invalid number of arguments for function decode. Expected: 2; Found: 1; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -1126,7 +1126,7 @@ select to_binary(null, cast(null as int))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["The 'format' parameter of function 'to_binary' needs to be a string literal.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -1135,7 +1135,7 @@ select to_binary('abc', 1)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["The 'format' parameter of function 'to_binary' needs to be a string literal.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -1144,7 +1144,7 @@ select to_binary('abc', 'invalidFormat')
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Invalid value for the 'format' parameter of function 'to_binary': invalidformat. The value has to be a case-insensitive string literal of 'hex', 'utf-8', or 'base64'."],"queryContext":[]}


 -- !query
@@ -1153,4 +1153,4 @@ select to_binary('a!', 'base64')
 struct<>
 -- !query output
 java.lang.IllegalArgumentException
-Last unit does not have enough valid bits
+{"errorClass":"legacy","messageParameters":["Last unit does not have enough valid bits"],"queryContext":[]}
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
index d697772cbf571..b8fc7da629f7e 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
@@ -13,7 +13,7 @@ select timestamp '2019-01-01中文'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nCannot parse the TIMESTAMP value: 2019-01-01中文(line 1, pos 7)\n\n== SQL ==\nselect timestamp '2019-01-01中文'\n-------^^^\n"],"queryContext":[]}


 -- !query
@@ -22,7 +22,7 @@ select timestamp'4294967297'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nCannot parse the TIMESTAMP value: 4294967297(line 1, pos 7)\n\n== SQL ==\nselect timestamp'4294967297'\n-------^^^\n"],"queryContext":[]}


 -- !query
@@ -31,7 +31,7 @@ select timestamp'2021-01-01T12:30:4294967297.123456'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nCannot parse the TIMESTAMP value: 2021-01-01T12:30:4294967297.123456(line 1, pos 7)\n\n== SQL ==\nselect timestamp'2021-01-01T12:30:4294967297.123456'\n-------^^^\n"],"queryContext":[]}


 -- !query
@@ -105,7 +105,7 @@ SELECT make_timestamp(1, 1, 1, 1, 1, 61)
 struct<>
 -- !query output
 java.time.DateTimeException
-Invalid value for SecondOfMinute (valid values 0 - 59): 61. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+{"errorClass":"legacy","messageParameters":["Invalid value for SecondOfMinute (valid values 0 - 59): 61. If necessary set spark.sql.ansi.enabled to false to bypass this error."],"queryContext":[]}


 -- !query
@@ -130,7 +130,7 @@ SELECT make_timestamp(1, 1, 1, 1, 1, 99.999999)
 struct<>
 -- !query output
 java.time.DateTimeException
-Invalid value for SecondOfMinute (valid values 0 - 59): 99. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+{"errorClass":"legacy","messageParameters":["Invalid value for SecondOfMinute (valid values 0 - 59): 99. If necessary set spark.sql.ansi.enabled to false to bypass this error."],"queryContext":[]}


 -- !query
@@ -139,7 +139,7 @@ SELECT make_timestamp(1, 1, 1, 1, 1, 999.999999)
 struct<>
 -- !query output
 java.time.DateTimeException
-Invalid value for SecondOfMinute (valid values 0 - 59): 999. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+{"errorClass":"legacy","messageParameters":["Invalid value for SecondOfMinute (valid values 0 - 59): 999. If necessary set spark.sql.ansi.enabled to false to bypass this error."],"queryContext":[]}


 -- !query
@@ -180,7 +180,7 @@ select TIMESTAMP_SECONDS(1230219000123123)
 struct<>
 -- !query output
 java.lang.ArithmeticException
-long overflow
+{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]}


 -- !query
@@ -189,7 +189,7 @@ select TIMESTAMP_SECONDS(-1230219000123123)
 struct<>
 -- !query output
 java.lang.ArithmeticException
-long overflow
+{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]}


 -- !query
@@ -198,7 +198,7 @@ select TIMESTAMP_MILLIS(92233720368547758)
 struct<>
 -- !query output
 java.lang.ArithmeticException
-long overflow
+{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]}


 -- !query
@@ -207,7 +207,7 @@ select TIMESTAMP_MILLIS(-92233720368547758)
 struct<>
 -- !query output
 java.lang.ArithmeticException
-long overflow
+{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]}


 -- !query
@@ -216,7 +216,7 @@ select TIMESTAMP_SECONDS(0.1234567)
 struct<>
 -- !query output
 java.lang.ArithmeticException
-Rounding necessary
+{"errorClass":"legacy","messageParameters":["Rounding necessary"],"queryContext":[]}


 -- !query
@@ -656,7 +656,7 @@ select timestamp'2011-11-11 11:11:11' + '1'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' + CAST('1' AS TIMESTAMP))' due to data type mismatch: '(TIMESTAMP '2011-11-11 11:11:11' + CAST('1' AS TIMESTAMP))' requires (numeric or interval day to second or interval year to month or interval) type, not timestamp; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -665,7 +665,7 @@ select '1' + timestamp'2011-11-11 11:11:11'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS TIMESTAMP) + TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: '(CAST('1' AS TIMESTAMP) + TIMESTAMP '2011-11-11 11:11:11')' requires (numeric or interval day to second or interval year to month or interval) type, not timestamp; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -674,7 +674,7 @@ select timestamp'2011-11-11 11:11:11' + null
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' + NULL)' due to data type mismatch: differing types in '(TIMESTAMP '2011-11-11 11:11:11' + NULL)' (timestamp and void).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -683,7 +683,7 @@ select null + timestamp'2011-11-11 11:11:11'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(NULL + TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(NULL + TIMESTAMP '2011-11-11 11:11:11')' (void and timestamp).; line 1 pos 7"],"queryContext":[]}


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/try_arithmetic.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/try_arithmetic.sql.out
index 7396b252e142e..758351f01b2e4 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/try_arithmetic.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/try_arithmetic.sql.out
@@ -141,7 +141,7 @@ SELECT try_add(interval 2 year, interval 2 second)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'INTERVAL '2' YEAR + INTERVAL '02' SECOND' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2' YEAR' is of interval year type.; line 1 pos 7"],"queryContext":[]}


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/array.sql.out b/sql/core/src/test/resources/sql-tests/results/array.sql.out
index c8ca2c6aa6e36..776ebb8598bf5 100644
--- a/sql/core/src/test/resources/sql-tests/results/array.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/array.sql.out
@@ -128,7 +128,7 @@ select sort_array(array('b', 'd'), '1')
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'sort_array(array('b', 'd'), '1')' due to data type mismatch: Sort order in second argument requires a boolean literal.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -137,7 +137,7 @@ select sort_array(array('b', 'd'), cast(NULL as boolean))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'sort_array(array('b', 'd'), CAST(NULL AS BOOLEAN))' due to data type mismatch: Sort order in second argument requires a boolean literal.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -294,4 +294,4 @@ select array_size(map('a', 1, 'b', 2))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'array_size(map('a', 1, 'b', 2))' due to data type mismatch: argument 1 requires array type, however, 'map('a', 1, 'b', 2)' is of map<string,int> type.; line 1 pos 7"],"queryContext":[]}
diff --git a/sql/core/src/test/resources/sql-tests/results/bitwise.sql.out b/sql/core/src/test/resources/sql-tests/results/bitwise.sql.out
index b045e307ce22a..4eb71a3903891 100644
--- a/sql/core/src/test/resources/sql-tests/results/bitwise.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/bitwise.sql.out
@@ -149,7 +149,7 @@ select bit_count("bit count")
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'bit_count('bit count')' due to data type mismatch: argument 1 requires (integral or boolean) type, however, ''bit count'' is of string type.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -158,7 +158,7 @@ select bit_count('a')
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'bit_count('a')' due to data type mismatch: argument 1 requires (integral or boolean) type, however, ''a'' is of string type.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -260,7 +260,7 @@ select getbit(11L, -1)
 struct<>
 -- !query output
 java.lang.IllegalArgumentException
-Invalid bit position: -1 is less than zero
+{"errorClass":"legacy","messageParameters":["Invalid bit position: -1 is less than zero"],"queryContext":[]}


 -- !query
@@ -269,4 +269,4 @@ select getbit(11L, 64)
 struct<>
 -- !query output
 java.lang.IllegalArgumentException
-Invalid bit position: 64 exceeds the bit upper limit
+{"errorClass":"legacy","messageParameters":["Invalid bit position: 64 exceeds the bit upper limit"],"queryContext":[]}
diff --git a/sql/core/src/test/resources/sql-tests/results/cast.sql.out b/sql/core/src/test/resources/sql-tests/results/cast.sql.out
index 8e6a5c0404d10..8027c8e4b4de5 100644
--- a/sql/core/src/test/resources/sql-tests/results/cast.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/cast.sql.out
@@ -350,7 +350,7 @@ SELECT CAST(interval 3 month 1 hour AS string)
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nCannot mix year-month and day-time fields: interval 3 month 1 hour(line 1, pos 12)\n\n== SQL ==\nSELECT CAST(interval 3 month 1 hour AS string)\n------------^^^\n"],"queryContext":[]}


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/ceil-floor-with-scale-param.sql.out b/sql/core/src/test/resources/sql-tests/results/ceil-floor-with-scale-param.sql.out
index cd76b53163a99..d073a93a4eb15 100644
--- a/sql/core/src/test/resources/sql-tests/results/ceil-floor-with-scale-param.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ceil-floor-with-scale-param.sql.out
@@ -93,7 +93,7 @@ SELECT CEIL(2.5, null)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["The 'scale' parameter of function 'ceil' needs to be a int literal.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -102,7 +102,7 @@ SELECT CEIL(2.5, 'a')
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["The 'scale' parameter of function 'ceil' needs to be a int literal.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -111,7 +111,7 @@ SELECT CEIL(2.5, 0, 0)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Invalid number of arguments for function ceil. Expected: 2; Found: 3; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -208,7 +208,7 @@ SELECT FLOOR(2.5, null)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["The 'scale' parameter of function 'floor' needs to be a int literal.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -217,7 +217,7 @@ SELECT FLOOR(2.5, 'a')
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["The 'scale' parameter of function 'floor' needs to be a int literal.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -226,4 +226,4 @@ SELECT FLOOR(2.5, 0, 0)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Invalid number of arguments for function floor. Expected: 2; Found: 3; line 1 pos 7"],"queryContext":[]}
diff --git a/sql/core/src/test/resources/sql-tests/results/change-column.sql.out b/sql/core/src/test/resources/sql-tests/results/change-column.sql.out
index 3b3b4f0fa8e0f..e9c050bc5704b 100644
--- a/sql/core/src/test/resources/sql-tests/results/change-column.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/change-column.sql.out
@@ -23,7 +23,7 @@ ALTER TABLE test_change CHANGE a
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nOperation not allowed: ALTER TABLE table CHANGE COLUMN requires a TYPE, a SET/DROP, a COMMENT, or a FIRST/AFTER(line 1, pos 0)\n\n== SQL ==\nALTER TABLE test_change CHANGE a\n^^^\n"],"queryContext":[]}


 -- !query
@@ -42,7 +42,7 @@ ALTER TABLE test_change RENAME COLUMN a TO a1
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["RENAME COLUMN is only supported with v2 tables."],"queryContext":[]}


 -- !query
@@ -61,7 +61,7 @@ ALTER TABLE test_change CHANGE a TYPE STRING
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["ALTER TABLE CHANGE COLUMN is not supported for changing column 'a' with type 'IntegerType' to 'a' with type 'StringType'"],"queryContext":[]}


 -- !query
@@ -80,7 +80,7 @@ ALTER TABLE test_change CHANGE a AFTER b
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["ALTER COLUMN ... FIRST | ALTER is only supported with v2 tables."],"queryContext":[]}


 -- !query
@@ -89,7 +89,7 @@ ALTER TABLE test_change CHANGE b FIRST
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["ALTER COLUMN ... FIRST | ALTER is only supported with v2 tables."],"queryContext":[]}


 -- !query
@@ -168,7 +168,7 @@ ALTER TABLE test_change CHANGE invalid_col TYPE INT
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Missing field invalid_col in table spark_catalog.default.test_change with schema:\nroot\n |-- a: integer (nullable = true)\n |-- b: string (nullable = true)\n |-- c: integer (nullable = true)\n; line 1 pos 0"],"queryContext":[]}


 -- !query
@@ -213,7 +213,7 @@ ALTER TABLE temp_view CHANGE a TYPE INT
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["temp_view is a temp view. 'ALTER TABLE ... CHANGE COLUMN' expects a table.; line 1 pos 12"],"queryContext":[]}


 -- !query
@@ -230,7 +230,7 @@ ALTER TABLE global_temp.global_temp_view CHANGE a TYPE INT
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["global_temp.global_temp_view is a temp view. 'ALTER TABLE ... CHANGE COLUMN' expects a table.; line 1 pos 12"],"queryContext":[]}


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/charvarchar.sql.out b/sql/core/src/test/resources/sql-tests/results/charvarchar.sql.out
index b92f7b38ff234..58cb227982e0f 100644
--- a/sql/core/src/test/resources/sql-tests/results/charvarchar.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/charvarchar.sql.out
@@ -259,7 +259,7 @@ alter table char_tbl1 change column c type char(6)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["ALTER TABLE CHANGE COLUMN is not supported for changing column 'c' with type 'CharType(5)' to 'c' with type 'CharType(6)'"],"queryContext":[]}


 -- !query
@@ -575,7 +575,7 @@ alter table char_part partition (v2='ke') rename to partition (v2='nt')
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Partition spec is invalid. The spec (v2) must match the partition spec (v2, c2) defined in table '`spark_catalog`.`default`.`char_part`'"],"queryContext":[]}


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/columnresolution-negative.sql.out b/sql/core/src/test/resources/sql-tests/results/columnresolution-negative.sql.out
index 387f661d005a8..d8d8e366ca878 100644
--- a/sql/core/src/test/resources/sql-tests/results/columnresolution-negative.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/columnresolution-negative.sql.out
@@ -69,7 +69,7 @@ SELECT i1 FROM t1, mydb1.t1
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Reference 'i1' is ambiguous, could be: spark_catalog.mydb1.t1.i1, spark_catalog.mydb1.t1.i1.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -78,7 +78,7 @@ SELECT t1.i1 FROM t1, mydb1.t1
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Reference 't1.i1' is ambiguous, could be: spark_catalog.mydb1.t1.i1, spark_catalog.mydb1.t1.i1.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -87,7 +87,7 @@ SELECT mydb1.t1.i1 FROM t1, mydb1.t1
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Reference 'mydb1.t1.i1' is ambiguous, could be: spark_catalog.mydb1.t1.i1, spark_catalog.mydb1.t1.i1.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -96,7 +96,7 @@ SELECT i1 FROM t1, mydb2.t1
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Reference 'i1' is ambiguous, could be: spark_catalog.mydb1.t1.i1, spark_catalog.mydb2.t1.i1.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -105,7 +105,7 @@ SELECT t1.i1 FROM t1, mydb2.t1
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Reference 't1.i1' is ambiguous, could be: spark_catalog.mydb1.t1.i1, spark_catalog.mydb2.t1.i1.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -122,7 +122,7 @@ SELECT i1 FROM t1, mydb1.t1
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Reference 'i1' is ambiguous, could be: spark_catalog.mydb2.t1.i1, spark_catalog.mydb1.t1.i1.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -131,7 +131,7 @@ SELECT t1.i1 FROM t1, mydb1.t1
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Reference 't1.i1' is ambiguous, could be: spark_catalog.mydb2.t1.i1, spark_catalog.mydb1.t1.i1.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -140,7 +140,7 @@ SELECT i1 FROM t1, mydb2.t1
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Reference 'i1' is ambiguous, could be: spark_catalog.mydb2.t1.i1, spark_catalog.mydb2.t1.i1.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -149,7 +149,7 @@ SELECT t1.i1 FROM t1, mydb2.t1
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Reference 't1.i1' is ambiguous, could be: spark_catalog.mydb2.t1.i1, spark_catalog.mydb2.t1.i1.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -192,7 +192,7 @@ SELECT t1.x.y.* FROM t1
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 't1.x.y.*' given input columns 'i1'; line 1 pos 7"],"queryContext":[]}


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/comments.sql.out b/sql/core/src/test/resources/sql-tests/results/comments.sql.out
index a9cdb9161687e..685cc95d3d70f 100644
--- a/sql/core/src/test/resources/sql-tests/results/comments.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/comments.sql.out
@@ -132,7 +132,7 @@ select 1 as a
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nUnclosed bracketed comment(line 3, pos 0)\n\n== SQL ==\n/*abc*/\nselect 1 as a\n/*\n^^^\n\n2 as b\n/*abc*/\n, 3 as c\n\n/**/\n"],"queryContext":[]}


 -- !query
@@ -150,4 +150,4 @@ select 4 as d
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nUnclosed bracketed comment(line 3, pos 0)\n\n== SQL ==\n/*abc*/\nselect 1 as a\n/*\n^^^\n\n2 as b\n/*abc*/\n, 3 as c\n\n/**/\nselect 4 as d\n"],"queryContext":[]}
diff --git a/sql/core/src/test/resources/sql-tests/results/count.sql.out b/sql/core/src/test/resources/sql-tests/results/count.sql.out
index 93b7a5eeffe42..dbed5336733a6 100644
--- a/sql/core/src/test/resources/sql-tests/results/count.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/count.sql.out
@@ -146,7 +146,7 @@ SELECT count() FROM testData
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'count()' due to data type mismatch: count requires at least one argument. If you have to call the function count without arguments, set the legacy configuration `spark.sql.legacy.allowParameterlessCount` as true; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -179,4 +179,4 @@ SELECT count(testData.*) FROM testData
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["count(testData.*) is not allowed. Please use count(*) or expand the columns manually, e.g. count(col1, col2)"],"queryContext":[]}
diff --git a/sql/core/src/test/resources/sql-tests/results/csv-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/csv-functions.sql.out
index 44d91d6d32580..8f28bbc12a4d6 100644
--- a/sql/core/src/test/resources/sql-tests/results/csv-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/csv-functions.sql.out
@@ -21,7 +21,7 @@ select from_csv('1', 1)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["The expression '1' is not a valid schema string.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -30,7 +30,7 @@ select from_csv('1', 'a InvalidType')
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Cannot parse the data type: \n[PARSE_SYNTAX_ERROR] Syntax error at or near 'InvalidType': extra input 'InvalidType'(line 1, pos 2)\n\n== SQL ==\na InvalidType\n--^^^\n\nFailed fallback parsing: \nDataType invalidtype is not supported.(line 1, pos 2)\n\n== SQL ==\na InvalidType\n--^^^\n; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -39,7 +39,7 @@ select from_csv('1', 'a INT', named_struct('mode', 'PERMISSIVE'))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Must use a map() function for options; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -48,7 +48,7 @@ select from_csv('1', 'a INT', map('mode', 1))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["A type of keys and values in map() must be string, but got map<string,int>; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -57,7 +57,7 @@ select from_csv()
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Invalid number of arguments for function from_csv. Expected: one of 2 and 3; Found: 0; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -82,7 +82,7 @@ select schema_of_csv(null)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'schema_of_csv(NULL)' due to data type mismatch: The input csv should be a foldable string expression and not null; however, got NULL.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -99,7 +99,7 @@ SELECT schema_of_csv(csvField) FROM csvTable
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'schema_of_csv(csvtable.csvField)' due to data type mismatch: The input csv should be a foldable string expression and not null; however, got csvtable.csvField.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -132,7 +132,7 @@ select to_csv(named_struct('a', 1, 'b', 2), named_struct('mode', 'PERMISSIVE'))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Must use a map() function for options; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -141,4 +141,4 @@ select to_csv(named_struct('a', 1, 'b', 2), map('mode', 1))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["A type of keys and values in map() must be string, but got map<string,int>; line 1 pos 7"],"queryContext":[]}
diff --git a/sql/core/src/test/resources/sql-tests/results/cte-legacy.sql.out b/sql/core/src/test/resources/sql-tests/results/cte-legacy.sql.out
index ddcc30e6597eb..95d33c6ffdddb 100644
--- a/sql/core/src/test/resources/sql-tests/results/cte-legacy.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/cte-legacy.sql.out
@@ -232,4 +232,4 @@ SELECT * FROM t2
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Table or view not found: t1; line 5 pos 20"],"queryContext":[]}
diff --git a/sql/core/src/test/resources/sql-tests/results/cte-nested.sql.out b/sql/core/src/test/resources/sql-tests/results/cte-nested.sql.out
index 7fb01026703cd..d669e5729c45c 100644
--- a/sql/core/src/test/resources/sql-tests/results/cte-nested.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/cte-nested.sql.out
@@ -45,7 +45,7 @@ SELECT * FROM t2
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Name t is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228."],"queryContext":[]}


 -- !query
@@ -82,7 +82,7 @@ SELECT * FROM t2
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Name t is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228."],"queryContext":[]}


 -- !query
@@ -136,7 +136,7 @@ SELECT (
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Name t is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228."],"queryContext":[]}


 -- !query
@@ -151,7 +151,7 @@ SELECT (
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Name t is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228."],"queryContext":[]}


 -- !query
@@ -167,7 +167,7 @@ SELECT (
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Name t is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228."],"queryContext":[]}


 -- !query
@@ -181,7 +181,7 @@ WHERE c IN (
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Name t is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228."],"queryContext":[]}


 -- !query
@@ -210,7 +210,7 @@ SELECT * FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Name aBc is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228."],"queryContext":[]}


 -- !query
@@ -223,7 +223,7 @@ SELECT (
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Name aBc is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228."],"queryContext":[]}


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/cte.sql.out b/sql/core/src/test/resources/sql-tests/results/cte.sql.out
index 5b09d8267fffa..05d485fbaa5db 100644
--- a/sql/core/src/test/resources/sql-tests/results/cte.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/cte.sql.out
@@ -21,7 +21,7 @@ WITH s AS (SELECT 1 FROM s) SELECT * FROM s
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Table or view not found: s; line 1 pos 25"],"queryContext":[]}


 -- !query
@@ -31,7 +31,7 @@ SELECT * FROM r
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Table or view not found: r; line 1 pos 33"],"queryContext":[]}


 -- !query
@@ -50,7 +50,7 @@ WITH s1 AS (SELECT 1 FROM s2), s2 AS (SELECT 1 FROM s1) SELECT * FROM s1, s2
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Table or view not found: s2; line 1 pos 26"],"queryContext":[]}


 -- !query
@@ -141,7 +141,7 @@ SELECT * FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nCTE definition can't have duplicate names: 't'.(line 1, pos 0)\n\n== SQL ==\nWITH\n^^^\n t(x) AS (SELECT 1),\n t(x) AS (SELECT 2)\nSELECT * FROM t\n"],"queryContext":[]}


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/date.sql.out b/sql/core/src/test/resources/sql-tests/results/date.sql.out
index 3e29f8fafa81c..9900d9735039f 100644
--- a/sql/core/src/test/resources/sql-tests/results/date.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/date.sql.out
@@ -21,7 +21,7 @@ select date '2020-01-01中文'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 2020-01-01中文(line 1, pos 7)\n\n== SQL ==\nselect date '2020-01-01中文'\n-------^^^\n"],"queryContext":[]}


 -- !query
@@ -54,7 +54,7 @@ select date'015'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 015(line 1, pos 7)\n\n== SQL ==\nselect date'015'\n-------^^^\n"],"queryContext":[]}


 -- !query
@@ -63,7 +63,7 @@ select date'2021-4294967297-11'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 2021-4294967297-11(line 1, pos 7)\n\n== SQL ==\nselect date'2021-4294967297-11'\n-------^^^\n"],"queryContext":[]}


 -- !query
@@ -266,7 +266,7 @@ select date_add('2011-11-11', 1L)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1L)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1L' is of bigint type.; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -275,7 +275,7 @@ select date_add('2011-11-11', 1.0)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1.0BD)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -284,7 +284,7 @@ select date_add('2011-11-11', 1E1)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -374,7 +374,7 @@ select date_sub('2011-11-11', 1L)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 1L)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1L' is of bigint type.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -383,7 +383,7 @@ select date_sub('2011-11-11', 1.0)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 1.0BD)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -392,7 +392,7 @@ select date_sub('2011-11-11', 1E1)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -450,7 +450,7 @@ select date_add('2011-11-11', int_str) from date_view
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('2011-11-11' AS DATE), date_view.int_str)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'date_view.int_str' is of string type.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -459,7 +459,7 @@ select date_sub('2011-11-11', int_str) from date_view
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), date_view.int_str)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'date_view.int_str' is of string type.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -484,7 +484,7 @@ select date '2011-11-11' + 1E1
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(DATE '2011-11-11', 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -525,7 +525,7 @@ select date '2001-10-01' - '2001-09-28'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'date_sub(DATE '2001-10-01', CAST('2001-09-28' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('2001-09-28' AS DOUBLE)' is of double type.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -566,7 +566,7 @@ select date '2001-09-28' - date_str from date_view
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'date_sub(DATE '2001-09-28', CAST(date_view.date_str AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(date_view.date_str AS DOUBLE)' is of double type.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -575,7 +575,7 @@ select date'2011-11-11' + '1'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(DATE '2011-11-11', CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -584,7 +584,7 @@ select '1' + date'2011-11-11'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(DATE '2011-11-11', CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7"],"queryContext":[]}


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/datetime-formatting-invalid.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime-formatting-invalid.sql.out
index 6827bc1546dc2..e9925ee4a9c69 100644
--- a/sql/core/src/test/resources/sql-tests/results/datetime-formatting-invalid.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/datetime-formatting-invalid.sql.out
@@ -23,7 +23,7 @@ select date_format('2018-11-17 13:33:33.333', 'qqqqq')
 struct<>
 -- !query output
 java.lang.IllegalArgumentException
-Too many pattern letters: q
+{"errorClass":"legacy","messageParameters":["Too many pattern letters: q"],"queryContext":[]}


 -- !query
@@ -32,7 +32,7 @@ select date_format('2018-11-17 13:33:33.333', 'QQQQQ')
 struct<>
 -- !query output
 java.lang.IllegalArgumentException
-Too many pattern letters: Q
+{"errorClass":"legacy","messageParameters":["Too many pattern letters: Q"],"queryContext":[]}


 -- !query
@@ -167,7 +167,7 @@ select date_format('2018-11-17 13:33:33.333', 'V')
 struct<>
 -- !query output
 java.lang.IllegalArgumentException
-Pattern letter count must be 2: V
+{"errorClass":"legacy","messageParameters":["Pattern letter count must be 2: V"],"queryContext":[]}


 -- !query
@@ -185,7 +185,7 @@ select date_format('2018-11-17 13:33:33.333', 'XXXXXX')
 struct<>
 -- !query output
 java.lang.IllegalArgumentException
-Too many pattern letters: X
+{"errorClass":"legacy","messageParameters":["Too many pattern letters: X"],"queryContext":[]}


 -- !query
@@ -203,7 +203,7 @@ select date_format('2018-11-17 13:33:33.333', 'OO')
 struct<>
 -- !query output
 java.lang.IllegalArgumentException
-Pattern letter count must be 1 or 4: O
+{"errorClass":"legacy","messageParameters":["Pattern letter count must be 1 or 4: O"],"queryContext":[]}


 -- !query
@@ -212,7 +212,7 @@ select date_format('2018-11-17 13:33:33.333', 'xxxxxx')
 struct<>
 -- !query output
 java.lang.IllegalArgumentException
-Too many pattern letters: x
+{"errorClass":"legacy","messageParameters":["Too many pattern letters: x"],"queryContext":[]}


 -- !query
@@ -221,7 +221,7 @@ select date_format('2018-11-17 13:33:33.333', 'A')
 struct<>
 -- !query output
 java.lang.IllegalArgumentException
-Illegal pattern character: A
+{"errorClass":"legacy","messageParameters":["Illegal pattern character: A"],"queryContext":[]}


 -- !query
@@ -230,7 +230,7 @@ select date_format('2018-11-17 13:33:33.333', 'n')
 struct<>
 -- !query output
 java.lang.IllegalArgumentException
-Illegal pattern character: n
+{"errorClass":"legacy","messageParameters":["Illegal pattern character: n"],"queryContext":[]}


 -- !query
@@ -239,7 +239,7 @@ select date_format('2018-11-17 13:33:33.333', 'N')
 struct<>
 -- !query output
 java.lang.IllegalArgumentException
-Illegal pattern character: N
+{"errorClass":"legacy","messageParameters":["Illegal pattern character: N"],"queryContext":[]}


 -- !query
@@ -248,7 +248,7 @@ select date_format('2018-11-17 13:33:33.333', 'p')
 struct<>
 -- !query output
 java.lang.IllegalArgumentException
-Illegal pattern character: p
+{"errorClass":"legacy","messageParameters":["Illegal pattern character: p"],"queryContext":[]}


 -- !query
@@ -293,7 +293,7 @@ select date_format('2018-11-17 13:33:33.333', 'e')
 struct<>
 -- !query output
 java.lang.IllegalArgumentException
-All week-based patterns are unsupported since Spark 3.0, detected: e, Please use the SQL function EXTRACT instead
+{"errorClass":"legacy","messageParameters":["All week-based patterns are unsupported since Spark 3.0, detected: e, Please use the SQL function EXTRACT instead"],"queryContext":[]}


 -- !query
@@ -302,7 +302,7 @@ select date_format('2018-11-17 13:33:33.333', 'c')
 struct<>
 -- !query output
 java.lang.IllegalArgumentException
-All week-based patterns are unsupported since Spark 3.0, detected: c, Please use the SQL function EXTRACT instead
+{"errorClass":"legacy","messageParameters":["All week-based patterns are unsupported since Spark 3.0, detected: c, Please use the SQL function EXTRACT instead"],"queryContext":[]}


 -- !query
@@ -311,7 +311,7 @@ select date_format('2018-11-17 13:33:33.333', 'B')
 struct<>
 -- !query output
 java.lang.IllegalArgumentException
-Illegal pattern character: B
+{"errorClass":"legacy","messageParameters":["Illegal pattern character: B"],"queryContext":[]}


 -- !query
@@ -320,7 +320,7 @@ select date_format('2018-11-17 13:33:33.333', 'C')
 struct<>
 -- !query output
 java.lang.IllegalArgumentException
-Unknown pattern letter: C
+{"errorClass":"legacy","messageParameters":["Unknown pattern letter: C"],"queryContext":[]}


 -- !query
@@ -329,4 +329,4 @@ select date_format('2018-11-17 13:33:33.333', 'I')
 struct<>
 -- !query output
 java.lang.IllegalArgumentException
-Unknown pattern letter: I
+{"errorClass":"legacy","messageParameters":["Unknown pattern letter: I"],"queryContext":[]}
diff --git a/sql/core/src/test/resources/sql-tests/results/datetime-formatting-legacy.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime-formatting-legacy.sql.out
index ed43af9cb518e..1cb30fb3edc15 100644
--- a/sql/core/src/test/resources/sql-tests/results/datetime-formatting-legacy.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/datetime-formatting-legacy.sql.out
@@ -48,7 +48,7 @@ select col, date_format(col, 'q qq') from v
 struct<>
 -- !query output
 java.lang.IllegalArgumentException
-Illegal pattern character 'q'
+{"errorClass":"legacy","messageParameters":["Illegal pattern character 'q'"],"queryContext":[]}


 -- !query
@@ -57,7 +57,7 @@ select col, date_format(col, 'Q QQ QQQ QQQQ') from v
 struct<>
 -- !query output
 java.lang.IllegalArgumentException
-Illegal pattern character 'Q'
+{"errorClass":"legacy","messageParameters":["Illegal pattern character 'Q'"],"queryContext":[]}


 -- !query
@@ -270,7 +270,7 @@ select col, date_format(col, 'VV') from v
 struct<>
 -- !query output
 java.lang.IllegalArgumentException
-Illegal pattern character 'V'
+{"errorClass":"legacy","messageParameters":["Illegal pattern character 'V'"],"queryContext":[]}


 -- !query
@@ -307,7 +307,7 @@ select col, date_format(col, 'XXXX XXXXX') from v
 struct<>
 -- !query output
 java.lang.IllegalArgumentException
-invalid ISO 8601 format: length=4
+{"errorClass":"legacy","messageParameters":["invalid ISO 8601 format: length=4"],"queryContext":[]}


 -- !query
@@ -330,7 +330,7 @@ select col, date_format(col, 'O OOOO') from v
 struct<>
 -- !query output
 java.lang.IllegalArgumentException
-Illegal pattern character 'O'
+{"errorClass":"legacy","messageParameters":["Illegal pattern character 'O'"],"queryContext":[]}


 -- !query
@@ -339,7 +339,7 @@ select col, date_format(col, 'x xx xxx xxxx xxxx xxxxx') from v
 struct<>
 -- !query output
 java.lang.IllegalArgumentException
-Illegal pattern character 'x'
+{"errorClass":"legacy","messageParameters":["Illegal pattern character 'x'"],"queryContext":[]}


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out
index e5dd1ddbef80e..075bd01e73d8e 100644
--- a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out
@@ -21,7 +21,7 @@ select date '2020-01-01中文'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 2020-01-01中文(line 1, pos 7)\n\n== SQL ==\nselect date '2020-01-01中文'\n-------^^^\n"],"queryContext":[]}


 -- !query
@@ -54,7 +54,7 @@ select date'015'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 015(line 1, pos 7)\n\n== SQL ==\nselect date'015'\n-------^^^\n"],"queryContext":[]}


 -- !query
@@ -63,7 +63,7 @@ select date'2021-4294967297-11'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 2021-4294967297-11(line 1, pos 7)\n\n== SQL ==\nselect date'2021-4294967297-11'\n-------^^^\n"],"queryContext":[]}


 -- !query
@@ -266,7 +266,7 @@ select date_add('2011-11-11', 1L)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1L)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1L' is of bigint type.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -275,7 +275,7 @@ select date_add('2011-11-11', 1.0)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1.0BD)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -284,7 +284,7 @@ select date_add('2011-11-11', 1E1)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -374,7 +374,7 @@ select date_sub('2011-11-11', 1L)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 1L)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1L' is of bigint type.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -383,7 +383,7 @@ select date_sub('2011-11-11', 1.0)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 1.0BD)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -392,7 +392,7 @@ select date_sub('2011-11-11', 1E1)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -450,7 +450,7 @@ select date_add('2011-11-11', int_str) from date_view
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('2011-11-11' AS DATE), date_view.int_str)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'date_view.int_str' is of string type.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -459,7 +459,7 @@ select date_sub('2011-11-11', int_str) from date_view
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), date_view.int_str)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'date_view.int_str' is of string type.; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -484,7 +484,7 @@ select date '2011-11-11' + 1E1
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(DATE '2011-11-11', 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -525,7 +525,7 @@ select date '2001-10-01' - '2001-09-28' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'date_sub(DATE '2001-10-01', CAST('2001-09-28' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('2001-09-28' AS DOUBLE)' is of double type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -566,7 +566,7 @@ select date '2001-09-28' - date_str from date_view struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'date_sub(DATE '2001-09-28', CAST(date_view.date_str AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(date_view.date_str AS DOUBLE)' is of double type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -575,7 +575,7 @@ select date'2011-11-11' + '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(DATE '2011-11-11', CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -584,7 +584,7 @@ select '1' + date'2011-11-11' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(DATE '2011-11-11', CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -814,7 +814,7 @@ select timestamp '2019-01-01中文' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot parse the TIMESTAMP value: 2019-01-01中文(line 1, pos 7)\n\n== SQL ==\nselect timestamp '2019-01-01中文'\n-------^^^\n"],"queryContext":[]} -- !query @@ -823,7 +823,7 @@ select timestamp'4294967297' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot parse the TIMESTAMP value: 4294967297(line 1, pos 7)\n\n== SQL ==\nselect timestamp'4294967297'\n-------^^^\n"],"queryContext":[]} -- !query @@ -832,7 +832,7 @@ select timestamp'2021-01-01T12:30:4294967297.123456' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot parse the TIMESTAMP value: 2021-01-01T12:30:4294967297.123456(line 1, pos 7)\n\n== SQL ==\nselect timestamp'2021-01-01T12:30:4294967297.123456'\n-------^^^\n"],"queryContext":[]} -- !query @@ -977,7 +977,7 @@ select 
TIMESTAMP_SECONDS(1230219000123123) struct<> -- !query output java.lang.ArithmeticException -long overflow +{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} -- !query @@ -986,7 +986,7 @@ select TIMESTAMP_SECONDS(-1230219000123123) struct<> -- !query output java.lang.ArithmeticException -long overflow +{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} -- !query @@ -995,7 +995,7 @@ select TIMESTAMP_MILLIS(92233720368547758) struct<> -- !query output java.lang.ArithmeticException -long overflow +{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} -- !query @@ -1004,7 +1004,7 @@ select TIMESTAMP_MILLIS(-92233720368547758) struct<> -- !query output java.lang.ArithmeticException -long overflow +{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} -- !query @@ -1013,7 +1013,7 @@ select TIMESTAMP_SECONDS(0.1234567) struct<> -- !query output java.lang.ArithmeticException -Rounding necessary +{"errorClass":"legacy","messageParameters":["Rounding necessary"],"queryContext":[]} -- !query @@ -1391,7 +1391,7 @@ select timestamp'2011-11-11 11:11:11' - '2011-11-11 11:11:10' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' - '2011-11-11 11:11:10')' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, ''2011-11-11 11:11:10'' is of string type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -1400,7 +1400,7 @@ select '2011-11-11 11:11:11' - timestamp'2011-11-11 11:11:10' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '('2011-11-11 11:11:11' - TIMESTAMP '2011-11-11 11:11:10')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, ''2011-11-11 11:11:11'' is of string type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -1433,7 +1433,7 @@ select str - timestamp'2011-11-11 11:11:11' from ts_view struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(ts_view.str - TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'ts_view.str' is of string type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -1442,7 +1442,7 @@ select timestamp'2011-11-11 11:11:11' - str from ts_view struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' - ts_view.str)' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'ts_view.str' is of string type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -1451,7 +1451,7 @@ select timestamp'2011-11-11 11:11:11' + '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' + CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(TIMESTAMP '2011-11-11 11:11:11' + 
CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1460,7 +1460,7 @@ select '1' + timestamp'2011-11-11 11:11:11' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) + TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) + TIMESTAMP '2011-11-11 11:11:11')' (double and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1469,7 +1469,7 @@ select timestamp'2011-11-11 11:11:11' + null struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' + NULL)' due to data type mismatch: differing types in '(TIMESTAMP '2011-11-11 11:11:11' + NULL)' (timestamp and void).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1478,7 +1478,7 @@ select null + timestamp'2011-11-11 11:11:11' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(NULL + TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(NULL + TIMESTAMP '2011-11-11 11:11:11')' (void and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out index 14012127fb28e..2e5dc96bf17dc 100644 --- a/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out @@ -5,7 +5,7 @@ select to_timestamp('294248', 'y') struct<> -- !query output java.lang.ArithmeticException -long overflow +{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/describe.sql.out b/sql/core/src/test/resources/sql-tests/results/describe.sql.out index 94f4902ba8d13..74a96b11c38fd 100644 --- a/sql/core/src/test/resources/sql-tests/results/describe.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/describe.sql.out @@ -362,7 +362,7 @@ DESC t PARTITION (c='Us', d=2) struct<> -- !query output org.apache.spark.sql.catalyst.analysis.NoSuchPartitionException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Partition not found in table 't' database 'default':\nc -> Us\nd -> 2"],"queryContext":[]} -- !query @@ -371,7 +371,7 @@ DESC t PARTITION (c='Us') struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Partition spec is invalid. 
The spec (c) must match the partition spec (c, d) defined in table '`spark_catalog`.`default`.`t`'"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/except-all.sql.out b/sql/core/src/test/resources/sql-tests/results/except-all.sql.out index 665a5c1787670..508fc05853698 100644 --- a/sql/core/src/test/resources/sql-tests/results/except-all.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/except-all.sql.out @@ -138,7 +138,7 @@ SELECT array(1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["ExceptAll can only be performed on tables with the compatible column types. The first column of the second table is array type which is not compatible with int at same column of first table"],"queryContext":[]} -- !query @@ -210,7 +210,7 @@ SELECT k, v FROM tab4 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["ExceptAll can only be performed on tables with the same number of columns, but the first table has 1 columns and the second table has 2 columns"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/extract.sql.out b/sql/core/src/test/resources/sql-tests/results/extract.sql.out index f1e942e63af4a..a1be7378f9d3d 100644 --- a/sql/core/src/test/resources/sql-tests/results/extract.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/extract.sql.out @@ -317,7 +317,7 @@ select extract(not_supported from c) from t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Literals of type 'not_supported' are currently not supported for the string type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -326,7 +326,7 @@ select extract(not_supported from i) from t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Literals of type 'not_supported' are currently not supported for the interval year to month type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -335,7 +335,7 @@ select extract(not_supported from j) from t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Literals of type 'not_supported' are currently not supported for the interval day to second type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -648,7 +648,7 @@ select date_part('not_supported', c) from t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Literals of type 'not_supported' are currently not supported for the string type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -657,7 +657,7 @@ select date_part(c, c) from t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["The 'field' parameter of function 'date_part' needs to be a string literal.; line 1 pos 7"],"queryContext":[]} -- !query @@ -674,7 +674,7 @@ select date_part(i, i) from t struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["The 'field' parameter of function 'date_part' needs to be a string literal.; line 1 pos 7"],"queryContext":[]} -- !query @@ -883,7 +883,7 @@ select extract(DAY from interval '2-1' YEAR TO MONTH) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Literals of type 'DAY' are currently not supported for the interval year to month type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -892,7 +892,7 @@ select date_part('DAY', interval '2-1' YEAR TO MONTH) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Literals of type 'DAY' are currently not supported for the interval year to month type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -901,7 +901,7 @@ select date_part('not_supported', interval '2-1' YEAR TO MONTH) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Literals of type 'not_supported' are currently not supported for the interval year to month type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -1014,7 +1014,7 @@ select extract(MONTH from interval '123 12:34:56.789123123' DAY TO SECOND) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Literals of type 'MONTH' are currently not supported for the interval day to second type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -1023,4 +1023,4 @@ select date_part('not_supported', interval '123 12:34:56.789123123' DAY TO SECON struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Literals of type 'not_supported' are currently not supported for the interval day to second type.; line 1 pos 7"],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/group-analytics.sql.out b/sql/core/src/test/resources/sql-tests/results/group-analytics.sql.out index f8db5a4ec8187..301f36796d8c8 100644 --- a/sql/core/src/test/resources/sql-tests/results/group-analytics.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/group-analytics.sql.out @@ -131,7 +131,7 @@ SELECT course, year, SUM(earnings) FROM courseSales GROUP BY ROLLUP(course, year struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nEmpty set in ROLLUP grouping sets is not supported.(line 1, pos 61)\n\n== SQL ==\nSELECT course, year, SUM(earnings) FROM courseSales GROUP BY ROLLUP(course, year, (course, year), ()) ORDER BY course, year\n-------------------------------------------------------------^^^\n"],"queryContext":[]} -- !query @@ -188,7 +188,7 @@ SELECT course, year, SUM(earnings) FROM courseSales GROUP BY CUBE(course, year, struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nEmpty set in CUBE grouping sets is not supported.(line 1, pos 61)\n\n== SQL ==\nSELECT course, year, SUM(earnings) FROM 
courseSales GROUP BY CUBE(course, year, (course, year), ()) ORDER BY course, year\n-------------------------------------------------------------^^^\n"],"queryContext":[]} -- !query @@ -441,7 +441,7 @@ SELECT course, year, GROUPING(course) FROM courseSales GROUP BY course, year struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["grouping() can only be used with GroupingSets/Cube/Rollup"],"queryContext":[]} -- !query @@ -450,7 +450,7 @@ SELECT course, year, GROUPING_ID(course, year) FROM courseSales GROUP BY course, struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["grouping_id() can only be used with GroupingSets/Cube/Rollup"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/group-by-filter.sql.out b/sql/core/src/test/resources/sql-tests/results/group-by-filter.sql.out index 032b4ded6b3a3..9378c0b9bf9ee 100644 --- a/sql/core/src/test/resources/sql-tests/results/group-by-filter.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/group-by-filter.sql.out @@ -48,7 +48,7 @@ SELECT a, COUNT(b) FILTER (WHERE a >= 2) FROM testData struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["grouping expressions sequence is empty, and 'testdata.a' is not an aggregate function. Wrap '(count(testdata.b) FILTER (WHERE (testdata.a >= 2)) AS `count(b) FILTER (WHERE (a >= 2))`)' in windowing function(s) or wrap 'testdata.a' in first() (or first_value) if you don't care which value you get."],"queryContext":[]} -- !query @@ -228,7 +228,7 @@ SELECT a, COUNT(b) FILTER (WHERE a != 2) FROM testData GROUP BY b struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["expression 'testdata.a' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get."],"queryContext":[]} -- !query @@ -708,7 +708,7 @@ SELECT a + 2, COUNT(b) FILTER (WHERE b IN (1, 2)) FROM testData GROUP BY a + 1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["expression 'testdata.a' is neither present in the group by, nor is it an aggregate function. 
Add to group by or wrap in first() (or first_value) if you don't care which value you get."],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/group-by-ordinal.sql.out b/sql/core/src/test/resources/sql-tests/results/group-by-ordinal.sql.out index b1898aa4681b2..5f2120b7138a2 100644 --- a/sql/core/src/test/resources/sql-tests/results/group-by-ordinal.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/group-by-ordinal.sql.out @@ -92,7 +92,7 @@ select a, b from data group by -1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["GROUP BY position -1 is not in select list (valid range is [1, 2]); line 1 pos 31"],"queryContext":[]} -- !query @@ -101,7 +101,7 @@ select a, b from data group by 0 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["GROUP BY position 0 is not in select list (valid range is [1, 2]); line 1 pos 31"],"queryContext":[]} -- !query @@ -110,7 +110,7 @@ select a, b from data group by 3 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["GROUP BY position 3 is not in select list (valid range is [1, 2]); line 1 pos 31"],"queryContext":[]} -- !query @@ -119,7 +119,7 @@ select a, b, sum(b) from data group by 3 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["GROUP BY 3 refers to an expression that is or contains an aggregate function. Aggregate functions are not allowed in GROUP BY, but got sum(data.b) AS `sum(b)`; line 1 pos 39"],"queryContext":[]} -- !query @@ -128,7 +128,7 @@ select a, b, sum(b) + 2 from data group by 3 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["GROUP BY 3 refers to an expression that is or contains an aggregate function. Aggregate functions are not allowed in GROUP BY, but got (sum(data.b) + CAST(2 AS BIGINT)) AS `(sum(b) + 2)`; line 1 pos 43"],"queryContext":[]} -- !query @@ -152,7 +152,7 @@ select * from data group by a, b, 1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Star (*) is not allowed in select list when GROUP BY ordinal position is used"],"queryContext":[]} -- !query @@ -349,7 +349,7 @@ select a, b, count(1) from data group by a, -1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["GROUP BY position -1 is not in select list (valid range is [1, 3]); line 1 pos 44"],"queryContext":[]} -- !query @@ -358,7 +358,7 @@ select a, b, count(1) from data group by a, 3 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["GROUP BY 3 refers to an expression that is or contains an aggregate function. 
Aggregate functions are not allowed in GROUP BY, but got count(1) AS `count(1)`; line 1 pos 44"],"queryContext":[]} -- !query @@ -367,7 +367,7 @@ select a, b, count(1) from data group by cube(-1, 2) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["GROUP BY position -1 is not in select list (valid range is [1, 3]); line 1 pos 46"],"queryContext":[]} -- !query @@ -376,7 +376,7 @@ select a, b, count(1) from data group by cube(1, 3) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["GROUP BY 3 refers to an expression that is or contains an aggregate function. Aggregate functions are not allowed in GROUP BY, but got count(1) AS `count(1)`; line 1 pos 49"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/group-by.sql.out b/sql/core/src/test/resources/sql-tests/results/group-by.sql.out index b86d0b7cb082f..015534e43a5dd 100644 --- a/sql/core/src/test/resources/sql-tests/results/group-by.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/group-by.sql.out @@ -15,7 +15,7 @@ SELECT a, COUNT(b) FROM testData struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["grouping expressions sequence is empty, and 'testdata.a' is not an aggregate function. Wrap '(count(testdata.b) AS `count(b)`)' in windowing function(s) or wrap 'testdata.a' in first() (or first_value) if you don't care which value you get."],"queryContext":[]} -- !query @@ -43,7 +43,7 @@ SELECT a, COUNT(b) FROM testData GROUP BY b struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["expression 'testdata.a' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get."],"queryContext":[]} -- !query @@ -107,7 +107,7 @@ SELECT a + 2, COUNT(b) FROM testData GROUP BY a + 1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["expression 'testdata.a' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get."],"queryContext":[]} -- !query @@ -173,7 +173,7 @@ SELECT COUNT(b) AS k FROM testData GROUP BY k struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["aggregate functions are not allowed in GROUP BY, but found count(testdata.b)"],"queryContext":[]} -- !query @@ -191,7 +191,7 @@ SELECT k AS a, COUNT(v) FROM testDataHasSameNameWithAlias GROUP BY a struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["expression 'testdatahassamenamewithalias.k' is neither present in the group by, nor is it an aggregate function. 
Add to group by or wrap in first() (or first_value) if you don't care which value you get."],"queryContext":[]} -- !query @@ -280,7 +280,7 @@ SELECT id FROM range(10) HAVING id > 0 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["grouping expressions sequence is empty, and 'id' is not an aggregate function. Wrap '()' in windowing function(s) or wrap 'id' in first() (or first_value) if you don't care which value you get."],"queryContext":[]} -- !query @@ -314,7 +314,7 @@ SELECT 1 FROM range(10) HAVING MAX(id) > 0 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nAggregate/Window/Generate expressions are not valid in where clause of the query.\nExpression in where clause: [(max(id) > CAST(0 AS BIGINT))]\nInvalid expressions: [max(id)]"],"queryContext":[]} -- !query @@ -444,7 +444,7 @@ SELECT every(1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'every(1)' due to data type mismatch: argument 1 requires boolean type, however, '1' is of int type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -453,7 +453,7 @@ SELECT some(1S) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'some(1S)' due to data type mismatch: argument 1 requires boolean type, however, '1S' is of smallint type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -462,7 +462,7 @@ SELECT any(1L) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'any(1L)' due to data type mismatch: argument 1 requires boolean type, however, '1L' is of bigint type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -471,7 +471,7 @@ SELECT every("true") struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'every('true')' due to data type mismatch: argument 1 requires boolean type, however, ''true'' is of string type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -480,7 +480,7 @@ SELECT bool_and(1.0) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'bool_and(1.0BD)' due to data type mismatch: argument 1 requires boolean type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -489,7 +489,7 @@ SELECT bool_or(1.0D) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'bool_or(1.0D)' due to data type mismatch: argument 1 requires boolean type, however, '1.0D' is of double type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -609,7 +609,7 @@ SELECT count(*) FROM test_agg WHERE count(*) > 1L struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} 
+{"errorClass":"legacy","messageParameters":["\nAggregate/Window/Generate expressions are not valid in where clause of the query.\nExpression in where clause: [(count(1) > 1L)]\nInvalid expressions: [count(1)]"],"queryContext":[]} -- !query @@ -618,7 +618,7 @@ SELECT count(*) FROM test_agg WHERE count(*) + 1L > 1L struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nAggregate/Window/Generate expressions are not valid in where clause of the query.\nExpression in where clause: [((count(1) + 1L) > 1L)]\nInvalid expressions: [count(1)]"],"queryContext":[]} -- !query @@ -627,7 +627,7 @@ SELECT count(*) FROM test_agg WHERE k = 1 or k = 2 or count(*) + 1L > 1L or max( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nAggregate/Window/Generate expressions are not valid in where clause of the query.\nExpression in where clause: [(((test_agg.k = 1) OR (test_agg.k = 2)) OR (((count(1) + 1L) > 1L) OR (max(test_agg.k) > 1)))]\nInvalid expressions: [count(1), max(test_agg.k)]"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/grouping_set.sql.out b/sql/core/src/test/resources/sql-tests/results/grouping_set.sql.out index 20c93b1d04306..371e449068b2f 100644 --- a/sql/core/src/test/resources/sql-tests/results/grouping_set.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/grouping_set.sql.out @@ -152,7 +152,7 @@ SELECT c1 FROM (values (1,2), (3,2)) t(c1, c2) GROUP BY GROUPING SETS (()) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["expression 't.c1' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get."],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/having.sql.out b/sql/core/src/test/resources/sql-tests/results/having.sql.out index 592571f36aaff..224912ea1a63d 100644 --- a/sql/core/src/test/resources/sql-tests/results/having.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/having.sql.out @@ -35,7 +35,7 @@ SELECT count(k) FROM hav GROUP BY v HAVING v = array(1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(hav.v = array(1))' due to data type mismatch: differing types in '(hav.v = array(1))' (int and array).; line 1 pos 43"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/higher-order-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/higher-order-functions.sql.out index 2d1d9aedc6d17..ba2603a50e443 100644 --- a/sql/core/src/test/resources/sql-tests/results/higher-order-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/higher-order-functions.sql.out @@ -17,7 +17,7 @@ select upper(x -> x) as v struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["A lambda function should only be used in a higher order function. 
However, its class is org.apache.spark.sql.catalyst.expressions.Upper, which is not a higher order function.; line 1 pos 7"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/ilike-all.sql.out b/sql/core/src/test/resources/sql-tests/results/ilike-all.sql.out index 41b6b5536a2d6..f47ff4a61e725 100644 --- a/sql/core/src/test/resources/sql-tests/results/ilike-all.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ilike-all.sql.out @@ -129,4 +129,4 @@ SELECT company FROM ilike_any_table WHERE company ILIKE ALL () struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nExpected something between '(' and ')'.(line 1, pos 50)\n\n== SQL ==\nSELECT company FROM ilike_any_table WHERE company ILIKE ALL ()\n--------------------------------------------------^^^\n"],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/ilike-any.sql.out b/sql/core/src/test/resources/sql-tests/results/ilike-any.sql.out index bdd73e7cd3b3d..033fe7579c80b 100644 --- a/sql/core/src/test/resources/sql-tests/results/ilike-any.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ilike-any.sql.out @@ -135,4 +135,4 @@ SELECT company FROM ilike_any_table WHERE company ILIKE ANY () struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nExpected something between '(' and ')'.(line 1, pos 50)\n\n== SQL ==\nSELECT company FROM ilike_any_table WHERE company ILIKE ANY ()\n--------------------------------------------------^^^\n"],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/inline-table.sql.out b/sql/core/src/test/resources/sql-tests/results/inline-table.sql.out index 576330dfca965..ed6cba10daab1 100644 --- a/sql/core/src/test/resources/sql-tests/results/inline-table.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/inline-table.sql.out @@ -110,7 +110,7 @@ select * from values ("one", rand(5)), ("two", 3.0D) as data(a, b) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot evaluate expression rand(5) in inline table definition; line 1 pos 29"],"queryContext":[]} -- !query @@ -119,7 +119,7 @@ select * from values ("one", 2.0), ("two") as data(a, b) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["expected 2 columns but found 1 columns in row 1; line 1 pos 14"],"queryContext":[]} -- !query @@ -128,7 +128,7 @@ select * from values ("one", array(0, 1)), ("two", struct(1, 2)) as data(a, b) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["incompatible types found in column b for inline table; line 1 pos 14"],"queryContext":[]} -- !query @@ -137,7 +137,7 @@ select * from values ("one"), ("two") as data(a, b) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["expected 2 columns but found 1 columns in row 0; line 1 pos 14"],"queryContext":[]} -- !query @@ -146,7 +146,7 @@ select 
* from values ("one", random_not_exist_func(1)), ("two", 2) as data(a, b) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Undefined function: random_not_exist_func. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.random_not_exist_func.; line 1 pos 29"],"queryContext":[]} -- !query @@ -155,7 +155,7 @@ select * from values ("one", count(1)), ("two", 2) as data(a, b) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot evaluate expression count(1) in inline table definition; line 1 pos 29"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/intersect-all.sql.out b/sql/core/src/test/resources/sql-tests/results/intersect-all.sql.out index 69ba7f5ad344a..9d3ea78033691 100644 --- a/sql/core/src/test/resources/sql-tests/results/intersect-all.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/intersect-all.sql.out @@ -95,7 +95,7 @@ SELECT array(1), 2 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["IntersectAll can only be performed on tables with the compatible column types. The first column of the second table is array type which is not compatible with int at same column of first table"],"queryContext":[]} -- !query @@ -106,7 +106,7 @@ SELECT k, v FROM tab2 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["IntersectAll can only be performed on tables with the same number of columns, but the first table has 1 columns and the second table has 2 columns"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/interval.sql.out index 1ccd8de9c3023..9441e9708b5f4 100644 --- a/sql/core/src/test/resources/sql-tests/results/interval.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/interval.sql.out @@ -13,7 +13,7 @@ select interval 4 month 2 weeks 3 microseconds * 1.5 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot mix year-month and day-time fields: interval 4 month 2 weeks 3 microseconds(line 1, pos 7)\n\n== SQL ==\nselect interval 4 month 2 weeks 3 microseconds * 1.5\n-------^^^\n"],"queryContext":[]} -- !query @@ -46,7 +46,7 @@ select interval 2147483647 month * 2 struct<> -- !query output java.lang.ArithmeticException -integer overflow +{"errorClass":"legacy","messageParameters":["integer overflow"],"queryContext":[]} -- !query @@ -55,7 +55,7 @@ select interval 2147483647 month / 0.5 struct<> -- !query output java.lang.ArithmeticException -Overflow +{"errorClass":"legacy","messageParameters":["Overflow"],"queryContext":[]} -- !query @@ -64,7 +64,7 @@ select interval 2147483647 day * 2 struct<> -- !query output java.lang.ArithmeticException -long overflow +{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} -- !query @@ -73,7 +73,7 @@ select interval 2147483647 day / 0.5 struct<> -- !query output 
java.lang.ArithmeticException -long overflow +{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} -- !query @@ -178,7 +178,7 @@ select '2' / interval 2 second struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2' AS DOUBLE) / INTERVAL '02' SECOND)' due to data type mismatch: differing types in '(CAST('2' AS DOUBLE) / INTERVAL '02' SECOND)' (double and interval second).; line 1 pos 7"],"queryContext":[]} -- !query @@ -187,7 +187,7 @@ select '2' / interval 2 year struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2' AS DOUBLE) / INTERVAL '2' YEAR)' due to data type mismatch: differing types in '(CAST('2' AS DOUBLE) / INTERVAL '2' YEAR)' (double and interval year).; line 1 pos 7"],"queryContext":[]} -- !query @@ -262,7 +262,7 @@ select 2 / interval '2' year struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(2 / INTERVAL '2' YEAR)' due to data type mismatch: differing types in '(2 / INTERVAL '2' YEAR)' (int and interval year).; line 1 pos 7"],"queryContext":[]} -- !query @@ -271,7 +271,7 @@ select 2 / interval '2' hour struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(2 / INTERVAL '02' HOUR)' due to data type mismatch: differing types in '(2 / INTERVAL '02' HOUR)' (int and interval hour).; line 1 pos 7"],"queryContext":[]} -- !query @@ -280,7 +280,7 @@ select null / interval '2' year struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(NULL / INTERVAL '2' YEAR)' due to data type mismatch: differing types in '(NULL / INTERVAL '2' YEAR)' (void and interval year).; line 1 pos 7"],"queryContext":[]} -- !query @@ -289,7 +289,7 @@ select null / interval '2' hour struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(NULL / INTERVAL '02' HOUR)' due to data type mismatch: differing types in '(NULL / INTERVAL '02' HOUR)' (void and interval hour).; line 1 pos 7"],"queryContext":[]} -- !query @@ -298,7 +298,7 @@ select -interval '-1 month 1 day -1 second' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot mix year-month and day-time fields: interval '-1 month 1 day -1 second'(line 1, pos 8)\n\n== SQL ==\nselect -interval '-1 month 1 day -1 second'\n--------^^^\n"],"queryContext":[]} -- !query @@ -323,7 +323,7 @@ select -interval -1 month 1 day -1 second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot mix year-month and day-time fields: interval -1 month 1 day -1 second(line 1, pos 8)\n\n== SQL ==\nselect -interval -1 month 1 day -1 
second\n--------^^^\n"],"queryContext":[]} -- !query @@ -348,7 +348,7 @@ select +interval '-1 month 1 day -1 second' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot mix year-month and day-time fields: interval '-1 month 1 day -1 second'(line 1, pos 8)\n\n== SQL ==\nselect +interval '-1 month 1 day -1 second'\n--------^^^\n"],"queryContext":[]} -- !query @@ -373,7 +373,7 @@ select +interval -1 month 1 day -1 second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot mix year-month and day-time fields: interval -1 month 1 day -1 second(line 1, pos 8)\n\n== SQL ==\nselect +interval -1 month 1 day -1 second\n--------^^^\n"],"queryContext":[]} -- !query @@ -654,7 +654,7 @@ select make_dt_interval(2147483647) struct<> -- !query output java.lang.ArithmeticException -long overflow +{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} -- !query @@ -695,7 +695,7 @@ select make_ym_interval(178956970, 8) struct<> -- !query output java.lang.ArithmeticException -integer overflow +{"errorClass":"legacy","messageParameters":["integer overflow"],"queryContext":[]} -- !query @@ -712,7 +712,7 @@ select make_ym_interval(-178956970, -9) struct<> -- !query output java.lang.ArithmeticException -integer overflow +{"errorClass":"legacy","messageParameters":["integer overflow"],"queryContext":[]} -- !query @@ -785,7 +785,7 @@ select interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 millisec struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot mix year-month and day-time fields: interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 millisecond 9 microsecond(line 1, pos 7)\n\n== SQL ==\nselect interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 millisecond 9 microsecond\n-------^^^\n"],"queryContext":[]} -- !query @@ -818,7 +818,7 @@ select interval '30' year '25' month '-100' day '40' hour '80' minute '299.88998 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot mix year-month and day-time fields: interval '30' year '25' month '-100' day '40' hour '80' minute '299.889987299' second(line 1, pos 7)\n\n== SQL ==\nselect interval '30' year '25' month '-100' day '40' hour '80' minute '299.889987299' second\n-------^^^\n"],"queryContext":[]} -- !query @@ -947,7 +947,7 @@ select interval '20 15:40:32.99899999' day to hour struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval day to hour: 20 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '20 15:40:32.99899999' day to hour\n----------------^^^\n"],"queryContext":[]} -- !query @@ -956,7 +956,7 @@ select interval '20 15:40:32.99899999' day to minute struct<> -- !query output 
org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]d h:m`, `INTERVAL [+|-]'[+|-]d h:m' DAY TO MINUTE` when cast to interval day to minute: 20 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '20 15:40:32.99899999' day to minute\n----------------^^^\n"],"queryContext":[]} -- !query @@ -965,7 +965,7 @@ select interval '15:40:32.99899999' hour to minute struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]h:m`, `INTERVAL [+|-]'[+|-]h:m' HOUR TO MINUTE` when cast to interval hour to minute: 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '15:40:32.99899999' hour to minute\n----------------^^^\n"],"queryContext":[]} -- !query @@ -974,7 +974,7 @@ select interval '15:40.99899999' hour to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 15:40.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '15:40.99899999' hour to second\n----------------^^^\n"],"queryContext":[]} -- !query @@ -983,7 +983,7 @@ select interval '15:40' hour to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 15:40, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '15:40' hour to second\n----------------^^^\n"],"queryContext":[]} -- !query @@ -992,7 +992,7 @@ select interval '20 40:32.99899999' minute to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]m:s.n`, `INTERVAL [+|-]'[+|-]m:s.n' MINUTE TO SECOND` when cast to interval minute to second: 20 40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '20 40:32.99899999' minute to second\n----------------^^^\n"],"queryContext":[]} -- !query @@ -1001,7 +1001,7 @@ select interval 10 nanoseconds struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nError parsing ' 10 nanoseconds' to interval, invalid unit 'nanoseconds'(line 1, pos 16)\n\n== SQL ==\nselect interval 10 
nanoseconds\n----------------^^^\n"],"queryContext":[]} -- !query @@ -1090,7 +1090,7 @@ select interval struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nat least one time unit should be given for interval literal(line 1, pos 7)\n\n== SQL ==\nselect interval\n-------^^^\n"],"queryContext":[]} -- !query @@ -1099,7 +1099,7 @@ select interval 1 fake_unit struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nError parsing ' 1 fake_unit' to interval, invalid unit 'fake_unit'(line 1, pos 16)\n\n== SQL ==\nselect interval 1 fake_unit\n----------------^^^\n"],"queryContext":[]} -- !query @@ -1108,7 +1108,7 @@ select interval 1 year to month struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nThe value of from-to unit must be a string(line 1, pos 16)\n\n== SQL ==\nselect interval 1 year to month\n----------------^^^\n"],"queryContext":[]} -- !query @@ -1117,7 +1117,7 @@ select interval '1' year to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nIntervals FROM year TO second are not supported.(line 1, pos 16)\n\n== SQL ==\nselect interval '1' year to second\n----------------^^^\n"],"queryContext":[]} -- !query @@ -1126,7 +1126,7 @@ select interval '10-9' year to month '2-1' year to month struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 37)\n\n== SQL ==\nselect interval '10-9' year to month '2-1' year to month\n-------------------------------------^^^\n"],"queryContext":[]} -- !query @@ -1135,7 +1135,7 @@ select interval '10-9' year to month '12:11:10' hour to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 37)\n\n== SQL ==\nselect interval '10-9' year to month '12:11:10' hour to second\n-------------------------------------^^^\n"],"queryContext":[]} -- !query @@ -1144,7 +1144,7 @@ select interval '1 15:11' day to minute '12:11:10' hour to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 40)\n\n== SQL ==\nselect interval '1 15:11' day to minute '12:11:10' hour to second\n----------------------------------------^^^\n"],"queryContext":[]} -- !query @@ -1153,7 +1153,7 @@ select interval 1 year '2-1' year to month struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 23)\n\n== SQL ==\nselect 
interval 1 year '2-1' year to month\n-----------------------^^^\n"],"queryContext":[]} -- !query @@ -1162,7 +1162,7 @@ select interval 1 year '12:11:10' hour to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 23)\n\n== SQL ==\nselect interval 1 year '12:11:10' hour to second\n-----------------------^^^\n"],"queryContext":[]} -- !query @@ -1171,7 +1171,7 @@ select interval '10-9' year to month '1' year struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 37)\n\n== SQL ==\nselect interval '10-9' year to month '1' year\n-------------------------------------^^^\n"],"queryContext":[]} -- !query @@ -1180,7 +1180,7 @@ select interval '12:11:10' hour to second '1' year struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 42)\n\n== SQL ==\nselect interval '12:11:10' hour to second '1' year\n------------------------------------------^^^\n"],"queryContext":[]} -- !query @@ -1189,7 +1189,7 @@ select interval (-30) day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7"],"queryContext":[]} -- !query @@ -1198,7 +1198,7 @@ select interval (a + 1) day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7"],"queryContext":[]} -- !query @@ -1216,7 +1216,7 @@ select interval (-30) days struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7"],"queryContext":[]} -- !query @@ -1225,7 +1225,7 @@ select interval (a + 1) days struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Undefined function: interval. 
This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7"],"queryContext":[]} -- !query @@ -1251,7 +1251,7 @@ SELECT INTERVAL '178956970-8' YEAR TO MONTH struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nError parsing interval year-month string: integer overflow(line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '178956970-8' YEAR TO MONTH\n----------------^^^\n"],"queryContext":[]} -- !query @@ -1296,7 +1296,7 @@ select struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '2' YEAR + CAST('3-3 year to month' AS DOUBLE))' due to data type mismatch: differing types in '(INTERVAL '2' YEAR + CAST('3-3 year to month' AS DOUBLE))' (interval year and double).; line 2 pos 2"],"queryContext":[]} -- !query @@ -1321,7 +1321,7 @@ select interval '2' year + '3-3' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '2' YEAR + CAST('3-3' AS DOUBLE))' due to data type mismatch: differing types in '(INTERVAL '2' YEAR + CAST('3-3' AS DOUBLE))' (interval year and double).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1330,7 +1330,7 @@ select interval '2' year - '4' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '2' YEAR - CAST('4' AS DOUBLE))' due to data type mismatch: differing types in '(INTERVAL '2' YEAR - CAST('4' AS DOUBLE))' (interval year and double).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1363,7 +1363,7 @@ select interval '2' year + str from interval_view struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '2' YEAR + CAST(interval_view.str AS DOUBLE))' due to data type mismatch: differing types in '(INTERVAL '2' YEAR + CAST(interval_view.str AS DOUBLE))' (interval year and double).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1372,7 +1372,7 @@ select interval '2' year - str from interval_view struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '2' YEAR - CAST(interval_view.str AS DOUBLE))' due to data type mismatch: differing types in '(INTERVAL '2' YEAR - CAST(interval_view.str AS DOUBLE))' (interval year and double).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1397,7 +1397,7 @@ select interval '2-2' year to month + interval '3' day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'INTERVAL '2-2' YEAR TO MONTH + INTERVAL '3' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2-2' YEAR TO MONTH' is of interval year to month type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -1406,7 +1406,7 @@ select interval '3' day + 
interval '2-2' year to month struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'INTERVAL '2-2' YEAR TO MONTH + INTERVAL '3' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2-2' YEAR TO MONTH' is of interval year to month type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -1415,7 +1415,7 @@ select interval '2-2' year to month - interval '3' day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'INTERVAL '2-2' YEAR TO MONTH + (- INTERVAL '3' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2-2' YEAR TO MONTH' is of interval year to month type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -1424,7 +1424,7 @@ select interval '3' day - interval '2-2' year to month struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '3' DAY - INTERVAL '2-2' YEAR TO MONTH)' due to data type mismatch: differing types in '(INTERVAL '3' DAY - INTERVAL '2-2' YEAR TO MONTH)' (interval day and interval year to month).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1433,7 +1433,7 @@ select 1 - interval '2' second struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '1 + (- INTERVAL '02' SECOND)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, '1' is of int type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -1442,7 +1442,7 @@ select 1 + interval '2' month struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(1 + INTERVAL '2' MONTH)' due to data type mismatch: differing types in '(1 + INTERVAL '2' MONTH)' (int and interval month).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1451,7 +1451,7 @@ select interval '2' second + 1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '1 + INTERVAL '02' SECOND' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, '1' is of int type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -1460,7 +1460,7 @@ select interval '2' month - 1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '2' MONTH - 1)' due to data type mismatch: differing types in '(INTERVAL '2' MONTH - 1)' (interval month and int).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1517,7 +1517,7 @@ select interval '-\t2-2\t' year to month struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nInterval string does not match year-month format of `[+|-]d h`, `INTERVAL 
[+|-]'[+|-]d h' DAY TO HOUR` when cast to interval year to month: -\t2-2\t(line 1, pos 16)\n\n== SQL ==\nselect interval '-\\t2-2\\t' year to month\n----------------^^^\n"],"queryContext":[]} -- !query @@ -1534,7 +1534,7 @@ select interval '\n-\t10\t 12:34:46.789\t' day to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]d h:m:s.n`, `INTERVAL [+|-]'[+|-]d h:m:s.n' DAY TO SECOND` when cast to interval day to second: \n-\t10\t 12:34:46.789\t, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '\\n-\\t10\\t 12:34:46.789\\t' day to second\n----------------^^^\n"],"queryContext":[]} -- !query @@ -1543,7 +1543,7 @@ select interval '中文 interval 1 day' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot parse the INTERVAL value: 中文 interval 1 day(line 1, pos 7)\n\n== SQL ==\nselect interval '中文 interval 1 day'\n-------^^^\n"],"queryContext":[]} -- !query @@ -1552,7 +1552,7 @@ select interval 'interval中文 1 day' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot parse the INTERVAL value: interval中文 1 day(line 1, pos 7)\n\n== SQL ==\nselect interval 'interval中文 1 day'\n-------^^^\n"],"queryContext":[]} -- !query @@ -1561,7 +1561,7 @@ select interval 'interval 1中文day' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot parse the INTERVAL value: interval 1中文day(line 1, pos 7)\n\n== SQL ==\nselect interval 'interval 1中文day'\n-------^^^\n"],"queryContext":[]} -- !query @@ -1569,8 +1569,8 @@ select -(a) from values (interval '-2147483648 months', interval '2147483647 mon -- !query schema struct<> -- !query output -org.apache.spark.SparkException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +org.apache.spark.SparkArithmeticException +{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["integer overflow",""],"queryContext":[]} -- !query @@ -1578,8 +1578,8 @@ select a - b from values (interval '-2147483648 months', interval '2147483647 mo -- !query schema struct<> -- !query output -org.apache.spark.SparkException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +org.apache.spark.SparkArithmeticException +{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["integer overflow"," Use 'try_subtract' to tolerate overflow and return NULL instead."],"queryContext":[]} -- !query @@ -1587,8 +1587,8 @@ select b + interval '1 month' from values (interval '-2147483648 months', interv -- !query schema struct<> -- !query output -org.apache.spark.SparkException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +org.apache.spark.SparkArithmeticException +{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["integer overflow"," Use 'try_add' to tolerate overflow and return NULL instead."],"queryContext":[]} -- !query @@ -1596,8 +1596,8 @@ select a * 1.1 from values (interval 
'-2147483648 months', interval '2147483647 -- !query schema struct<> -- !query output -org.apache.spark.SparkException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +java.lang.ArithmeticException +{"errorClass":"legacy","messageParameters":["Overflow"],"queryContext":[]} -- !query @@ -1605,8 +1605,8 @@ select a / 0.5 from values (interval '-2147483648 months', interval '2147483647 -- !query schema struct<> -- !query output -org.apache.spark.SparkException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +java.lang.ArithmeticException +{"errorClass":"legacy","messageParameters":["Overflow"],"queryContext":[]} -- !query @@ -1667,7 +1667,7 @@ select interval '+' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot parse the INTERVAL value: +(line 1, pos 7)\n\n== SQL ==\nselect interval '+'\n-------^^^\n"],"queryContext":[]} -- !query @@ -1676,7 +1676,7 @@ select interval '+.' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot parse the INTERVAL value: +.(line 1, pos 7)\n\n== SQL ==\nselect interval '+.'\n-------^^^\n"],"queryContext":[]} -- !query @@ -1685,7 +1685,7 @@ select interval '1' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot parse the INTERVAL value: 1(line 1, pos 7)\n\n== SQL ==\nselect interval '1'\n-------^^^\n"],"queryContext":[]} -- !query @@ -1694,7 +1694,7 @@ select interval '1.2' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot parse the INTERVAL value: 1.2(line 1, pos 7)\n\n== SQL ==\nselect interval '1.2'\n-------^^^\n"],"queryContext":[]} -- !query @@ -1703,7 +1703,7 @@ select interval '- 2' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot parse the INTERVAL value: - 2(line 1, pos 7)\n\n== SQL ==\nselect interval '- 2'\n-------^^^\n"],"queryContext":[]} -- !query @@ -1712,7 +1712,7 @@ select interval '1 day -' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot parse the INTERVAL value: 1 day -(line 1, pos 7)\n\n== SQL ==\nselect interval '1 day -'\n-------^^^\n"],"queryContext":[]} -- !query @@ -1721,7 +1721,7 @@ select interval '1 day 1' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot parse the INTERVAL value: 1 day 1(line 1, pos 7)\n\n== SQL ==\nselect interval '1 day 1'\n-------^^^\n"],"queryContext":[]} -- !query @@ -1730,7 +1730,7 @@ select interval '1 day 2' day struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCan only use numbers in the interval value part for multiple unit value pairs interval form, 
but got invalid value: 1 day 2(line 1, pos 16)\n\n== SQL ==\nselect interval '1 day 2' day\n----------------^^^\n"],"queryContext":[]} -- !query @@ -1739,7 +1739,7 @@ select interval 'interval 1' day struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCan only use numbers in the interval value part for multiple unit value pairs interval form, but got invalid value: interval 1(line 1, pos 16)\n\n== SQL ==\nselect interval 'interval 1' day\n----------------^^^\n"],"queryContext":[]} -- !query @@ -1790,7 +1790,7 @@ SELECT (INTERVAL '-178956970-8' YEAR TO MONTH) / -1.0 struct<> -- !query output java.lang.ArithmeticException -Overflow +{"errorClass":"legacy","messageParameters":["Overflow"],"queryContext":[]} -- !query @@ -1799,7 +1799,7 @@ SELECT (INTERVAL '-178956970-8' YEAR TO MONTH) / -1.0D struct<> -- !query output java.lang.ArithmeticException -not in range +{"errorClass":"legacy","messageParameters":["not in range"],"queryContext":[]} -- !query @@ -1842,7 +1842,7 @@ SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1.0 struct<> -- !query output java.lang.ArithmeticException -Overflow +{"errorClass":"legacy","messageParameters":["Overflow"],"queryContext":[]} -- !query @@ -1851,7 +1851,7 @@ SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1.0D struct<> -- !query output java.lang.ArithmeticException -not in range +{"errorClass":"legacy","messageParameters":["not in range"],"queryContext":[]} -- !query @@ -1956,7 +1956,7 @@ SELECT INTERVAL '106751992 04' DAY TO HOUR struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nrequirement failed: day 106751992 outside range [0, 106751991](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '106751992 04' DAY TO HOUR\n----------------^^^\n"],"queryContext":[]} -- !query @@ -1965,7 +1965,7 @@ SELECT INTERVAL '-106751992 04' DAY TO HOUR struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nrequirement failed: day 106751992 outside range [0, 106751991](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '-106751992 04' DAY TO HOUR\n----------------^^^\n"],"queryContext":[]} -- !query @@ -1974,7 +1974,7 @@ SELECT INTERVAL '2562047789:00' HOUR TO MINUTE struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nrequirement failed: hour 2562047789 outside range [0, 2562047788](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '2562047789:00' HOUR TO MINUTE\n----------------^^^\n"],"queryContext":[]} -- !query @@ -1983,7 +1983,7 @@ SELECT INTERVAL '-2562047789:00' HOUR TO MINUTE struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nrequirement failed: hour 2562047789 outside range [0, 2562047788](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '-2562047789:00' HOUR TO MINUTE\n----------------^^^\n"],"queryContext":[]} -- !query @@ -1992,7 +1992,7 @@ SELECT INTERVAL '153722867281:54.775808' MINUTE TO SECOND struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException 
-{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nrequirement failed: minute 153722867281 outside range [0, 153722867280](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '153722867281:54.775808' MINUTE TO SECOND\n----------------^^^\n"],"queryContext":[]} -- !query @@ -2001,7 +2001,7 @@ SELECT INTERVAL '-153722867281:54.775808' MINUTE TO SECOND struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nrequirement failed: minute 153722867281 outside range [0, 153722867280](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '-153722867281:54.775808' MINUTE TO SECOND\n----------------^^^\n"],"queryContext":[]} -- !query @@ -2138,7 +2138,7 @@ SELECT INTERVAL 1 MONTH > INTERVAL 20 DAYS struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '1' MONTH > INTERVAL '20' DAY)' due to data type mismatch: differing types in '(INTERVAL '1' MONTH > INTERVAL '20' DAY)' (interval month and interval day).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2147,7 +2147,7 @@ SELECT INTERVAL '1' DAY < '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '1' DAY < '1')' due to data type mismatch: differing types in '(INTERVAL '1' DAY < '1')' (interval day and string).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2156,7 +2156,7 @@ SELECT INTERVAL '1' DAY = '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '1' DAY = '1')' due to data type mismatch: differing types in '(INTERVAL '1' DAY = '1')' (interval day and string).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2165,7 +2165,7 @@ SELECT INTERVAL '1' DAY > '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '1' DAY > '1')' due to data type mismatch: differing types in '(INTERVAL '1' DAY > '1')' (interval day and string).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2174,7 +2174,7 @@ SELECT '1' < INTERVAL '1' DAY struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '('1' < INTERVAL '1' DAY)' due to data type mismatch: differing types in '('1' < INTERVAL '1' DAY)' (string and interval day).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2183,7 +2183,7 @@ SELECT '1' = INTERVAL '1' DAY struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '('1' = INTERVAL '1' DAY)' due to data type mismatch: differing types in '('1' = INTERVAL '1' DAY)' (string and interval day).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2192,7 +2192,7 @@ SELECT '1' > INTERVAL '1' DAY struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} 
+{"errorClass":"legacy","messageParameters":["cannot resolve '('1' > INTERVAL '1' DAY)' due to data type mismatch: differing types in '('1' > INTERVAL '1' DAY)' (string and interval day).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2201,7 +2201,7 @@ SELECT INTERVAL '1' YEAR < '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '1' YEAR < '1')' due to data type mismatch: differing types in '(INTERVAL '1' YEAR < '1')' (interval year and string).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2210,7 +2210,7 @@ SELECT INTERVAL '1' YEAR = '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '1' YEAR = '1')' due to data type mismatch: differing types in '(INTERVAL '1' YEAR = '1')' (interval year and string).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2219,7 +2219,7 @@ SELECT INTERVAL '1' YEAR > '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '1' YEAR > '1')' due to data type mismatch: differing types in '(INTERVAL '1' YEAR > '1')' (interval year and string).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2228,7 +2228,7 @@ SELECT '1' < INTERVAL '1' YEAR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '('1' < INTERVAL '1' YEAR)' due to data type mismatch: differing types in '('1' < INTERVAL '1' YEAR)' (string and interval year).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2237,7 +2237,7 @@ SELECT '1' = INTERVAL '1' YEAR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '('1' = INTERVAL '1' YEAR)' due to data type mismatch: differing types in '('1' = INTERVAL '1' YEAR)' (string and interval year).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2246,7 +2246,7 @@ SELECT '1' > INTERVAL '1' YEAR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '('1' > INTERVAL '1' YEAR)' due to data type mismatch: differing types in '('1' > INTERVAL '1' YEAR)' (string and interval year).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2271,7 +2271,7 @@ SELECT array(INTERVAL 1 MONTH, INTERVAL 20 DAYS) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'array(INTERVAL '1' MONTH, INTERVAL '20' DAY)' due to data type mismatch: input to function array should all be the same type, but it's [interval month, interval day]; line 1 pos 7"],"queryContext":[]} -- !query @@ -2296,7 +2296,7 @@ SELECT coalesce(INTERVAL 1 MONTH, INTERVAL 20 DAYS) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'coalesce(INTERVAL '1' MONTH, INTERVAL '20' DAY)' due to data type mismatch: input to 
function coalesce should all be the same type, but it's [interval month, interval day]; line 1 pos 7"],"queryContext":[]} -- !query @@ -2353,7 +2353,7 @@ SELECT div(INTERVAL '1' MONTH, INTERVAL '-1' DAY) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '1' MONTH div INTERVAL '-1' DAY)' due to data type mismatch: differing types in '(INTERVAL '1' MONTH div INTERVAL '-1' DAY)' (interval month and interval day).; line 1 pos 7"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/join-lateral.sql.out b/sql/core/src/test/resources/sql-tests/results/join-lateral.sql.out index 235bddb5f5de2..cee6acb23aa8d 100644 --- a/sql/core/src/test/resources/sql-tests/results/join-lateral.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/join-lateral.sql.out @@ -114,7 +114,7 @@ SELECT * FROM t1, LATERAL (SELECT t1.*, t2.* FROM t2, LATERAL (SELECT t1.*, t2.* struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 't1.*' given input columns 'c1, c2'; line 1 pos 70"],"queryContext":[]} -- !query @@ -281,7 +281,7 @@ SELECT * FROM t1, LATERAL (SELECT c1 + c2 + rand(0) AS c3) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Non-deterministic lateral subqueries are not supported when joining with outer relations that produce more than one row\nSubqueryAlias __auto_generated_subquery_name\n+- Project [(cast((outer(c1#x) + outer(c2#x)) as double) + rand(0)) AS c3#x]\n +- OneRowRelation\n; line 1 pos 9"],"queryContext":[]} -- !query @@ -290,7 +290,7 @@ SELECT * FROM t1, LATERAL (SELECT rand(0) FROM t2) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Non-deterministic lateral subqueries are not supported when joining with outer relations that produce more than one row\nSubqueryAlias __auto_generated_subquery_name\n+- Project [rand(0) AS rand(0)#x]\n +- SubqueryAlias spark_catalog.default.t2\n +- View (`spark_catalog`.`default`.`t2`, [c1#x,c2#x])\n +- Project [cast(col1#x as int) AS c1#x, cast(col2#x as int) AS c2#x]\n +- LocalRelation [col1#x, col2#x]\n; line 1 pos 9"],"queryContext":[]} -- !query @@ -299,7 +299,7 @@ SELECT * FROM t1 JOIN LATERAL (SELECT * FROM t2) s ON t1.c1 + rand(0) = s.c1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Lateral join condition cannot be non-deterministic: ((CAST(spark_catalog.default.t1.c1 AS DOUBLE) + rand(0)) = CAST(s.c1 AS DOUBLE)); line 1 pos 17"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out index 7412241e080b4..a2a9ac486c2c8 100644 --- a/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out @@ -69,7 +69,7 @@ select to_json(named_struct('a', 1, 'b', 2), named_struct('mode', 'PERMISSIVE')) struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Must use a map() function for options; line 1 pos 7"],"queryContext":[]} -- !query @@ -78,7 +78,7 @@ select to_json(named_struct('a', 1, 'b', 2), map('mode', 1)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["A type of keys and values in map() must be string, but got map; line 1 pos 7"],"queryContext":[]} -- !query @@ -87,7 +87,7 @@ select to_json() struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Invalid number of arguments for function to_json. Expected: one of 1 and 2; Found: 0; line 1 pos 7"],"queryContext":[]} -- !query @@ -112,7 +112,7 @@ select from_json('{"a":1}', 1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["The expression '1' is not a valid schema string.; line 1 pos 7"],"queryContext":[]} -- !query @@ -121,7 +121,7 @@ select from_json('{"a":1}', 'a InvalidType') struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Cannot parse the data type: \n[PARSE_SYNTAX_ERROR] Syntax error at or near 'InvalidType': extra input 'InvalidType'(line 1, pos 2)\n\n== SQL ==\na InvalidType\n--^^^\n\nFailed fallback parsing: \nDataType invalidtype is not supported.(line 1, pos 2)\n\n== SQL ==\na InvalidType\n--^^^\n; line 1 pos 7"],"queryContext":[]} -- !query @@ -130,7 +130,7 @@ select from_json('{"a":1}', 'a INT', named_struct('mode', 'PERMISSIVE')) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Must use a map() function for options; line 1 pos 7"],"queryContext":[]} -- !query @@ -139,7 +139,7 @@ select from_json('{"a":1}', 'a INT', map('mode', 1)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["A type of keys and values in map() must be string, but got map; line 1 pos 7"],"queryContext":[]} -- !query @@ -148,7 +148,7 @@ select from_json() struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Invalid number of arguments for function from_json. 
Expected: one of 2 and 3; Found: 0; line 1 pos 7"],"queryContext":[]} -- !query @@ -376,7 +376,7 @@ select schema_of_json(null) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'schema_of_json(NULL)' due to data type mismatch: The input json should be a foldable string expression and not null; however, got NULL.; line 1 pos 7"],"queryContext":[]} -- !query @@ -393,7 +393,7 @@ SELECT schema_of_json(jsonField) FROM jsonTable struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'schema_of_json(jsontable.jsonField)' due to data type mismatch: The input json should be a foldable string expression and not null; however, got jsontable.jsonField.; line 1 pos 7"],"queryContext":[]} -- !query @@ -410,7 +410,7 @@ select json_array_length(2) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'json_array_length(2)' due to data type mismatch: argument 1 requires string type, however, '2' is of int type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -419,7 +419,7 @@ select json_array_length() struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Invalid number of arguments for function json_array_length. Expected: 1; Found: 0; line 1 pos 7"],"queryContext":[]} -- !query @@ -492,7 +492,7 @@ select json_object_keys() struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Invalid number of arguments for function json_object_keys. 
Expected: 1; Found: 0; line 1 pos 7"],"queryContext":[]} -- !query @@ -509,7 +509,7 @@ select json_object_keys(200) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'json_object_keys(200)' due to data type mismatch: argument 1 requires string type, however, '200' is of int type.; line 1 pos 7"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/like-all.sql.out b/sql/core/src/test/resources/sql-tests/results/like-all.sql.out index bb1838d638da1..cfa3cc77be566 100644 --- a/sql/core/src/test/resources/sql-tests/results/like-all.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/like-all.sql.out @@ -129,4 +129,4 @@ SELECT company FROM like_all_table WHERE company LIKE ALL () struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nExpected something between '(' and ')'.(line 1, pos 49)\n\n== SQL ==\nSELECT company FROM like_all_table WHERE company LIKE ALL ()\n-------------------------------------------------^^^\n"],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/like-any.sql.out b/sql/core/src/test/resources/sql-tests/results/like-any.sql.out index 6f35dfb91e5e7..01f8702ee759b 100644 --- a/sql/core/src/test/resources/sql-tests/results/like-any.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/like-any.sql.out @@ -135,4 +135,4 @@ SELECT company FROM like_any_table WHERE company LIKE ANY () struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nExpected something between '(' and ')'.(line 1, pos 49)\n\n== SQL ==\nSELECT company FROM like_any_table WHERE company LIKE ANY ()\n-------------------------------------------------^^^\n"],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/limit.sql.out b/sql/core/src/test/resources/sql-tests/results/limit.sql.out index 579d2b24cf8d0..c23d6454290c4 100644 --- a/sql/core/src/test/resources/sql-tests/results/limit.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/limit.sql.out @@ -50,7 +50,7 @@ SELECT * FROM testdata LIMIT -1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["The limit expression must be equal to or greater than 0, but got -1"],"queryContext":[]} -- !query @@ -59,7 +59,7 @@ SELECT * FROM testData TABLESAMPLE (-1 ROWS) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["The limit expression must be equal to or greater than 0, but got -1"],"queryContext":[]} -- !query @@ -76,7 +76,7 @@ SELECT * FROM testdata LIMIT CAST(NULL AS INT) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["The evaluated limit expression must not be null, but got CAST(NULL AS INT)"],"queryContext":[]} -- !query @@ -85,7 +85,7 @@ SELECT * FROM testdata LIMIT key > 3 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} 
+{"errorClass":"legacy","messageParameters":["The limit expression must evaluate to a constant value, but got (spark_catalog.default.testdata.key > 3)"],"queryContext":[]} -- !query @@ -94,7 +94,7 @@ SELECT * FROM testdata LIMIT true struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["The limit expression must be integer type, but got boolean"],"queryContext":[]} -- !query @@ -103,7 +103,7 @@ SELECT * FROM testdata LIMIT 'a' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["The limit expression must be integer type, but got string"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/literals.sql.out b/sql/core/src/test/resources/sql-tests/results/literals.sql.out index b40cf817fb328..3085fdaeac7b1 100644 --- a/sql/core/src/test/resources/sql-tests/results/literals.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/literals.sql.out @@ -37,7 +37,7 @@ select 128Y struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nNumeric literal 128 does not fit in range [-128, 127] for type tinyint(line 1, pos 7)\n\n== SQL ==\nselect 128Y\n-------^^^\n"],"queryContext":[]} -- !query @@ -62,7 +62,7 @@ select 32768S struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nNumeric literal 32768 does not fit in range [-32768, 32767] for type smallint(line 1, pos 7)\n\n== SQL ==\nselect 32768S\n-------^^^\n"],"queryContext":[]} -- !query @@ -87,7 +87,7 @@ select 9223372036854775808L struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nNumeric literal 9223372036854775808 does not fit in range [-9223372036854775808, 9223372036854775807] for type bigint(line 1, pos 7)\n\n== SQL ==\nselect 9223372036854775808L\n-------^^^\n"],"queryContext":[]} -- !query @@ -128,7 +128,7 @@ select 1234567890123456789012345678901234567890 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\ndecimal can only support precision up to 38\n== SQL ==\nselect 1234567890123456789012345678901234567890"],"queryContext":[]} -- !query @@ -137,7 +137,7 @@ select 1234567890123456789012345678901234567890.0 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\ndecimal can only support precision up to 38\n== SQL ==\nselect 1234567890123456789012345678901234567890.0"],"queryContext":[]} -- !query @@ -162,7 +162,7 @@ select -3.4028235E39f struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nNumeric literal -3.4028235E39 does not fit in range [-3.4028234663852886E+38, 3.4028234663852886E+38] for type float(line 1, pos 7)\n\n== SQL ==\nselect 
-3.4028235E39f\n-------^^^\n"],"queryContext":[]} -- !query @@ -196,7 +196,7 @@ select 1E309, -1E309 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nNumeric literal 1E309 does not fit in range [-1.7976931348623157E+308, 1.7976931348623157E+308] for type double(line 1, pos 7)\n\n== SQL ==\nselect 1E309, -1E309\n-------^^^\n"],"queryContext":[]} -- !query @@ -295,7 +295,7 @@ select date 'mar 11 2016' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: mar 11 2016(line 1, pos 7)\n\n== SQL ==\nselect date 'mar 11 2016'\n-------^^^\n"],"queryContext":[]} -- !query @@ -312,7 +312,7 @@ select timestamp '2016-33-11 20:54:00.000' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot parse the TIMESTAMP value: 2016-33-11 20:54:00.000(line 1, pos 7)\n\n== SQL ==\nselect timestamp '2016-33-11 20:54:00.000'\n-------^^^\n"],"queryContext":[]} -- !query @@ -321,7 +321,7 @@ select GEO '(10,-6)' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nLiterals of type 'GEO' are currently not supported.(line 1, pos 7)\n\n== SQL ==\nselect GEO '(10,-6)'\n-------^^^\n"],"queryContext":[]} -- !query @@ -338,7 +338,7 @@ select 1.20E-38BD struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\ndecimal can only support precision up to 38(line 1, pos 7)\n\n== SQL ==\nselect 1.20E-38BD\n-------^^^\n"],"queryContext":[]} -- !query @@ -355,7 +355,7 @@ select X'XuZ' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\ncontains illegal character for hexBinary: 0XuZ(line 1, pos 7)\n\n== SQL ==\nselect X'XuZ'\n-------^^^\n"],"queryContext":[]} -- !query @@ -372,7 +372,7 @@ select +date '1999-01-01' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(+ DATE '1999-01-01')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'DATE '1999-01-01'' is of date type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -381,7 +381,7 @@ select +timestamp '1999-01-01' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(+ TIMESTAMP '1999-01-01 00:00:00')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'TIMESTAMP '1999-01-01 00:00:00'' is of timestamp type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -398,7 +398,7 @@ select +map(1, 2) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} 
+{"errorClass":"legacy","messageParameters":["cannot resolve '(+ map(1, 2))' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'map(1, 2)' is of map type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -407,7 +407,7 @@ select +array(1,2) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(+ array(1, 2))' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'array(1, 2)' is of array type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -416,7 +416,7 @@ select +named_struct('a', 1, 'b', 'spark') struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(+ named_struct('a', 1, 'b', 'spark'))' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'named_struct('a', 1, 'b', 'spark')' is of struct type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -425,7 +425,7 @@ select +X'1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(+ X'01')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'X'01'' is of binary type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -434,7 +434,7 @@ select -date '1999-01-01' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(- DATE '1999-01-01')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'DATE '1999-01-01'' is of date type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -443,7 +443,7 @@ select -timestamp '1999-01-01' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(- TIMESTAMP '1999-01-01 00:00:00')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'TIMESTAMP '1999-01-01 00:00:00'' is of timestamp type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -452,4 +452,4 @@ select -x'2379ACFe' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(- X'2379ACFE')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'X'2379ACFE'' is of binary type.; line 1 pos 7"],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/map.sql.out b/sql/core/src/test/resources/sql-tests/results/map.sql.out index 2a7f8459fa6ea..8f4a909f4b464 100644 --- a/sql/core/src/test/resources/sql-tests/results/map.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/map.sql.out @@ -69,7 +69,7 @@ select map_contains_key(map('1', 'a', '2', 'b'), 1) struct<> -- !query output 
org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'map_contains_key(map('1', 'a', '2', 'b'), 1)' due to data type mismatch: Input to function map_contains_key should have been map followed by a value with same key type, but it's [map, int].; line 1 pos 7"],"queryContext":[]} -- !query @@ -78,4 +78,4 @@ select map_contains_key(map(1, 'a', 2, 'b'), '1') struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'map_contains_key(map(1, 'a', 2, 'b'), '1')' due to data type mismatch: Input to function map_contains_key should have been map followed by a value with same key type, but it's [map, string].; line 1 pos 7"],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/misc-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/misc-functions.sql.out index a0f9f2af04de8..ccecafafa58aa 100644 --- a/sql/core/src/test/resources/sql-tests/results/misc-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/misc-functions.sql.out @@ -68,8 +68,8 @@ SELECT assert_true(false) -- !query schema struct<> -- !query output -org.apache.spark.SparkException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +java.lang.RuntimeException +{"errorClass":"legacy","messageParameters":["'false' is not true!"],"queryContext":[]} -- !query @@ -77,8 +77,8 @@ SELECT assert_true(boolean(0)) -- !query schema struct<> -- !query output -org.apache.spark.SparkException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +java.lang.RuntimeException +{"errorClass":"legacy","messageParameters":["'cast(0 as boolean)' is not true!"],"queryContext":[]} -- !query @@ -86,8 +86,8 @@ SELECT assert_true(null) -- !query schema struct<> -- !query output -org.apache.spark.SparkException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +java.lang.RuntimeException +{"errorClass":"legacy","messageParameters":["'null' is not true!"],"queryContext":[]} -- !query @@ -95,8 +95,8 @@ SELECT assert_true(boolean(null)) -- !query schema struct<> -- !query output -org.apache.spark.SparkException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +java.lang.RuntimeException +{"errorClass":"legacy","messageParameters":["'cast(null as boolean)' is not true!"],"queryContext":[]} -- !query @@ -104,8 +104,8 @@ SELECT assert_true(false, 'custom error message') -- !query schema struct<> -- !query output -org.apache.spark.SparkException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +java.lang.RuntimeException +{"errorClass":"legacy","messageParameters":["custom error message"],"queryContext":[]} -- !query @@ -121,8 +121,8 @@ SELECT raise_error('error message') -- !query schema struct<> -- !query output -org.apache.spark.SparkException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +java.lang.RuntimeException +{"errorClass":"legacy","messageParameters":["error message"],"queryContext":[]} -- !query @@ -130,5 +130,5 @@ SELECT if(v > 5, raise_error('too big: ' || v), v + 1) FROM tbl_misc -- !query schema struct<> -- !query output -org.apache.spark.SparkException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +java.lang.RuntimeException +{"errorClass":"legacy","messageParameters":["too big: 8"],"queryContext":[]} diff --git 
a/sql/core/src/test/resources/sql-tests/results/order-by-ordinal.sql.out b/sql/core/src/test/resources/sql-tests/results/order-by-ordinal.sql.out index bc17092f0a0dc..1aa6259dc23d1 100644 --- a/sql/core/src/test/resources/sql-tests/results/order-by-ordinal.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/order-by-ordinal.sql.out @@ -72,7 +72,7 @@ select * from data order by 0 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["ORDER BY position 0 is not in select list (valid range is [1, 2]); line 1 pos 28"],"queryContext":[]} -- !query @@ -81,7 +81,7 @@ select * from data order by -1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["ORDER BY position -1 is not in select list (valid range is [1, 2]); line 1 pos 28"],"queryContext":[]} -- !query @@ -90,7 +90,7 @@ select * from data order by 3 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["ORDER BY position 3 is not in select list (valid range is [1, 2]); line 1 pos 28"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/percentiles.sql.out b/sql/core/src/test/resources/sql-tests/results/percentiles.sql.out index 1e399e7667823..1a0e4f49971f6 100644 --- a/sql/core/src/test/resources/sql-tests/results/percentiles.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/percentiles.sql.out @@ -178,7 +178,7 @@ ORDER BY salary struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Cannot specify order by or frame for 'percentile_cont'."],"queryContext":[]} -- !query @@ -194,7 +194,7 @@ ORDER BY salary struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Cannot specify order by or frame for 'percentile_disc'."],"queryContext":[]} -- !query @@ -209,7 +209,7 @@ ORDER BY salary struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Cannot specify order by or frame for 'median'."],"queryContext":[]} -- !query @@ -225,7 +225,7 @@ ORDER BY salary struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Cannot specify order by or frame for 'percentile_cont'."],"queryContext":[]} -- !query @@ -241,7 +241,7 @@ ORDER BY salary struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Cannot specify order by or frame for 'percentile_disc'."],"queryContext":[]} -- !query @@ -256,7 +256,7 @@ ORDER BY salary struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Cannot specify order by or frame for 'median'."],"queryContext":[]} -- !query @@ -336,7 +336,7 @@ ORDER BY salary struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Cannot specify order by or frame for 'percentile_cont'."],"queryContext":[]} -- !query @@ -353,7 +353,7 @@ ORDER BY salary struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Cannot specify order by or frame for 'percentile_disc'."],"queryContext":[]} -- !query @@ -369,7 +369,7 @@ ORDER BY salary struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Cannot specify order by or frame for 'median'."],"queryContext":[]} -- !query @@ -386,7 +386,7 @@ ORDER BY salary struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Cannot specify order by or frame for 'percentile_cont'."],"queryContext":[]} -- !query @@ -403,7 +403,7 @@ ORDER BY salary struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Cannot specify order by or frame for 'percentile_disc'."],"queryContext":[]} -- !query @@ -419,7 +419,7 @@ ORDER BY salary struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Cannot specify order by or frame for 'median'."],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/pivot.sql.out b/sql/core/src/test/resources/sql-tests/results/pivot.sql.out index 4af0434622598..c43199ea50bca 100644 --- a/sql/core/src/test/resources/sql-tests/results/pivot.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/pivot.sql.out @@ -199,7 +199,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Aggregate expression required for pivot, but 'coursesales.earnings' did not appear in any aggregate function."],"queryContext":[]} -- !query @@ -214,7 +214,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Aggregate expression required for pivot, but '__auto_generated_subquery_name.year' did not appear in any aggregate function."],"queryContext":[]} -- !query @@ -259,7 +259,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["It is not allowed to use an aggregate function in the argument of another aggregate function. 
Please use the inner aggregate function in a sub-query."],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/aggregates_part1.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/aggregates_part1.sql.out index ec57cbba057e0..f64b771c25a7a 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/aggregates_part1.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/aggregates_part1.sql.out @@ -484,7 +484,7 @@ having exists (select 1 from onek b struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Found an aggregate function in a correlated predicate that has both outer and local references, which is not supported: sum(DISTINCT (outer(a.four) + b.four))"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/aggregates_part3.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/aggregates_part3.sql.out index f5b3286795aba..cbd9956362887 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/aggregates_part3.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/aggregates_part3.sql.out @@ -5,7 +5,7 @@ select max(min(unique1)) from tenk1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["It is not allowed to use an aggregate function in the argument of another aggregate function. Please use the inner aggregate function in a sub-query."],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/boolean.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/boolean.sql.out index a5095b85a0d83..2f132c78eb7de 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/boolean.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/boolean.sql.out @@ -476,7 +476,7 @@ INSERT INTO BOOLTBL2 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["failed to evaluate expression CAST('XXX' AS BOOLEAN): [CAST_INVALID_INPUT] The value 'XXX' of the type \"STRING\" cannot be cast to \"BOOLEAN\" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set \"spark.sql.ansi.enabled\" to \"false\" to bypass this error.\n== SQL(line 2, position 12) ==\n VALUES (boolean('XXX'))\n ^^^^^^^^^^^^^^\n; line 2 pos 3"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/create_view.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/create_view.sql.out index e488bc5ef5095..6329299e7b7ef 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/create_view.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/create_view.sql.out @@ -53,7 +53,7 @@ CREATE VIEW key_dependent_view AS struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["expression 'spark_catalog.default.view_base_table.data' is neither present in the group by, nor is it an aggregate function. 
Add to group by or wrap in first() (or first_value) if you don't care which value you get."],"queryContext":[]} -- !query @@ -257,7 +257,7 @@ CREATE VIEW v1_temp AS SELECT * FROM temp_table struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v1_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW"],"queryContext":[]} -- !query @@ -313,7 +313,7 @@ CREATE VIEW temp_view_test.v3_temp AS SELECT * FROM temp_table struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v3_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW"],"queryContext":[]} -- !query @@ -362,7 +362,7 @@ CREATE VIEW v4_temp AS struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v4_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW"],"queryContext":[]} -- !query @@ -374,7 +374,7 @@ CREATE VIEW v5_temp AS struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v5_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW"],"queryContext":[]} -- !query @@ -533,7 +533,7 @@ CREATE VIEW v6_temp AS SELECT * FROM base_table WHERE id IN (SELECT id FROM temp struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v6_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW"],"queryContext":[]} -- !query @@ -542,7 +542,7 @@ CREATE VIEW v7_temp AS SELECT t1.id, t2.a FROM base_table t1, (SELECT * FROM tem struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v7_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW"],"queryContext":[]} -- !query @@ -551,7 +551,7 @@ CREATE VIEW v8_temp AS SELECT * FROM base_table WHERE EXISTS (SELECT 1 FROM temp struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v8_temp` by referencing a temporary view temp_table. 
Please create a temp view instead by CREATE TEMP VIEW"],"queryContext":[]} -- !query @@ -560,7 +560,7 @@ CREATE VIEW v9_temp AS SELECT * FROM base_table WHERE NOT EXISTS (SELECT 1 FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v9_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW"],"queryContext":[]} -- !query @@ -569,7 +569,7 @@ CREATE VIEW v10_temp AS SELECT * FROM v7_temp struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Table or view not found: v7_temp; line 1 pos 38"],"queryContext":[]} -- !query @@ -578,7 +578,7 @@ CREATE VIEW v11_temp AS SELECT t1.id, t2.a FROM base_table t1, v10_temp t2 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Table or view not found: v10_temp; line 1 pos 63"],"queryContext":[]} -- !query @@ -587,7 +587,7 @@ CREATE VIEW v12_temp AS SELECT true FROM v11_temp struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Table or view not found: v11_temp; line 1 pos 41"],"queryContext":[]} -- !query @@ -669,7 +669,7 @@ CREATE VIEW temporal1 AS SELECT * FROM t1 CROSS JOIN tt struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Not allowed to create a permanent view `spark_catalog`.`testviewschm2`.`temporal1` by referencing a temporary view tt. Please create a temp view instead by CREATE TEMP VIEW"],"queryContext":[]} -- !query @@ -710,7 +710,7 @@ CREATE VIEW temporal2 AS SELECT * FROM t1 INNER JOIN tt ON t1.num = tt.num2 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Not allowed to create a permanent view `spark_catalog`.`testviewschm2`.`temporal2` by referencing a temporary view tt. Please create a temp view instead by CREATE TEMP VIEW"],"queryContext":[]} -- !query @@ -751,7 +751,7 @@ CREATE VIEW temporal3 AS SELECT * FROM t1 LEFT JOIN tt ON t1.num = tt.num2 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Not allowed to create a permanent view `spark_catalog`.`testviewschm2`.`temporal3` by referencing a temporary view tt. Please create a temp view instead by CREATE TEMP VIEW"],"queryContext":[]} -- !query @@ -792,7 +792,7 @@ CREATE VIEW temporal4 AS SELECT * FROM t1 LEFT JOIN tt ON t1.num = tt.num2 AND t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Not allowed to create a permanent view `spark_catalog`.`testviewschm2`.`temporal4` by referencing a temporary view tt. 
Please create a temp view instead by CREATE TEMP VIEW"],"queryContext":[]} -- !query @@ -801,7 +801,7 @@ CREATE VIEW temporal5 AS SELECT * FROM t1 WHERE num IN (SELECT num FROM t1 WHERE struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Not allowed to create a permanent view `spark_catalog`.`testviewschm2`.`temporal5` by referencing a temporary view tt. Please create a temp view instead by CREATE TEMP VIEW"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out index ff959dba2e3ad..899fb494c801a 100755 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out @@ -198,7 +198,7 @@ SELECT date '1999 Jan 08' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 1999 Jan 08(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 Jan 08'\n-------^^^\n"],"queryContext":[]} -- !query @@ -207,7 +207,7 @@ SELECT date '1999 08 Jan' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 1999 08 Jan(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 08 Jan'\n-------^^^\n"],"queryContext":[]} -- !query @@ -232,7 +232,7 @@ SELECT date '1999 01 08' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 1999 01 08(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 01 08'\n-------^^^\n"],"queryContext":[]} -- !query @@ -241,7 +241,7 @@ SELECT date '1999 08 01' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 1999 08 01(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 08 01'\n-------^^^\n"],"queryContext":[]} -- !query @@ -258,7 +258,7 @@ SELECT date '1999 Jan 08' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 1999 Jan 08(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 Jan 08'\n-------^^^\n"],"queryContext":[]} -- !query @@ -267,7 +267,7 @@ SELECT date '1999 08 Jan' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 1999 08 Jan(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 08 Jan'\n-------^^^\n"],"queryContext":[]} -- !query @@ -292,7 +292,7 @@ SELECT date '1999 01 08' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 1999 01 08(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 01 08'\n-------^^^\n"],"queryContext":[]} -- !query @@ -301,7 +301,7 @@ SELECT 
date '1999 08 01' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 1999 08 01(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 08 01'\n-------^^^\n"],"queryContext":[]} -- !query @@ -326,7 +326,7 @@ SELECT date '1999 Jan 08' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 1999 Jan 08(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 Jan 08'\n-------^^^\n"],"queryContext":[]} -- !query @@ -335,7 +335,7 @@ SELECT date '1999 08 Jan' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 1999 08 Jan(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 08 Jan'\n-------^^^\n"],"queryContext":[]} -- !query @@ -360,7 +360,7 @@ SELECT date '1999 01 08' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 1999 01 08(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 01 08'\n-------^^^\n"],"queryContext":[]} -- !query @@ -369,7 +369,7 @@ SELECT date '1999 08 01' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 1999 08 01(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 08 01'\n-------^^^\n"],"queryContext":[]} -- !query @@ -518,7 +518,7 @@ select make_date(2013, 2, 30) struct<> -- !query output java.time.DateTimeException -Invalid date 'FEBRUARY 30'. If necessary set spark.sql.ansi.enabled to false to bypass this error. +{"errorClass":"legacy","messageParameters":["Invalid date 'FEBRUARY 30'. If necessary set spark.sql.ansi.enabled to false to bypass this error."],"queryContext":[]} -- !query @@ -527,7 +527,7 @@ select make_date(2013, 13, 1) struct<> -- !query output java.time.DateTimeException -Invalid value for MonthOfYear (valid values 1 - 12): 13. If necessary set spark.sql.ansi.enabled to false to bypass this error. +{"errorClass":"legacy","messageParameters":["Invalid value for MonthOfYear (valid values 1 - 12): 13. If necessary set spark.sql.ansi.enabled to false to bypass this error."],"queryContext":[]} -- !query @@ -536,7 +536,7 @@ select make_date(2013, 11, -1) struct<> -- !query output java.time.DateTimeException -Invalid value for DayOfMonth (valid values 1 - 28/31): -1. If necessary set spark.sql.ansi.enabled to false to bypass this error. +{"errorClass":"legacy","messageParameters":["Invalid value for DayOfMonth (valid values 1 - 28/31): -1. 
If necessary set spark.sql.ansi.enabled to false to bypass this error."],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int4.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int4.sql.out index 1ee88355f9b5d..e83eee6d78b03 100755 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int4.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int4.sql.out @@ -196,8 +196,8 @@ SELECT '' AS five, i.f1, i.f1 * smallint('2') AS x FROM INT4_TBL i -- !query schema struct<> -- !query output -org.apache.spark.SparkException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +org.apache.spark.SparkArithmeticException +{"errorClass":"ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["integer overflow"," Use 'try_multiply' to tolerate overflow and return NULL instead.","spark.sql.ansi.enabled"],"queryContext":[{"objectType":"","objectName":"","startIndex":25,"stopIndex":44,"fragment":"i.f1 * smallint('2'"}]} -- !query @@ -216,8 +216,8 @@ SELECT '' AS five, i.f1, i.f1 * int('2') AS x FROM INT4_TBL i -- !query schema struct<> -- !query output -org.apache.spark.SparkException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +org.apache.spark.SparkArithmeticException +{"errorClass":"ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["integer overflow"," Use 'try_multiply' to tolerate overflow and return NULL instead.","spark.sql.ansi.enabled"],"queryContext":[{"objectType":"","objectName":"","startIndex":25,"stopIndex":39,"fragment":"i.f1 * int('2'"}]} -- !query @@ -236,8 +236,8 @@ SELECT '' AS five, i.f1, i.f1 + smallint('2') AS x FROM INT4_TBL i -- !query schema struct<> -- !query output -org.apache.spark.SparkException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +org.apache.spark.SparkArithmeticException +{"errorClass":"ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["integer overflow"," Use 'try_add' to tolerate overflow and return NULL instead.","spark.sql.ansi.enabled"],"queryContext":[{"objectType":"","objectName":"","startIndex":25,"stopIndex":44,"fragment":"i.f1 + smallint('2'"}]} -- !query @@ -257,8 +257,8 @@ SELECT '' AS five, i.f1, i.f1 + int('2') AS x FROM INT4_TBL i -- !query schema struct<> -- !query output -org.apache.spark.SparkException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +org.apache.spark.SparkArithmeticException +{"errorClass":"ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["integer overflow"," Use 'try_add' to tolerate overflow and return NULL instead.","spark.sql.ansi.enabled"],"queryContext":[{"objectType":"","objectName":"","startIndex":25,"stopIndex":39,"fragment":"i.f1 + int('2'"}]} -- !query @@ -278,8 +278,8 @@ SELECT '' AS five, i.f1, i.f1 - smallint('2') AS x FROM INT4_TBL i -- !query schema struct<> -- !query output -org.apache.spark.SparkException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +org.apache.spark.SparkArithmeticException +{"errorClass":"ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["integer overflow"," Use 'try_subtract' to tolerate overflow and return NULL instead.","spark.sql.ansi.enabled"],"queryContext":[{"objectType":"","objectName":"","startIndex":25,"stopIndex":44,"fragment":"i.f1 - smallint('2'"}]} -- !query @@ -299,8 +299,8 @@ SELECT '' AS five, i.f1, i.f1 - int('2') AS x FROM INT4_TBL i -- !query schema struct<> -- !query output -org.apache.spark.SparkException 
-{"errorClass":null,"messageParameters":[],"queryContext":[]} +org.apache.spark.SparkArithmeticException +{"errorClass":"ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["integer overflow"," Use 'try_subtract' to tolerate overflow and return NULL instead.","spark.sql.ansi.enabled"],"queryContext":[{"objectType":"","objectName":"","startIndex":25,"stopIndex":39,"fragment":"i.f1 - int('2'"}]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out index f5901bdb1cf2a..606279c1accae 100755 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out @@ -388,8 +388,8 @@ SELECT '' AS three, q1, q2, q1 * q2 AS multiply FROM INT8_TBL -- !query schema struct<> -- !query output -org.apache.spark.SparkException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +org.apache.spark.SparkArithmeticException +{"errorClass":"ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["long overflow"," Use 'try_multiply' to tolerate overflow and return NULL instead.","spark.sql.ansi.enabled"],"queryContext":[{"objectType":"","objectName":"","startIndex":28,"stopIndex":34,"fragment":"q1 * q"}]} -- !query @@ -678,8 +678,8 @@ SELECT CAST(q1 AS int) FROM int8_tbl WHERE q2 <> 456 -- !query schema struct<> -- !query output -org.apache.spark.SparkException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +org.apache.spark.SparkArithmeticException +{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":["4567890123456789L","\"BIGINT\"","\"INT\"","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -695,8 +695,8 @@ SELECT CAST(q1 AS smallint) FROM int8_tbl WHERE q2 <> 456 -- !query schema struct<> -- !query output -org.apache.spark.SparkException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +org.apache.spark.SparkArithmeticException +{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":["4567890123456789L","\"BIGINT\"","\"SMALLINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[]} -- !query @@ -771,7 +771,7 @@ SELECT * FROM range(bigint('+4567890123456789'), bigint('+4567890123456799'), 0) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Table-valued function range with alternatives: \n range(start: long, end: long, step: long, numSlices: integer)\n range(start: long, end: long, step: long)\n range(start: long, end: long)\n range(end: long)\ncannot be applied to (long, long, integer): requirement failed: step (0) cannot be 0; line 1 pos 14"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out index fa3e99f676b45..f344f6b347216 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out @@ -101,7 +101,7 @@ SELECT interval '1 2:03' day to hour struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval day to hour: 1 
2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nSELECT interval '1 2:03' day to hour\n----------------^^^\n"],"queryContext":[]} -- !query @@ -110,7 +110,7 @@ SELECT interval '1 2:03:04' day to hour struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval day to hour: 1 2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nSELECT interval '1 2:03:04' day to hour\n----------------^^^\n"],"queryContext":[]} -- !query @@ -127,7 +127,7 @@ SELECT interval '1 2:03:04' day to minute struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]d h:m`, `INTERVAL [+|-]'[+|-]d h:m' DAY TO MINUTE` when cast to interval day to minute: 1 2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nSELECT interval '1 2:03:04' day to minute\n----------------^^^\n"],"queryContext":[]} -- !query @@ -136,7 +136,7 @@ SELECT interval '1 2:03' day to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]d h:m:s.n`, `INTERVAL [+|-]'[+|-]d h:m:s.n' DAY TO SECOND` when cast to interval day to second: 1 2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nSELECT interval '1 2:03' day to second\n----------------^^^\n"],"queryContext":[]} -- !query @@ -153,7 +153,7 @@ SELECT interval '1 2:03' hour to minute struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]h:m`, `INTERVAL [+|-]'[+|-]h:m' HOUR TO MINUTE` when cast to interval hour to minute: 1 2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nSELECT interval '1 2:03' hour to minute\n----------------^^^\n"],"queryContext":[]} -- !query @@ -162,7 +162,7 @@ SELECT interval '1 2:03:04' hour to minute struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]h:m`, `INTERVAL [+|-]'[+|-]h:m' HOUR TO MINUTE` when cast to interval hour to minute: 1 2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nSELECT interval '1 2:03:04' hour to minute\n----------------^^^\n"],"queryContext":[]} -- !query @@ -171,7 +171,7 @@ SELECT interval '1 2:03' hour to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} 
+{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 1 2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nSELECT interval '1 2:03' hour to second\n----------------^^^\n"],"queryContext":[]} -- !query @@ -180,7 +180,7 @@ SELECT interval '1 2:03:04' hour to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 1 2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nSELECT interval '1 2:03:04' hour to second\n----------------^^^\n"],"queryContext":[]} -- !query @@ -189,7 +189,7 @@ SELECT interval '1 2:03' minute to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]m:s.n`, `INTERVAL [+|-]'[+|-]m:s.n' MINUTE TO SECOND` when cast to interval minute to second: 1 2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nSELECT interval '1 2:03' minute to second\n----------------^^^\n"],"queryContext":[]} -- !query @@ -198,4 +198,4 @@ SELECT interval '1 2:03:04' minute to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]m:s.n`, `INTERVAL [+|-]'[+|-]m:s.n' MINUTE TO SECOND` when cast to interval minute to second: 1 2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nSELECT interval '1 2:03:04' minute to second\n----------------^^^\n"],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/join.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/join.sql.out index 749c23a697bde..20eb695b1962b 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/join.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/join.sql.out @@ -546,7 +546,7 @@ SELECT '' AS `xxx`, i, k, t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Reference 'i' is ambiguous, could be: spark_catalog.default.j1_tbl.i, spark_catalog.default.j2_tbl.i.; line 1 pos 20"],"queryContext":[]} -- !query @@ -3235,7 +3235,7 @@ select * from struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Reference 'f1' is ambiguous, could be: j.f1, j.f1.; line 2 pos 63"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/limit.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/limit.sql.out index 5798461b9ce44..e9ef90d68f5e2 100644 --- 
a/sql/core/src/test/resources/sql-tests/results/postgreSQL/limit.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/limit.sql.out @@ -131,7 +131,7 @@ select * from int8_tbl limit (case when random() < 0.5 then bigint(null) end) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["The limit expression must evaluate to a constant value, but got CASE WHEN (_nondeterministic < CAST(0.5BD AS DOUBLE)) THEN CAST(NULL AS BIGINT) END"],"queryContext":[]} -- !query @@ -140,7 +140,7 @@ select * from int8_tbl offset (case when random() < 0.5 then bigint(null) end) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["The offset expression must evaluate to a constant value, but got CASE WHEN (_nondeterministic < CAST(0.5BD AS DOUBLE)) THEN CAST(NULL AS BIGINT) END"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/numeric.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/numeric.sql.out index df3df33ff4ffc..0dd8f27794f20 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/numeric.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/numeric.sql.out @@ -3581,7 +3581,7 @@ INSERT INTO num_exp_power_10_ln VALUES (7,1716699575118597095.423308199106402476 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\ndecimal can only support precision up to 38\n== SQL ==\nINSERT INTO num_exp_power_10_ln VALUES (7,1716699575118597095.42330819910640247627)"],"queryContext":[]} -- !query @@ -3824,7 +3824,7 @@ INSERT INTO num_result SELECT t1.id, t2.id, t1.val, t2.val, t1.val * t2.val struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["`spark_catalog`.`default`.`num_result` requires that the data to be inserted have the same number of columns as the target table: target table has 3 column(s) but the inserted data has 5 column(s), including 0 partition column(s) having constant value(s)."],"queryContext":[]} -- !query @@ -4681,7 +4681,7 @@ SELECT '' AS to_number_1, to_number('-34,338,492', '99G999G999') struct<> -- !query output java.lang.IllegalArgumentException -The input string '-34,338,492' does not match the given number format: '99G999G999' +{"errorClass":"legacy","messageParameters":["The input string '-34,338,492' does not match the given number format: '99G999G999'"],"queryContext":[]} -- !query @@ -4690,7 +4690,7 @@ SELECT '' AS to_number_2, to_number('-34,338,492.654,878', '99G999G999D999G999' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'to_number('-34,338,492.654,878', '99G999G999D999G999')' due to data type mismatch: Thousands separators (, or G) may not appear after the decimal point in the number format: '99G999G999D999G999'; line 1 pos 27"],"queryContext":[]} -- !query @@ -4739,7 +4739,7 @@ SELECT '' AS to_number_15, to_number('123,000','999G') struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'to_number('123,000', '999G')' due to data type mismatch: Thousands separators (, or G) must have digits in between them in the number format: '999G'; line 1 pos 27"],"queryContext":[]} -- !query @@ -4748,7 +4748,7 @@ SELECT '' AS to_number_16, to_number('123456','999G999') struct<> -- !query output java.lang.IllegalArgumentException -The input string '123456' does not match the given number format: '999G999' +{"errorClass":"legacy","messageParameters":["The input string '123456' does not match the given number format: '999G999'"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_having.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_having.sql.out index 3b4adb5aebe28..00d634b521403 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_having.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_having.sql.out @@ -140,7 +140,7 @@ SELECT a FROM test_having HAVING min(a) < max(a) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["grouping expressions sequence is empty, and 'spark_catalog.default.test_having.a' is not an aggregate function. Wrap '(min(spark_catalog.default.test_having.a) AS `min(a#x)`, max(spark_catalog.default.test_having.a) AS `max(a#x)`)' in windowing function(s) or wrap 'spark_catalog.default.test_having.a' in first() (or first_value) if you don't care which value you get."],"queryContext":[]} -- !query @@ -173,8 +173,8 @@ SELECT 1 AS one FROM test_having WHERE 1/a = 1 HAVING 1 < 2 -- !query schema struct<> -- !query output -org.apache.spark.SparkException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +org.apache.spark.SparkArithmeticException +{"errorClass":"DIVIDE_BY_ZERO","sqlState":"22012","messageParameters":["\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":39,"stopIndex":41,"fragment":"1/"}]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_implicit.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_implicit.sql.out index 1021d9027e7c5..3feb31964fbab 100755 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_implicit.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_implicit.sql.out @@ -200,7 +200,7 @@ SELECT c, count(*) FROM test_missing_target GROUP BY 3 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["GROUP BY position 3 is not in select list (valid range is [1, 2]); line 1 pos 53"],"queryContext":[]} -- !query @@ -211,7 +211,7 @@ SELECT count(*) FROM test_missing_target x, test_missing_target y struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Reference 'b' is ambiguous, could be: x.b, y.b.; line 3 pos 10"],"queryContext":[]} -- !query @@ -387,7 +387,7 @@ SELECT count(x.a) FROM test_missing_target x, test_missing_target y struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} 
+{"errorClass":"legacy","messageParameters":["Reference 'b' is ambiguous, could be: x.b, y.b.; line 3 pos 10"],"queryContext":[]} -- !query @@ -411,7 +411,7 @@ SELECT count(b) FROM test_missing_target x, test_missing_target y struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Reference 'b' is ambiguous, could be: x.b, y.b.; line 1 pos 13"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/strings.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/strings.sql.out index 586fe4446fef5..874b5281c987f 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/strings.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/strings.sql.out @@ -443,7 +443,7 @@ SELECT 'maca' LIKE 'm%aca' ESCAPE '%' AS `true` struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["the pattern 'm%aca' is invalid, the escape character is not allowed to precede 'a'"],"queryContext":[]} -- !query @@ -452,7 +452,7 @@ SELECT 'maca' NOT LIKE 'm%aca' ESCAPE '%' AS `false` struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["the pattern 'm%aca' is invalid, the escape character is not allowed to precede 'a'"],"queryContext":[]} -- !query @@ -461,7 +461,7 @@ SELECT 'ma%a' LIKE 'm%a%%a' ESCAPE '%' AS `true` struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["the pattern 'm%a%%a' is invalid, the escape character is not allowed to precede 'a'"],"queryContext":[]} -- !query @@ -470,7 +470,7 @@ SELECT 'ma%a' NOT LIKE 'm%a%%a' ESCAPE '%' AS `false` struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["the pattern 'm%a%%a' is invalid, the escape character is not allowed to precede 'a'"],"queryContext":[]} -- !query @@ -479,7 +479,7 @@ SELECT 'bear' LIKE 'b_ear' ESCAPE '_' AS `true` struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["the pattern 'b_ear' is invalid, the escape character is not allowed to precede 'e'"],"queryContext":[]} -- !query @@ -488,7 +488,7 @@ SELECT 'bear' NOT LIKE 'b_ear' ESCAPE '_' AS `false` struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["the pattern 'b_ear' is invalid, the escape character is not allowed to precede 'e'"],"queryContext":[]} -- !query @@ -497,7 +497,7 @@ SELECT 'be_r' LIKE 'b_e__r' ESCAPE '_' AS `true` struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["the pattern 'b_e__r' is invalid, the escape character is not allowed to precede 'e'"],"queryContext":[]} -- !query @@ -506,7 +506,7 @@ SELECT 'be_r' NOT LIKE 'b_e__r' ESCAPE '_' AS `false` struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["the pattern 'b_e__r' is invalid, the escape character is not allowed to precede 'e'"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out index 474e6d0b81fa4..7f777e0d65d29 100755 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out @@ -216,7 +216,7 @@ select format_string('Hello %s %s', 'World') struct<> -- !query output java.util.MissingFormatArgumentException -Format specifier '%s' +{"errorClass":"legacy","messageParameters":["Format specifier '%s'"],"queryContext":[]} -- !query @@ -225,7 +225,7 @@ select format_string('Hello %s') struct<> -- !query output java.util.MissingFormatArgumentException -Format specifier '%s' +{"errorClass":"legacy","messageParameters":["Format specifier '%s'"],"queryContext":[]} -- !query @@ -258,7 +258,7 @@ select format_string('%1$s %4$s', 1, 2, 3) struct<> -- !query output java.util.MissingFormatArgumentException -Format specifier '%4$s' +{"errorClass":"legacy","messageParameters":["Format specifier '%4$s'"],"queryContext":[]} -- !query @@ -267,7 +267,7 @@ select format_string('%1$s %13$s', 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12) struct<> -- !query output java.util.MissingFormatArgumentException -Format specifier '%13$s' +{"errorClass":"legacy","messageParameters":["Format specifier '%13$s'"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out index a2d16a5a4435a..8c0e5c2ad4986 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out @@ -221,8 +221,8 @@ from range(9223372036854775804, 9223372036854775807) x -- !query schema struct<> -- !query output -org.apache.spark.SparkException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +org.apache.spark.SparkArithmeticException +{"errorClass":"ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["long overflow"," Use 'try_add' to tolerate overflow and return NULL instead.","spark.sql.ansi.enabled"],"queryContext":[{"objectType":"","objectName":"","startIndex":-1,"stopIndex":-1,"fragment":""}]} -- !query @@ -231,8 +231,8 @@ from range(-9223372036854775806, -9223372036854775805) x -- !query schema struct<> -- !query output -org.apache.spark.SparkException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +org.apache.spark.SparkArithmeticException +{"errorClass":"ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["long overflow"," Use 'try_add' to tolerate overflow and return NULL instead.","spark.sql.ansi.enabled"],"queryContext":[{"objectType":"","objectName":"","startIndex":-1,"stopIndex":-1,"fragment":""}]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part3.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part3.sql.out index 7b49594f85d7f..41696d761a8cc 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part3.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part3.sql.out @@ -69,7 +69,7 @@ insert into datetimes values struct<> -- !query output 
org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["failed to evaluate expression CAST('11:00 BST' AS TIMESTAMP): [CAST_INVALID_INPUT] The value '11:00 BST' of the type \"STRING\" cannot be cast to \"TIMESTAMP\" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set \"spark.sql.ansi.enabled\" to \"false\" to bypass this error.\n== SQL(line 2, position 24) ==\n(1, timestamp '11:00', cast ('11:00 BST' as timestamp), cast ('1 year' as timestamp), ...\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n; line 1 pos 22"],"queryContext":[]} -- !query @@ -242,7 +242,7 @@ from t1 where f1 = f2 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(PARTITION BY spark_catalog.default.t1.f1 RANGE BETWEEN 1 PRECEDING AND 1 FOLLOWING)' due to data type mismatch: A range window frame cannot be used in an unordered window specification.; line 1 pos 24"],"queryContext":[]} -- !query @@ -292,7 +292,7 @@ SELECT * FROM empsalary WHERE row_number() OVER (ORDER BY salary) < 10 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["It is not allowed to use window functions inside WHERE clause"],"queryContext":[]} -- !query @@ -301,7 +301,7 @@ SELECT * FROM empsalary INNER JOIN tenk1 ON row_number() OVER (ORDER BY salary) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nThe query operator `Join` contains one or more unsupported\nexpression types Aggregate, Window or Generate.\nInvalid expressions: [row_number() OVER (ORDER BY spark_catalog.default.empsalary.salary ASC NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)]"],"queryContext":[]} -- !query @@ -310,7 +310,7 @@ SELECT rank() OVER (ORDER BY 1), count(*) FROM empsalary GROUP BY 1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nThe query operator `Aggregate` contains one or more unsupported\nexpression types Aggregate, Window or Generate.\nInvalid expressions: [RANK() OVER (ORDER BY 1 ASC NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)]"],"queryContext":[]} -- !query @@ -328,7 +328,7 @@ SELECT * FROM empsalary WHERE (rank() OVER (ORDER BY random())) > 10 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["It is not allowed to use window functions inside WHERE clause"],"queryContext":[]} -- !query @@ -337,7 +337,7 @@ SELECT * FROM empsalary WHERE rank() OVER (ORDER BY random()) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["It is not allowed to use window functions inside WHERE clause"],"queryContext":[]} -- !query @@ -355,7 +355,7 @@ SELECT range(1, 100) OVER () FROM empsalary struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} 
+{"errorClass":"legacy","messageParameters":["Undefined function: range. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.range.; line 1 pos 7"],"queryContext":[]} -- !query @@ -364,7 +364,7 @@ SELECT ntile(0) OVER (ORDER BY ten), ten, four FROM tenk1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'ntile(0)' due to data type mismatch: Buckets expression must be positive, but got: 0; line 1 pos 7"],"queryContext":[]} -- !query @@ -373,7 +373,7 @@ SELECT nth_value(four, 0) OVER (ORDER BY ten), ten, four FROM tenk1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'nth_value(spark_catalog.default.tenk1.four, 0)' due to data type mismatch: The 'offset' argument of nth_value must be greater than zero but it is 0.; line 1 pos 7"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part4.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part4.sql.out index 9ad4361d40f01..f430b2759ccb3 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part4.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part4.sql.out @@ -498,4 +498,4 @@ FROM (VALUES(1,1),(2,2),(3,(cast('nan' as int))),(4,3),(5,4)) t(a,b) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["failed to evaluate expression CAST('nan' AS INT): [CAST_INVALID_INPUT] The value 'nan' of the type \"STRING\" cannot be cast to \"INT\" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. 
If necessary set \"spark.sql.ansi.enabled\" to \"false\" to bypass this error.\n== SQL(line 3, position 29) ==\nFROM (VALUES(1,1),(2,2),(3,(cast('nan' as int))),(4,3),(5,4)) t(a,b)\n ^^^^^^^^^^^^^^^^^^\n; line 3 pos 6"],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/with.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/with.sql.out index d9e8c33975830..1ed01dec2522e 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/with.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/with.sql.out @@ -216,7 +216,7 @@ SELECT * FROM outermost ORDER BY 1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Table or view not found: outermost; line 4 pos 23"],"queryContext":[]} -- !query @@ -346,7 +346,7 @@ create table foo (with baz) struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nDataType baz is not supported.(line 1, pos 23)\n\n== SQL ==\ncreate table foo (with baz)\n-----------------------^^^\n"],"queryContext":[]} -- !query @@ -355,7 +355,7 @@ create table foo (with ordinality) struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nDataType ordinality is not supported.(line 1, pos 23)\n\n== SQL ==\ncreate table foo (with ordinality)\n-----------------------^^^\n"],"queryContext":[]} -- !query @@ -372,7 +372,7 @@ WITH test AS (SELECT 42) INSERT INTO test VALUES (1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Table not found: test; line 1 pos 37"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/random.sql.out b/sql/core/src/test/resources/sql-tests/results/random.sql.out index 51ef5efba58ec..f83db80b17b7a 100644 --- a/sql/core/src/test/resources/sql-tests/results/random.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/random.sql.out @@ -37,7 +37,7 @@ SELECT rand(1.0) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'rand(1.0BD)' due to data type mismatch: argument 1 requires (int or bigint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -78,4 +78,4 @@ SELECT rand('1') struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'rand('1')' due to data type mismatch: argument 1 requires (int or bigint) type, however, ''1'' is of string type.; line 1 pos 7"],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/regexp-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/regexp-functions.sql.out index b9da66b050648..a29b67d11083e 100644 --- a/sql/core/src/test/resources/sql-tests/results/regexp-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/regexp-functions.sql.out @@ -5,7 +5,7 @@ SELECT regexp_extract('1a 2b 14m', '\\d+') struct<> -- !query output 
java.lang.IllegalArgumentException -Regex group count is 0, but the specified group index is 1 +{"errorClass":"legacy","messageParameters":["Regex group count is 0, but the specified group index is 1"],"queryContext":[]} -- !query @@ -22,7 +22,7 @@ SELECT regexp_extract('1a 2b 14m', '\\d+', 1) struct<> -- !query output java.lang.IllegalArgumentException -Regex group count is 0, but the specified group index is 1 +{"errorClass":"legacy","messageParameters":["Regex group count is 0, but the specified group index is 1"],"queryContext":[]} -- !query @@ -31,7 +31,7 @@ SELECT regexp_extract('1a 2b 14m', '\\d+', 2) struct<> -- !query output java.lang.IllegalArgumentException -Regex group count is 0, but the specified group index is 2 +{"errorClass":"legacy","messageParameters":["Regex group count is 0, but the specified group index is 2"],"queryContext":[]} -- !query @@ -40,7 +40,7 @@ SELECT regexp_extract('1a 2b 14m', '\\d+', -1) struct<> -- !query output java.lang.IllegalArgumentException -The specified group index cannot be less than zero +{"errorClass":"legacy","messageParameters":["The specified group index cannot be less than zero"],"queryContext":[]} -- !query @@ -97,7 +97,7 @@ SELECT regexp_extract('1a 2b 14m', '(\\d+)([a-z]+)', 3) struct<> -- !query output java.lang.IllegalArgumentException -Regex group count is 2, but the specified group index is 3 +{"errorClass":"legacy","messageParameters":["Regex group count is 2, but the specified group index is 3"],"queryContext":[]} -- !query @@ -106,7 +106,7 @@ SELECT regexp_extract('1a 2b 14m', '(\\d+)([a-z]+)', -1) struct<> -- !query output java.lang.IllegalArgumentException -The specified group index cannot be less than zero +{"errorClass":"legacy","messageParameters":["The specified group index cannot be less than zero"],"queryContext":[]} -- !query @@ -140,7 +140,7 @@ SELECT regexp_extract_all('1a 2b 14m', '\\d+') struct<> -- !query output java.lang.IllegalArgumentException -Regex group count is 0, but the specified group index is 1 +{"errorClass":"legacy","messageParameters":["Regex group count is 0, but the specified group index is 1"],"queryContext":[]} -- !query @@ -157,7 +157,7 @@ SELECT regexp_extract_all('1a 2b 14m', '\\d+', 1) struct<> -- !query output java.lang.IllegalArgumentException -Regex group count is 0, but the specified group index is 1 +{"errorClass":"legacy","messageParameters":["Regex group count is 0, but the specified group index is 1"],"queryContext":[]} -- !query @@ -166,7 +166,7 @@ SELECT regexp_extract_all('1a 2b 14m', '\\d+', 2) struct<> -- !query output java.lang.IllegalArgumentException -Regex group count is 0, but the specified group index is 2 +{"errorClass":"legacy","messageParameters":["Regex group count is 0, but the specified group index is 2"],"queryContext":[]} -- !query @@ -175,7 +175,7 @@ SELECT regexp_extract_all('1a 2b 14m', '\\d+', -1) struct<> -- !query output java.lang.IllegalArgumentException -The specified group index cannot be less than zero +{"errorClass":"legacy","messageParameters":["The specified group index cannot be less than zero"],"queryContext":[]} -- !query @@ -232,7 +232,7 @@ SELECT regexp_extract_all('1a 2b 14m', '(\\d+)([a-z]+)', 3) struct<> -- !query output java.lang.IllegalArgumentException -Regex group count is 2, but the specified group index is 3 +{"errorClass":"legacy","messageParameters":["Regex group count is 2, but the specified group index is 3"],"queryContext":[]} -- !query @@ -241,7 +241,7 @@ SELECT regexp_extract_all('1a 2b 14m', '(\\d+)([a-z]+)', -1) struct<> -- !query 
output java.lang.IllegalArgumentException -The specified group index cannot be less than zero +{"errorClass":"legacy","messageParameters":["The specified group index cannot be less than zero"],"queryContext":[]} -- !query @@ -265,8 +265,8 @@ SELECT regexp_extract_all('abc', col0, 1) FROM VALUES('], [') AS t(col0) -- !query schema struct<> -- !query output -org.apache.spark.SparkException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +org.apache.spark.SparkRuntimeException +{"errorClass":"INVALID_PARAMETER_VALUE","sqlState":"22023","messageParameters":["regexp","`regexp_extract_all`","], ["],"queryContext":[]} -- !query @@ -283,7 +283,7 @@ SELECT regexp_replace('healthy, wealthy, and wise', '\\w+thy', 'something', -2) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'regexp_replace('healthy, wealthy, and wise', '\\\\w+thy', 'something', -2)' due to data type mismatch: Position expression must be positive, but got: -2; line 1 pos 7"],"queryContext":[]} -- !query @@ -292,7 +292,7 @@ SELECT regexp_replace('healthy, wealthy, and wise', '\\w+thy', 'something', 0) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'regexp_replace('healthy, wealthy, and wise', '\\\\w+thy', 'something', 0)' due to data type mismatch: Position expression must be positive, but got: 0; line 1 pos 7"],"queryContext":[]} -- !query @@ -564,5 +564,5 @@ SELECT regexp_instr('abc', col0, 1) FROM VALUES(') ?') AS t(col0) -- !query schema struct<> -- !query output -org.apache.spark.SparkException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +org.apache.spark.SparkRuntimeException +{"errorClass":"INVALID_PARAMETER_VALUE","sqlState":"22023","messageParameters":["regexp","`regexp_instr`",") ?"],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out b/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out index b75e350ec60f9..d6b950766a55e 100644 --- a/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out @@ -195,7 +195,7 @@ SHOW TABLE EXTENDED LIKE 'show_t*' PARTITION(c='Us', d=1) struct<> -- !query output org.apache.spark.sql.catalyst.analysis.NoSuchTableException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Table or view 'show_t*' not found in database 'showdb'"],"queryContext":[]} -- !query @@ -204,7 +204,7 @@ SHOW TABLE EXTENDED LIKE 'show_t1' PARTITION(c='Us') struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Partition spec is invalid. 
The spec (c) must match the partition spec (c, d) defined in table '`spark_catalog`.`showdb`.`show_t1`'"],"queryContext":[]} -- !query @@ -213,7 +213,7 @@ SHOW TABLE EXTENDED LIKE 'show_t1' PARTITION(a='Us', d=1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["a is not a valid partition column in table `spark_catalog`.`showdb`.`show_t1`."],"queryContext":[]} -- !query @@ -222,7 +222,7 @@ SHOW TABLE EXTENDED LIKE 'show_t1' PARTITION(c='Ch', d=1) struct<> -- !query output org.apache.spark.sql.catalyst.analysis.NoSuchPartitionException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Partition not found in table 'show_t1' database 'showdb':\nc -> Ch\nd -> 1"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/show-views.sql.out b/sql/core/src/test/resources/sql-tests/results/show-views.sql.out index 1fafe63a1e498..3ccf752ea8274 100644 --- a/sql/core/src/test/resources/sql-tests/results/show-views.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/show-views.sql.out @@ -139,7 +139,7 @@ SHOW VIEWS IN wrongdb LIKE 'view_*' struct<> -- !query output org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Database 'wrongdb' not found"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out b/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out index db3c245a25920..7eb0be70cda30 100644 --- a/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out @@ -91,7 +91,7 @@ SHOW COLUMNS IN badtable FROM showdb struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Table or view not found: showdb.badtable; line 1 pos 16"],"queryContext":[]} -- !query @@ -109,7 +109,7 @@ SHOW COLUMNS IN showdb.showcolumn1 FROM baddb struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["SHOW COLUMNS with conflicting databases: 'baddb' != 'showdb'"],"queryContext":[]} -- !query @@ -127,7 +127,7 @@ SHOW COLUMNS IN showdb.showcolumn3 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Table or view not found: showdb.showcolumn3; line 1 pos 16"],"queryContext":[]} -- !query @@ -136,7 +136,7 @@ SHOW COLUMNS IN showcolumn3 FROM showdb struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Table or view not found: showdb.showcolumn3; line 1 pos 16"],"queryContext":[]} -- !query @@ -145,7 +145,7 @@ SHOW COLUMNS IN showcolumn4 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Table or view not found: showcolumn4; line 1 pos 16"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/sql-compatibility-functions.sql.out 
b/sql/core/src/test/resources/sql-tests/results/sql-compatibility-functions.sql.out index 98a1e84bda32a..6508db2918e94 100644 --- a/sql/core/src/test/resources/sql-tests/results/sql-compatibility-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/sql-compatibility-functions.sql.out @@ -93,7 +93,7 @@ SELECT string(1, 2) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Function string accepts only one argument; line 1 pos 7"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out index 812a28d42bb47..b7c079d9cb123 100644 --- a/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out @@ -5,7 +5,7 @@ select concat_ws() struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["requirement failed: concat_ws requires at least one argument.; line 1 pos 7"],"queryContext":[]} -- !query @@ -14,7 +14,7 @@ select format_string() struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["requirement failed: format_string() should take at least 1 argument; line 1 pos 7"],"queryContext":[]} -- !query @@ -648,7 +648,7 @@ select decode() struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Invalid number of arguments for function decode. Expected: 2; Found: 0; line 1 pos 7"],"queryContext":[]} -- !query @@ -657,7 +657,7 @@ select decode(encode('abc', 'utf-8')) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Invalid number of arguments for function decode. Expected: 2; Found: 1; line 1 pos 7"],"queryContext":[]} -- !query @@ -1122,7 +1122,7 @@ select to_binary(null, cast(null as int)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["The 'format' parameter of function 'to_binary' needs to be a string literal.; line 1 pos 7"],"queryContext":[]} -- !query @@ -1131,7 +1131,7 @@ select to_binary('abc', 1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["The 'format' parameter of function 'to_binary' needs to be a string literal.; line 1 pos 7"],"queryContext":[]} -- !query @@ -1140,7 +1140,7 @@ select to_binary('abc', 'invalidFormat') struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Invalid value for the 'format' parameter of function 'to_binary': invalidformat. 
The value has to be a case-insensitive string literal of 'hex', 'utf-8', or 'base64'."],"queryContext":[]} -- !query @@ -1149,4 +1149,4 @@ select to_binary('a!', 'base64') struct<> -- !query output java.lang.IllegalArgumentException -Last unit does not have enough valid bits +{"errorClass":"legacy","messageParameters":["Last unit does not have enough valid bits"],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/subquery/in-subquery/in-basic.sql.out b/sql/core/src/test/resources/sql-tests/results/subquery/in-subquery/in-basic.sql.out index 80eaed011190c..8024899ad273d 100644 --- a/sql/core/src/test/resources/sql-tests/results/subquery/in-subquery/in-basic.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/subquery/in-subquery/in-basic.sql.out @@ -38,7 +38,7 @@ select 1 from tab_a where (a1, b1) not in (select (a2, b2) from tab_b) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(named_struct('a1', tab_a.a1, 'b1', tab_a.b1) IN (listquery()))' due to data type mismatch: \nThe number of columns in the left hand side of an IN subquery does not match the\nnumber of columns in the output of subquery.\n#columns in left hand side: 2.\n#columns in right hand side: 1.\nLeft side columns:\n[tab_a.a1, tab_a.b1].\nRight side columns:\n[`named_struct(a2, a2, b2, b2)`].; line 1 pos 35"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/subquery/negative-cases/invalid-correlation.sql.out b/sql/core/src/test/resources/sql-tests/results/subquery/negative-cases/invalid-correlation.sql.out index 6e705b8a5de7c..a0872812ed417 100644 --- a/sql/core/src/test/resources/sql-tests/results/subquery/negative-cases/invalid-correlation.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/subquery/negative-cases/invalid-correlation.sql.out @@ -43,7 +43,7 @@ AND t2b = (SELECT max(avg) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["grouping expressions sequence is empty, and 't2.t2b' is not an aggregate function. 
Wrap '(avg(t2.t2b) AS avg)' in windowing function(s) or wrap 't2.t2b' in first() (or first_value) if you don't care which value you get."],"queryContext":[]} -- !query @@ -60,7 +60,7 @@ WHERE t1a IN (SELECT min(t2a) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Resolved attribute(s) t2b#x missing from min(t2a)#x,t2c#x in operator !Filter t2c#x IN (list#x [t2b#x])."],"queryContext":[]} -- !query @@ -75,7 +75,7 @@ HAVING EXISTS (SELECT t2a struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Found an aggregate function in a correlated predicate that has both outer and local references, which is not supported: min((outer(t1.t1a) + t2.t2a))"],"queryContext":[]} -- !query @@ -91,7 +91,7 @@ WHERE t1a IN (SELECT t2a struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Found an aggregate function in a correlated predicate that has both outer and local references, which is not supported: min((outer(t2.t2a) + t3.t3a))"],"queryContext":[]} -- !query @@ -105,7 +105,7 @@ WHERE t1a IN (SELECT t2a struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Expressions referencing the outer query are not supported outside of WHERE/HAVING clauses:\nAggregate [min(outer(t2a#x)) AS min(outer(t2.t2a))#x]\n+- SubqueryAlias t3\n +- View (`t3`, [t3a#x,t3b#x,t3c#x])\n +- Project [cast(t3a#x as int) AS t3a#x, cast(t3b#x as int) AS t3b#x, cast(t3c#x as int) AS t3c#x]\n +- Project [t3a#x, t3b#x, t3c#x]\n +- SubqueryAlias t3\n +- LocalRelation [t3a#x, t3b#x, t3c#x]\n"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/subquery/negative-cases/subq-input-typecheck.sql.out b/sql/core/src/test/resources/sql-tests/results/subquery/negative-cases/subq-input-typecheck.sql.out index 54244128d0bfb..5ad574ad8e470 100644 --- a/sql/core/src/test/resources/sql-tests/results/subquery/negative-cases/subq-input-typecheck.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/subquery/negative-cases/subq-input-typecheck.sql.out @@ -61,7 +61,7 @@ FROM t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Scalar subquery must return only one column, but got 2"],"queryContext":[]} -- !query @@ -76,7 +76,7 @@ FROM t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Scalar subquery must return only one column, but got 2"],"queryContext":[]} -- !query @@ -89,7 +89,7 @@ t1a IN (SELECT t2a, t2b struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(t1.t1a IN (listquery(t1.t1a)))' due to data type mismatch: \nThe number of columns in the left hand side of an IN subquery does not match the\nnumber of columns in the output of subquery.\n#columns in left hand side: 1.\n#columns in right hand side: 2.\nLeft side columns:\n[t1.t1a].\nRight side 
columns:\n[t2.t2a, t2.t2b].; line 3 pos 4"],"queryContext":[]} -- !query @@ -102,7 +102,7 @@ WHERE struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(named_struct('t1a', t1.t1a, 't1b', t1.t1b) IN (listquery(t1.t1a)))' due to data type mismatch: \nThe number of columns in the left hand side of an IN subquery does not match the\nnumber of columns in the output of subquery.\n#columns in left hand side: 2.\n#columns in right hand side: 1.\nLeft side columns:\n[t1.t1a, t1.t1b].\nRight side columns:\n[t2.t2a].; line 3 pos 11"],"queryContext":[]} -- !query @@ -116,4 +116,4 @@ WHERE struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(named_struct('t4a', t4.t4a, 't4b', t4.t4b, 't4c', t4.t4c) IN (listquery()))' due to data type mismatch: \nThe data type of one or more elements in the left hand side of an IN subquery\nis not compatible with the data type of the output of the subquery\nMismatched columns:\n[(t4.t4a:double, t5.t5a:timestamp), (t4.t4c:string, t5.t5c:bigint)]\nLeft side:\n[double, string, string].\nRight side:\n[timestamp, string, bigint].; line 3 pos 16"],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/table-aliases.sql.out b/sql/core/src/test/resources/sql-tests/results/table-aliases.sql.out index 962722fada63e..628309410aa80 100644 --- a/sql/core/src/test/resources/sql-tests/results/table-aliases.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/table-aliases.sql.out @@ -39,7 +39,7 @@ SELECT * FROM testData AS t(col1, col2, col3) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Number of column aliases does not match number of columns. Number of column aliases: 3; number of columns: 2.; line 1 pos 14"],"queryContext":[]} -- !query @@ -48,7 +48,7 @@ SELECT * FROM testData AS t(col1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Number of column aliases does not match number of columns. 
Number of column aliases: 1; number of columns: 2.; line 1 pos 14"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/table-valued-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/table-valued-functions.sql.out index 25f4ea505400d..e4b07e3b4caef 100644 --- a/sql/core/src/test/resources/sql-tests/results/table-valued-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/table-valued-functions.sql.out @@ -5,7 +5,7 @@ select * from dummy(3) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["could not resolve `dummy` to a table-valued function; line 1 pos 14"],"queryContext":[]} -- !query @@ -67,7 +67,7 @@ select * from range(1, 1, 1, 1, 1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Table-valued function range with alternatives: \n range(start: long, end: long, step: long, numSlices: integer)\n range(start: long, end: long, step: long)\n range(start: long, end: long)\n range(end: long)\ncannot be applied to (integer, integer, integer, integer, integer): Invalid number of arguments for function range. Expected: one of 1, 2, 3 and 4; Found: 5; line 1 pos 14"],"queryContext":[]} -- !query @@ -76,7 +76,7 @@ select * from range(1, null) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Table-valued function range with alternatives: \n range(start: long, end: long, step: long, numSlices: integer)\n range(start: long, end: long, step: long)\n range(start: long, end: long)\n range(end: long)\ncannot be applied to (integer, void): Incompatible input data type. Expected: long; Found: void; line 1 pos 14"],"queryContext":[]} -- !query @@ -85,7 +85,7 @@ select * from range(array(1, 2, 3)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Table-valued function range with alternatives: \n range(start: long, end: long, step: long, numSlices: integer)\n range(start: long, end: long, step: long)\n range(start: long, end: long)\n range(end: long)\ncannot be applied to (array): Incompatible input data type. 
Expected: long; Found: array; line 1 pos 14"],"queryContext":[]} -- !query @@ -94,7 +94,7 @@ select * from range(0, 5, 0) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Table-valued function range with alternatives: \n range(start: long, end: long, step: long, numSlices: integer)\n range(start: long, end: long, step: long)\n range(start: long, end: long)\n range(end: long)\ncannot be applied to (integer, integer, integer): requirement failed: step (0) cannot be 0; line 1 pos 14"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/tablesample-negative.sql.out b/sql/core/src/test/resources/sql-tests/results/tablesample-negative.sql.out index 96ef730f1030c..85e84ef2e0231 100644 --- a/sql/core/src/test/resources/sql-tests/results/tablesample-negative.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/tablesample-negative.sql.out @@ -29,7 +29,7 @@ SELECT mydb1.t1 FROM t1 TABLESAMPLE (-1 PERCENT) struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nSampling fraction (-0.01) must be on interval [0, 1](line 1, pos 24)\n\n== SQL ==\nSELECT mydb1.t1 FROM t1 TABLESAMPLE (-1 PERCENT)\n------------------------^^^\n"],"queryContext":[]} -- !query @@ -38,7 +38,7 @@ SELECT mydb1.t1 FROM t1 TABLESAMPLE (101 PERCENT) struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nSampling fraction (1.01) must be on interval [0, 1](line 1, pos 24)\n\n== SQL ==\nSELECT mydb1.t1 FROM t1 TABLESAMPLE (101 PERCENT)\n------------------------^^^\n"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/timestamp-ntz.sql.out b/sql/core/src/test/resources/sql-tests/results/timestamp-ntz.sql.out index 3efcfd544fae7..473d2d94755d6 100644 --- a/sql/core/src/test/resources/sql-tests/results/timestamp-ntz.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/timestamp-ntz.sql.out @@ -45,7 +45,7 @@ SELECT make_timestamp_ntz(2021, 07, 11, 6, 30, 45.678, 'CET') struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Invalid number of arguments for function make_timestamp_ntz. 
Expected: 6; Found: 7; line 1 pos 7"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out index ae92d14472ce2..0eb6f010611a2 100644 --- a/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out @@ -13,7 +13,7 @@ select timestamp '2019-01-01中文' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot parse the TIMESTAMP value: 2019-01-01中文(line 1, pos 7)\n\n== SQL ==\nselect timestamp '2019-01-01中文'\n-------^^^\n"],"queryContext":[]} -- !query @@ -22,7 +22,7 @@ select timestamp'4294967297' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot parse the TIMESTAMP value: 4294967297(line 1, pos 7)\n\n== SQL ==\nselect timestamp'4294967297'\n-------^^^\n"],"queryContext":[]} -- !query @@ -31,7 +31,7 @@ select timestamp'2021-01-01T12:30:4294967297.123456' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot parse the TIMESTAMP value: 2021-01-01T12:30:4294967297.123456(line 1, pos 7)\n\n== SQL ==\nselect timestamp'2021-01-01T12:30:4294967297.123456'\n-------^^^\n"],"queryContext":[]} -- !query @@ -176,7 +176,7 @@ select TIMESTAMP_SECONDS(1230219000123123) struct<> -- !query output java.lang.ArithmeticException -long overflow +{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} -- !query @@ -185,7 +185,7 @@ select TIMESTAMP_SECONDS(-1230219000123123) struct<> -- !query output java.lang.ArithmeticException -long overflow +{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} -- !query @@ -194,7 +194,7 @@ select TIMESTAMP_MILLIS(92233720368547758) struct<> -- !query output java.lang.ArithmeticException -long overflow +{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} -- !query @@ -203,7 +203,7 @@ select TIMESTAMP_MILLIS(-92233720368547758) struct<> -- !query output java.lang.ArithmeticException -long overflow +{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} -- !query @@ -212,7 +212,7 @@ select TIMESTAMP_SECONDS(0.1234567) struct<> -- !query output java.lang.ArithmeticException -Rounding necessary +{"errorClass":"legacy","messageParameters":["Rounding necessary"],"queryContext":[]} -- !query @@ -590,7 +590,7 @@ select timestamp'2011-11-11 11:11:11' - '2011-11-11 11:11:10' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' - '2011-11-11 11:11:10')' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, ''2011-11-11 11:11:10'' is of string type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -599,7 +599,7 @@ select '2011-11-11 11:11:11' - timestamp'2011-11-11 11:11:10' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 
'('2011-11-11 11:11:11' - TIMESTAMP '2011-11-11 11:11:10')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, ''2011-11-11 11:11:11'' is of string type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -632,7 +632,7 @@ select str - timestamp'2011-11-11 11:11:11' from ts_view struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(ts_view.str - TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'ts_view.str' is of string type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -641,7 +641,7 @@ select timestamp'2011-11-11 11:11:11' - str from ts_view struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' - ts_view.str)' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'ts_view.str' is of string type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -650,7 +650,7 @@ select timestamp'2011-11-11 11:11:11' + '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' + CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(TIMESTAMP '2011-11-11 11:11:11' + CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7"],"queryContext":[]} -- !query @@ -659,7 +659,7 @@ select '1' + timestamp'2011-11-11 11:11:11' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) + TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) + TIMESTAMP '2011-11-11 11:11:11')' (double and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -668,7 +668,7 @@ select timestamp'2011-11-11 11:11:11' + null struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' + NULL)' due to data type mismatch: differing types in '(TIMESTAMP '2011-11-11 11:11:11' + NULL)' (timestamp and void).; line 1 pos 7"],"queryContext":[]} -- !query @@ -677,7 +677,7 @@ select null + timestamp'2011-11-11 11:11:11' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(NULL + TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(NULL + TIMESTAMP '2011-11-11 11:11:11')' (void and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out index 83add785b6efe..04535ca2f1836 100644 --- a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out @@ -13,7 +13,7 @@ select timestamp 
'2019-01-01中文' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot parse the TIMESTAMP value: 2019-01-01中文(line 1, pos 7)\n\n== SQL ==\nselect timestamp '2019-01-01中文'\n-------^^^\n"],"queryContext":[]} -- !query @@ -22,7 +22,7 @@ select timestamp'4294967297' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot parse the TIMESTAMP value: 4294967297(line 1, pos 7)\n\n== SQL ==\nselect timestamp'4294967297'\n-------^^^\n"],"queryContext":[]} -- !query @@ -31,7 +31,7 @@ select timestamp'2021-01-01T12:30:4294967297.123456' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot parse the TIMESTAMP value: 2021-01-01T12:30:4294967297.123456(line 1, pos 7)\n\n== SQL ==\nselect timestamp'2021-01-01T12:30:4294967297.123456'\n-------^^^\n"],"queryContext":[]} -- !query @@ -105,7 +105,7 @@ SELECT make_timestamp(1, 1, 1, 1, 1, 61) struct<> -- !query output java.time.DateTimeException -Invalid value for SecondOfMinute (valid values 0 - 59): 61. If necessary set spark.sql.ansi.enabled to false to bypass this error. +{"errorClass":"legacy","messageParameters":["Invalid value for SecondOfMinute (valid values 0 - 59): 61. If necessary set spark.sql.ansi.enabled to false to bypass this error."],"queryContext":[]} -- !query @@ -130,7 +130,7 @@ SELECT make_timestamp(1, 1, 1, 1, 1, 99.999999) struct<> -- !query output java.time.DateTimeException -Invalid value for SecondOfMinute (valid values 0 - 59): 99. If necessary set spark.sql.ansi.enabled to false to bypass this error. +{"errorClass":"legacy","messageParameters":["Invalid value for SecondOfMinute (valid values 0 - 59): 99. If necessary set spark.sql.ansi.enabled to false to bypass this error."],"queryContext":[]} -- !query @@ -139,7 +139,7 @@ SELECT make_timestamp(1, 1, 1, 1, 1, 999.999999) struct<> -- !query output java.time.DateTimeException -Invalid value for SecondOfMinute (valid values 0 - 59): 999. If necessary set spark.sql.ansi.enabled to false to bypass this error. +{"errorClass":"legacy","messageParameters":["Invalid value for SecondOfMinute (valid values 0 - 59): 999. 
If necessary set spark.sql.ansi.enabled to false to bypass this error."],"queryContext":[]} -- !query @@ -180,7 +180,7 @@ select TIMESTAMP_SECONDS(1230219000123123) struct<> -- !query output java.lang.ArithmeticException -long overflow +{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} -- !query @@ -189,7 +189,7 @@ select TIMESTAMP_SECONDS(-1230219000123123) struct<> -- !query output java.lang.ArithmeticException -long overflow +{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} -- !query @@ -198,7 +198,7 @@ select TIMESTAMP_MILLIS(92233720368547758) struct<> -- !query output java.lang.ArithmeticException -long overflow +{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} -- !query @@ -207,7 +207,7 @@ select TIMESTAMP_MILLIS(-92233720368547758) struct<> -- !query output java.lang.ArithmeticException -long overflow +{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} -- !query @@ -216,7 +216,7 @@ select TIMESTAMP_SECONDS(0.1234567) struct<> -- !query output java.lang.ArithmeticException -Rounding necessary +{"errorClass":"legacy","messageParameters":["Rounding necessary"],"queryContext":[]} -- !query @@ -657,7 +657,7 @@ select timestamp'2011-11-11 11:11:11' + '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + CAST('1' AS TIMESTAMP_NTZ))' due to data type mismatch: '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + CAST('1' AS TIMESTAMP_NTZ))' requires (numeric or interval day to second or interval year to month or interval) type, not timestamp_ntz; line 1 pos 7"],"queryContext":[]} -- !query @@ -666,7 +666,7 @@ select '1' + timestamp'2011-11-11 11:11:11' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS TIMESTAMP_NTZ) + TIMESTAMP_NTZ '2011-11-11 11:11:11')' due to data type mismatch: '(CAST('1' AS TIMESTAMP_NTZ) + TIMESTAMP_NTZ '2011-11-11 11:11:11')' requires (numeric or interval day to second or interval year to month or interval) type, not timestamp_ntz; line 1 pos 7"],"queryContext":[]} -- !query @@ -675,7 +675,7 @@ select timestamp'2011-11-11 11:11:11' + null struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + NULL)' due to data type mismatch: differing types in '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + NULL)' (timestamp_ntz and void).; line 1 pos 7"],"queryContext":[]} -- !query @@ -684,7 +684,7 @@ select null + timestamp'2011-11-11 11:11:11' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(NULL + TIMESTAMP_NTZ '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(NULL + TIMESTAMP_NTZ '2011-11-11 11:11:11')' (void and timestamp_ntz).; line 1 pos 7"],"queryContext":[]} -- !query @@ -716,7 +716,7 @@ select to_timestamp('2019-10-06 A', 'yyyy-MM-dd GGGGG') struct<> -- !query output java.lang.RuntimeException -Fail to recognize 'yyyy-MM-dd GGGGG' pattern in the DateTimeFormatter. 
You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"legacy","messageParameters":["Fail to recognize 'yyyy-MM-dd GGGGG' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html"],"queryContext":[]} -- !query @@ -725,7 +725,7 @@ select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEEE') struct<> -- !query output java.lang.RuntimeException -Fail to recognize 'dd MM yyyy EEEEEE' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"legacy","messageParameters":["Fail to recognize 'dd MM yyyy EEEEEE' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html"],"queryContext":[]} -- !query @@ -734,7 +734,7 @@ select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE') struct<> -- !query output java.lang.RuntimeException -Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"legacy","messageParameters":["Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out index 5d858ec119307..da67285241e83 100644 --- a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out @@ -13,7 +13,7 @@ select timestamp '2019-01-01中文' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot parse the TIMESTAMP value: 2019-01-01中文(line 1, pos 7)\n\n== SQL ==\nselect timestamp '2019-01-01中文'\n-------^^^\n"],"queryContext":[]} -- !query @@ -22,7 +22,7 @@ select timestamp'4294967297' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot parse the TIMESTAMP value: 4294967297(line 1, pos 7)\n\n== SQL ==\nselect timestamp'4294967297'\n-------^^^\n"],"queryContext":[]} -- !query @@ -31,7 +31,7 @@ select timestamp'2021-01-01T12:30:4294967297.123456' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nCannot parse the TIMESTAMP value: 2021-01-01T12:30:4294967297.123456(line 1, pos 7)\n\n== SQL ==\nselect timestamp'2021-01-01T12:30:4294967297.123456'\n-------^^^\n"],"queryContext":[]} -- !query @@ -176,7 +176,7 @@ select TIMESTAMP_SECONDS(1230219000123123) struct<> -- !query output java.lang.ArithmeticException -long overflow +{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} -- !query @@ -185,7 +185,7 @@ select TIMESTAMP_SECONDS(-1230219000123123) struct<> -- !query output 
java.lang.ArithmeticException -long overflow +{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} -- !query @@ -194,7 +194,7 @@ select TIMESTAMP_MILLIS(92233720368547758) struct<> -- !query output java.lang.ArithmeticException -long overflow +{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} -- !query @@ -203,7 +203,7 @@ select TIMESTAMP_MILLIS(-92233720368547758) struct<> -- !query output java.lang.ArithmeticException -long overflow +{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} -- !query @@ -212,7 +212,7 @@ select TIMESTAMP_SECONDS(0.1234567) struct<> -- !query output java.lang.ArithmeticException -Rounding necessary +{"errorClass":"legacy","messageParameters":["Rounding necessary"],"queryContext":[]} -- !query @@ -590,7 +590,7 @@ select timestamp'2011-11-11 11:11:11' - '2011-11-11 11:11:10' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(TIMESTAMP_NTZ '2011-11-11 11:11:11' - '2011-11-11 11:11:10')' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, ''2011-11-11 11:11:10'' is of string type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -599,7 +599,7 @@ select '2011-11-11 11:11:11' - timestamp'2011-11-11 11:11:10' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '('2011-11-11 11:11:11' - TIMESTAMP_NTZ '2011-11-11 11:11:10')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, ''2011-11-11 11:11:11'' is of string type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -632,7 +632,7 @@ select str - timestamp'2011-11-11 11:11:11' from ts_view struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(ts_view.str - TIMESTAMP_NTZ '2011-11-11 11:11:11')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'ts_view.str' is of string type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -641,7 +641,7 @@ select timestamp'2011-11-11 11:11:11' - str from ts_view struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(TIMESTAMP_NTZ '2011-11-11 11:11:11' - ts_view.str)' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'ts_view.str' is of string type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -650,7 +650,7 @@ select timestamp'2011-11-11 11:11:11' + '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + CAST('1' AS DOUBLE))' (timestamp_ntz and double).; line 1 pos 7"],"queryContext":[]} -- !query @@ -659,7 +659,7 @@ select '1' + timestamp'2011-11-11 11:11:11' struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) + TIMESTAMP_NTZ '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) + TIMESTAMP_NTZ '2011-11-11 11:11:11')' (double and timestamp_ntz).; line 1 pos 7"],"queryContext":[]} -- !query @@ -668,7 +668,7 @@ select timestamp'2011-11-11 11:11:11' + null struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + NULL)' due to data type mismatch: differing types in '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + NULL)' (timestamp_ntz and void).; line 1 pos 7"],"queryContext":[]} -- !query @@ -677,7 +677,7 @@ select null + timestamp'2011-11-11 11:11:11' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(NULL + TIMESTAMP_NTZ '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(NULL + TIMESTAMP_NTZ '2011-11-11 11:11:11')' (void and timestamp_ntz).; line 1 pos 7"],"queryContext":[]} -- !query @@ -709,7 +709,7 @@ select to_timestamp('2019-10-06 A', 'yyyy-MM-dd GGGGG') struct<> -- !query output java.lang.RuntimeException -Fail to recognize 'yyyy-MM-dd GGGGG' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"legacy","messageParameters":["Fail to recognize 'yyyy-MM-dd GGGGG' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html"],"queryContext":[]} -- !query @@ -718,7 +718,7 @@ select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEEE') struct<> -- !query output java.lang.RuntimeException -Fail to recognize 'dd MM yyyy EEEEEE' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"legacy","messageParameters":["Fail to recognize 'dd MM yyyy EEEEEE' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html"],"queryContext":[]} -- !query @@ -727,7 +727,7 @@ select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE') struct<> -- !query output java.lang.RuntimeException -Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html +{"errorClass":"legacy","messageParameters":["Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 
You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/timezone.sql.out b/sql/core/src/test/resources/sql-tests/results/timezone.sql.out index 8612341178a8e..dd8d7407a2a07 100644 --- a/sql/core/src/test/resources/sql-tests/results/timezone.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/timezone.sql.out @@ -45,7 +45,7 @@ SET TIME ZONE struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nInvalid time zone displacement value(line 1, pos 0)\n\n== SQL ==\nSET TIME ZONE\n^^^\n"],"queryContext":[]} -- !query @@ -54,7 +54,7 @@ SET TIME ZONE 'invalid/zone' struct<> -- !query output java.lang.IllegalArgumentException -'invalid/zone' in spark.sql.session.timeZone is invalid. Cannot resolve the given timezone with ZoneId.of(_, ZoneId.SHORT_IDS) +{"errorClass":"legacy","messageParameters":["'invalid/zone' in spark.sql.session.timeZone is invalid. Cannot resolve the given timezone with ZoneId.of(_, ZoneId.SHORT_IDS)"],"queryContext":[]} -- !query @@ -63,7 +63,7 @@ SET TIME ZONE INTERVAL 3 DAYS struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nThe interval value must be in the range of [-18, +18] hours with second precision(line 1, pos 14)\n\n== SQL ==\nSET TIME ZONE INTERVAL 3 DAYS\n--------------^^^\n"],"queryContext":[]} -- !query @@ -72,7 +72,7 @@ SET TIME ZONE INTERVAL 24 HOURS struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nThe interval value must be in the range of [-18, +18] hours with second precision(line 1, pos 14)\n\n== SQL ==\nSET TIME ZONE INTERVAL 24 HOURS\n--------------^^^\n"],"queryContext":[]} -- !query @@ -81,7 +81,7 @@ SET TIME ZONE INTERVAL '19:40:32' HOUR TO SECOND struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nThe interval value must be in the range of [-18, +18] hours with second precision(line 1, pos 14)\n\n== SQL ==\nSET TIME ZONE INTERVAL '19:40:32' HOUR TO SECOND\n--------------^^^\n"],"queryContext":[]} -- !query @@ -90,7 +90,7 @@ SET TIME ZONE INTERVAL 10 HOURS 'GMT+1' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nInvalid time zone displacement value(line 1, pos 0)\n\n== SQL ==\nSET TIME ZONE INTERVAL 10 HOURS 'GMT+1'\n^^^\n"],"queryContext":[]} -- !query @@ -99,4 +99,4 @@ SET TIME ZONE INTERVAL 10 HOURS 1 MILLISECOND struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nThe interval value must be in the range of [-18, +18] hours with second precision(line 1, pos 14)\n\n== SQL ==\nSET TIME ZONE INTERVAL 10 HOURS 1 MILLISECOND\n--------------^^^\n"],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/transform.sql.out 
b/sql/core/src/test/resources/sql-tests/results/transform.sql.out index d9f63aa8a1970..6ee99acaeab18 100644 --- a/sql/core/src/test/resources/sql-tests/results/transform.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/transform.sql.out @@ -400,7 +400,7 @@ SELECT a, b, decode(c, 'UTF-8'), d, e, f, g, h, i, j, k, l FROM ( struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nLINES TERMINATED BY only supports newline '\\n' right now: @(line 3, pos 4)\n\n== SQL ==\nSELECT a, b, decode(c, 'UTF-8'), d, e, f, g, h, i, j, k, l FROM (\n SELECT TRANSFORM(a, b, c, d, e, f, g, h, i, j, k, l)\n ROW FORMAT DELIMITED\n----^^^\n FIELDS TERMINATED BY ','\n LINES TERMINATED BY '@'\n NULL DEFINED AS 'NULL'\n USING 'cat' AS (\n a string,\n b string,\n c string,\n d string,\n e string,\n f string,\n g string,\n h string,\n i string,\n j string,\n k string,\n l string)\n ROW FORMAT DELIMITED\n FIELDS TERMINATED BY ','\n LINES TERMINATED BY '@'\n NULL DEFINED AS 'NULL'\n FROM t\n) tmp\n"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/try_arithmetic.sql.out b/sql/core/src/test/resources/sql-tests/results/try_arithmetic.sql.out index 7396b252e142e..758351f01b2e4 100644 --- a/sql/core/src/test/resources/sql-tests/results/try_arithmetic.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/try_arithmetic.sql.out @@ -141,7 +141,7 @@ SELECT try_add(interval 2 year, interval 2 second) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'INTERVAL '2' YEAR + INTERVAL '02' SECOND' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2' YEAR' is of interval year type.; line 1 pos 7"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/booleanEquality.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/booleanEquality.sql.out index 9110a17eb1ede..a4bdad588ad3e 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/booleanEquality.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/booleanEquality.sql.out @@ -77,7 +77,7 @@ SELECT true = cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(true = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(true = CAST('1' AS BINARY))' (boolean and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -94,7 +94,7 @@ SELECT true = cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(true = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(true = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (boolean and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -103,7 +103,7 @@ SELECT true = cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} 
+{"errorClass":"legacy","messageParameters":["cannot resolve '(true = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(true = CAST('2017-12-11 09:30:00' AS DATE))' (boolean and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -176,7 +176,7 @@ SELECT true <=> cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(true <=> CAST('1' AS BINARY))' due to data type mismatch: differing types in '(true <=> CAST('1' AS BINARY))' (boolean and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -193,7 +193,7 @@ SELECT true <=> cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(true <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(true <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (boolean and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -202,7 +202,7 @@ SELECT true <=> cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(true <=> CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(true <=> CAST('2017-12-11 09:30:00' AS DATE))' (boolean and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -275,7 +275,7 @@ SELECT cast('1' as binary) = true FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) = true)' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = true)' (binary and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -292,7 +292,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) = true FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = true)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = true)' (timestamp and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -301,7 +301,7 @@ SELECT cast('2017-12-11 09:30:00' as date) = true FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = true)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = true)' (date and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -374,7 +374,7 @@ SELECT cast('1' as binary) <=> true FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) <=> true)' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <=> true)' (binary and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -391,7 +391,7 @@ SELECT cast('2017-12-11 09:30:00.0' as 
timestamp) <=> true FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> true)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> true)' (timestamp and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -400,7 +400,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <=> true FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <=> true)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <=> true)' (date and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -473,7 +473,7 @@ SELECT false = cast('0' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(false = CAST('0' AS BINARY))' due to data type mismatch: differing types in '(false = CAST('0' AS BINARY))' (boolean and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -490,7 +490,7 @@ SELECT false = cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(false = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(false = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (boolean and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -499,7 +499,7 @@ SELECT false = cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(false = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(false = CAST('2017-12-11 09:30:00' AS DATE))' (boolean and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -572,7 +572,7 @@ SELECT false <=> cast('0' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(false <=> CAST('0' AS BINARY))' due to data type mismatch: differing types in '(false <=> CAST('0' AS BINARY))' (boolean and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -589,7 +589,7 @@ SELECT false <=> cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(false <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(false <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (boolean and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -598,7 +598,7 @@ SELECT false <=> cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(false <=> CAST('2017-12-11 
09:30:00' AS DATE))' due to data type mismatch: differing types in '(false <=> CAST('2017-12-11 09:30:00' AS DATE))' (boolean and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -671,7 +671,7 @@ SELECT cast('0' as binary) = false FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('0' AS BINARY) = false)' due to data type mismatch: differing types in '(CAST('0' AS BINARY) = false)' (binary and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -688,7 +688,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) = false FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = false)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = false)' (timestamp and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -697,7 +697,7 @@ SELECT cast('2017-12-11 09:30:00' as date) = false FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = false)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = false)' (date and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -770,7 +770,7 @@ SELECT cast('0' as binary) <=> false FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('0' AS BINARY) <=> false)' due to data type mismatch: differing types in '(CAST('0' AS BINARY) <=> false)' (binary and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -787,7 +787,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <=> false FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> false)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> false)' (timestamp and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -796,4 +796,4 @@ SELECT cast('2017-12-11 09:30:00' as date) <=> false FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <=> false)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <=> false)' (date and boolean).; line 1 pos 7"],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/caseWhenCoercion.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/caseWhenCoercion.sql.out index e505da36f4926..0c246d3ed8700 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/caseWhenCoercion.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/caseWhenCoercion.sql.out @@ -77,7 +77,7 @@ SELECT CASE WHEN true THEN cast(1 as tinyint) ELSE cast('2' as binary) END FROM struct<> -- !query output 
org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS TINYINT) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN tinyint ELSE binary END; line 1 pos 7"],"queryContext":[]} -- !query @@ -86,7 +86,7 @@ SELECT CASE WHEN true THEN cast(1 as tinyint) ELSE cast(2 as boolean) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS TINYINT) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN tinyint ELSE boolean END; line 1 pos 7"],"queryContext":[]} -- !query @@ -95,7 +95,7 @@ SELECT CASE WHEN true THEN cast(1 as tinyint) ELSE cast('2017-12-11 09:30:00.0' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS TINYINT) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN tinyint ELSE timestamp END; line 1 pos 7"],"queryContext":[]} -- !query @@ -104,7 +104,7 @@ SELECT CASE WHEN true THEN cast(1 as tinyint) ELSE cast('2017-12-11 09:30:00' as struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS TINYINT) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN tinyint ELSE date END; line 1 pos 7"],"queryContext":[]} -- !query @@ -177,7 +177,7 @@ SELECT CASE WHEN true THEN cast(1 as smallint) ELSE cast('2' as binary) END FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS SMALLINT) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN smallint ELSE binary END; line 1 pos 7"],"queryContext":[]} -- !query @@ -186,7 +186,7 @@ SELECT CASE WHEN true THEN cast(1 as smallint) ELSE cast(2 as boolean) END FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS SMALLINT) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN smallint ELSE boolean END; line 1 pos 7"],"queryContext":[]} -- !query @@ -195,7 +195,7 @@ SELECT CASE WHEN true THEN cast(1 as smallint) ELSE cast('2017-12-11 09:30:00.0' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS SMALLINT) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN smallint ELSE timestamp END; line 1 pos 7"],"queryContext":[]} -- !query @@ -204,7 +204,7 @@ SELECT CASE WHEN true THEN cast(1 as smallint) ELSE cast('2017-12-11 09:30:00' a struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS SMALLINT) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN smallint ELSE date END; line 1 pos 7"],"queryContext":[]} -- !query @@ -277,7 +277,7 @@ SELECT CASE WHEN true THEN cast(1 as int) ELSE cast('2' as binary) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS INT) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN int ELSE binary END; line 1 pos 7"],"queryContext":[]} -- !query @@ -286,7 +286,7 @@ SELECT CASE WHEN true THEN cast(1 as int) ELSE cast(2 as boolean) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS INT) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN int ELSE boolean END; line 1 pos 7"],"queryContext":[]} -- !query @@ -295,7 +295,7 @@ SELECT CASE WHEN true THEN cast(1 as int) ELSE cast('2017-12-11 09:30:00.0' as t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS INT) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN int ELSE timestamp END; line 1 pos 7"],"queryContext":[]} -- !query @@ -304,7 +304,7 @@ SELECT CASE WHEN true THEN cast(1 as int) ELSE cast('2017-12-11 09:30:00' as dat struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS INT) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN int ELSE date END; line 1 pos 7"],"queryContext":[]} -- !query @@ -377,7 +377,7 @@ SELECT CASE WHEN true THEN cast(1 as bigint) ELSE cast('2' as binary) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS BIGINT) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN bigint ELSE binary END; line 1 pos 7"],"queryContext":[]} -- !query @@ -386,7 +386,7 @@ SELECT CASE WHEN true THEN cast(1 as bigint) ELSE cast(2 as boolean) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS BIGINT) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN bigint ELSE boolean END; line 1 pos 7"],"queryContext":[]} -- !query @@ -395,7 +395,7 @@ SELECT CASE WHEN true THEN cast(1 as bigint) ELSE cast('2017-12-11 09:30:00.0' a struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS BIGINT) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN bigint ELSE timestamp END; line 1 pos 7"],"queryContext":[]} -- !query @@ -404,7 +404,7 @@ SELECT CASE WHEN true THEN cast(1 as bigint) ELSE cast('2017-12-11 09:30:00' as struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS BIGINT) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN bigint ELSE date END; line 1 pos 7"],"queryContext":[]} -- !query @@ -477,7 +477,7 @@ SELECT CASE WHEN true THEN cast(1 as float) ELSE cast('2' as binary) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS FLOAT) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN float ELSE binary END; line 1 pos 7"],"queryContext":[]} -- !query @@ -486,7 +486,7 @@ SELECT CASE WHEN true THEN cast(1 as float) ELSE cast(2 as boolean) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS FLOAT) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN float ELSE boolean END; line 1 pos 7"],"queryContext":[]} -- !query @@ -495,7 +495,7 @@ SELECT CASE WHEN true THEN cast(1 as float) ELSE cast('2017-12-11 09:30:00.0' as struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS FLOAT) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN float ELSE timestamp END; line 1 pos 7"],"queryContext":[]} -- !query @@ -504,7 +504,7 @@ SELECT CASE WHEN true THEN cast(1 as float) ELSE cast('2017-12-11 09:30:00' as d struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS FLOAT) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN float ELSE date END; line 1 pos 7"],"queryContext":[]} -- !query @@ -577,7 +577,7 @@ SELECT CASE WHEN true THEN cast(1 as double) ELSE cast('2' as binary) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS DOUBLE) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN double ELSE binary END; line 1 pos 7"],"queryContext":[]} -- !query @@ -586,7 +586,7 @@ SELECT CASE WHEN true THEN cast(1 as double) ELSE cast(2 as boolean) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS DOUBLE) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN double ELSE boolean END; line 1 pos 7"],"queryContext":[]} -- !query @@ -595,7 +595,7 @@ SELECT CASE WHEN true THEN cast(1 as double) ELSE cast('2017-12-11 09:30:00.0' a struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS DOUBLE) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN double ELSE timestamp END; line 1 pos 7"],"queryContext":[]} -- !query @@ -604,7 +604,7 @@ SELECT CASE WHEN true THEN cast(1 as double) ELSE cast('2017-12-11 09:30:00' as struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS DOUBLE) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN double ELSE date END; line 1 pos 7"],"queryContext":[]} -- !query @@ -677,7 +677,7 @@ SELECT CASE WHEN true THEN cast(1 as decimal(10, 0)) ELSE cast('2' as binary) EN struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS DECIMAL(10,0)) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN decimal(10,0) ELSE binary END; line 1 pos 7"],"queryContext":[]} -- !query @@ -686,7 +686,7 @@ SELECT CASE WHEN true THEN cast(1 as decimal(10, 0)) ELSE cast(2 as boolean) END struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS DECIMAL(10,0)) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN decimal(10,0) ELSE boolean END; line 1 pos 7"],"queryContext":[]} -- !query @@ -695,7 +695,7 @@ SELECT CASE WHEN true THEN cast(1 as decimal(10, 0)) ELSE cast('2017-12-11 09:30 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS DECIMAL(10,0)) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN decimal(10,0) ELSE timestamp END; line 1 pos 7"],"queryContext":[]} -- !query @@ -704,7 +704,7 @@ SELECT CASE WHEN true THEN cast(1 as decimal(10, 0)) ELSE cast('2017-12-11 09:30 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS DECIMAL(10,0)) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN decimal(10,0) ELSE date END; line 1 pos 7"],"queryContext":[]} -- !query @@ -777,7 +777,7 @@ SELECT CASE WHEN true THEN cast(1 as string) ELSE cast('2' as binary) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS STRING) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN string ELSE binary END; line 1 pos 7"],"queryContext":[]} -- !query @@ -786,7 +786,7 @@ SELECT CASE WHEN true THEN cast(1 as string) ELSE cast(2 as boolean) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS STRING) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN string ELSE boolean END; line 1 pos 7"],"queryContext":[]} -- !query @@ -811,7 +811,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast(2 as tinyint) END FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS TINYINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE tinyint END; line 1 pos 7"],"queryContext":[]} -- !query @@ -820,7 +820,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast(2 as smallint) END FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS SMALLINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE smallint END; line 1 pos 7"],"queryContext":[]} -- !query @@ -829,7 +829,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast(2 as int) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS INT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE int END; line 1 pos 7"],"queryContext":[]} -- !query @@ -838,7 +838,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast(2 as bigint) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS BIGINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE bigint END; line 1 pos 7"],"queryContext":[]} -- !query @@ -847,7 +847,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast(2 as float) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS FLOAT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE float END; line 1 pos 7"],"queryContext":[]} -- !query @@ -856,7 +856,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast(2 as double) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS DOUBLE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN binary ELSE double END; line 1 pos 7"],"queryContext":[]} -- !query @@ -865,7 +865,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast(2 as decimal(10, 0)) EN struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS DECIMAL(10,0)) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE decimal(10,0) END; line 1 pos 7"],"queryContext":[]} -- !query @@ -874,7 +874,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast(2 as string) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS STRING) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE string END; line 1 pos 7"],"queryContext":[]} -- !query @@ -891,7 +891,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast(2 as boolean) END FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE boolean END; line 1 pos 7"],"queryContext":[]} -- !query @@ -900,7 +900,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast('2017-12-11 09:30:00.0' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE timestamp END; line 1 pos 7"],"queryContext":[]} -- !query @@ -909,7 +909,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast('2017-12-11 09:30:00' a struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE date END; line 1 pos 7"],"queryContext":[]} -- !query @@ -918,7 +918,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast(2 as tinyint) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS TINYINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN boolean ELSE tinyint END; line 1 pos 7"],"queryContext":[]} -- !query @@ -927,7 +927,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast(2 as smallint) END FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS SMALLINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE smallint END; line 1 pos 7"],"queryContext":[]} -- !query @@ -936,7 +936,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast(2 as int) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS INT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE int END; line 1 pos 7"],"queryContext":[]} -- !query @@ -945,7 +945,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast(2 as bigint) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS BIGINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE bigint END; line 1 pos 7"],"queryContext":[]} -- !query @@ -954,7 +954,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast(2 as float) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS FLOAT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE float END; line 1 pos 7"],"queryContext":[]} -- !query @@ -963,7 +963,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast(2 as double) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS DOUBLE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE double END; line 1 pos 7"],"queryContext":[]} -- !query @@ -972,7 +972,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast(2 as decimal(10, 0)) END struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS DECIMAL(10,0)) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN boolean ELSE decimal(10,0) END; line 1 pos 7"],"queryContext":[]} -- !query @@ -981,7 +981,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast(2 as string) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS STRING) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE string END; line 1 pos 7"],"queryContext":[]} -- !query @@ -990,7 +990,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast('2' as binary) END FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE binary END; line 1 pos 7"],"queryContext":[]} -- !query @@ -1007,7 +1007,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast('2017-12-11 09:30:00.0' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE timestamp END; line 1 pos 7"],"queryContext":[]} -- !query @@ -1016,7 +1016,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast('2017-12-11 09:30:00' as struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE date END; line 1 pos 7"],"queryContext":[]} -- !query @@ -1025,7 +1025,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00.0' as timestamp) ELSE cast( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS TINYINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE tinyint END; line 1 pos 7"],"queryContext":[]} -- !query @@ -1034,7 +1034,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00.0' as timestamp) ELSE cast( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS SMALLINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN timestamp ELSE smallint END; line 1 pos 7"],"queryContext":[]} -- !query @@ -1043,7 +1043,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00.0' as timestamp) ELSE cast( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS INT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE int END; line 1 pos 7"],"queryContext":[]} -- !query @@ -1052,7 +1052,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00.0' as timestamp) ELSE cast( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS BIGINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE bigint END; line 1 pos 7"],"queryContext":[]} -- !query @@ -1061,7 +1061,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00.0' as timestamp) ELSE cast( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS FLOAT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE float END; line 1 pos 7"],"queryContext":[]} -- !query @@ -1070,7 +1070,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00.0' as timestamp) ELSE cast( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS DOUBLE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE double END; line 1 pos 7"],"queryContext":[]} -- !query @@ -1079,7 +1079,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00.0' as timestamp) ELSE cast( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS DECIMAL(10,0)) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE decimal(10,0) END; line 1 pos 7"],"queryContext":[]} -- !query @@ -1096,7 +1096,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00.0' as timestamp) ELSE cast( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN timestamp ELSE binary END; line 1 pos 7"],"queryContext":[]} -- !query @@ -1105,7 +1105,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00.0' as timestamp) ELSE cast( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE boolean END; line 1 pos 7"],"queryContext":[]} -- !query @@ -1130,7 +1130,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00' as date) ELSE cast(2 as ti struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS TINYINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE tinyint END; line 1 pos 7"],"queryContext":[]} -- !query @@ -1139,7 +1139,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00' as date) ELSE cast(2 as sm struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS SMALLINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE smallint END; line 1 pos 7"],"queryContext":[]} -- !query @@ -1148,7 +1148,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00' as date) ELSE cast(2 as in struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS INT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE int END; line 1 pos 7"],"queryContext":[]} -- !query @@ -1157,7 +1157,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00' as date) ELSE cast(2 as bi struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS BIGINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE bigint END; line 1 pos 7"],"queryContext":[]} -- !query @@ -1166,7 +1166,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00' as date) ELSE cast(2 as fl struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS FLOAT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN date ELSE float END; line 1 pos 7"],"queryContext":[]} -- !query @@ -1175,7 +1175,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00' as date) ELSE cast(2 as do struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS DOUBLE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE double END; line 1 pos 7"],"queryContext":[]} -- !query @@ -1184,7 +1184,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00' as date) ELSE cast(2 as de struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS DECIMAL(10,0)) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE decimal(10,0) END; line 1 pos 7"],"queryContext":[]} -- !query @@ -1201,7 +1201,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00' as date) ELSE cast('2' as struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE binary END; line 1 pos 7"],"queryContext":[]} -- !query @@ -1210,7 +1210,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00' as date) ELSE cast(2 as bo struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN date ELSE boolean END; line 1 pos 7"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/dateTimeOperations.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/dateTimeOperations.sql.out index 8e1d1dedc8ce9..f6908adc5dfbc 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/dateTimeOperations.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/dateTimeOperations.sql.out @@ -13,7 +13,7 @@ select cast(1 as tinyint) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS TINYINT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS TINYINT)' is of tinyint type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -22,7 +22,7 @@ select cast(1 as smallint) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS SMALLINT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS SMALLINT)' is of smallint type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -31,7 +31,7 @@ select cast(1 as int) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS INT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS INT)' is of int type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -40,7 +40,7 @@ select cast(1 as bigint) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS BIGINT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS BIGINT)' is of bigint type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -49,7 +49,7 @@ select cast(1 as float) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS FLOAT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS FLOAT)' is of float type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -58,7 +58,7 @@ select cast(1 as double) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS DOUBLE) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DOUBLE)' is of double type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -67,7 +67,7 @@ select cast(1 as decimal(10, 0)) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS DECIMAL(10,0)) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -92,7 +92,7 @@ select cast('1' as binary) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST('1' AS BINARY) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST('1' AS BINARY)' is of binary type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -101,7 +101,7 @@ select cast(1 as boolean) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS BOOLEAN) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS BOOLEAN)' is of boolean type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -126,7 +126,7 @@ select interval 2 day + cast(1 as tinyint) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS TINYINT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS TINYINT)' is of tinyint type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -135,7 +135,7 @@ select interval 2 day + cast(1 as smallint) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS SMALLINT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS SMALLINT)' is of smallint type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -144,7 +144,7 @@ select interval 2 day + cast(1 as int) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS INT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS INT)' is of int type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -153,7 +153,7 @@ select interval 2 day + cast(1 as bigint) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS BIGINT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS BIGINT)' is of bigint type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -162,7 +162,7 @@ select interval 2 day + cast(1 as float) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS FLOAT) + INTERVAL '2' DAY' due to data type 
mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS FLOAT)' is of float type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -171,7 +171,7 @@ select interval 2 day + cast(1 as double) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS DOUBLE) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DOUBLE)' is of double type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -180,7 +180,7 @@ select interval 2 day + cast(1 as decimal(10, 0)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS DECIMAL(10,0)) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -205,7 +205,7 @@ select interval 2 day + cast('1' as binary) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST('1' AS BINARY) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST('1' AS BINARY)' is of binary type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -214,7 +214,7 @@ select interval 2 day + cast(1 as boolean) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS BOOLEAN) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS BOOLEAN)' is of boolean type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -239,7 +239,7 @@ select cast(1 as tinyint) - interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS TINYINT) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS TINYINT)' is of tinyint type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -248,7 +248,7 @@ select cast(1 as smallint) - interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS SMALLINT) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS SMALLINT)' is of smallint type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -257,7 +257,7 @@ select cast(1 as int) - interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS INT) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS INT)' is of int type.; line 1 pos 
7"],"queryContext":[]} -- !query @@ -266,7 +266,7 @@ select cast(1 as bigint) - interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS BIGINT) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS BIGINT)' is of bigint type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -275,7 +275,7 @@ select cast(1 as float) - interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS FLOAT) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS FLOAT)' is of float type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -284,7 +284,7 @@ select cast(1 as double) - interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS DOUBLE) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DOUBLE)' is of double type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -293,7 +293,7 @@ select cast(1 as decimal(10, 0)) - interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS DECIMAL(10,0)) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -318,7 +318,7 @@ select cast('1' as binary) - interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST('1' AS BINARY) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST('1' AS BINARY)' is of binary type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -327,7 +327,7 @@ select cast(1 as boolean) - interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS BOOLEAN) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS BOOLEAN)' is of boolean type.; line 1 pos 7"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/decimalPrecision.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/decimalPrecision.sql.out index b263fe1b0c5f9..bfac6e87dac44 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/decimalPrecision.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/decimalPrecision.sql.out @@ -237,7 +237,7 @@ SELECT cast('1' as binary) + cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -246,7 +246,7 @@ SELECT cast('1' as binary) + cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -255,7 +255,7 @@ SELECT cast('1' as binary) + cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -264,7 +264,7 @@ SELECT cast('1' as binary) + cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -273,7 +273,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) + cast(1 as decimal(3, 0)) FRO struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -282,7 +282,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) + cast(1 as decimal(5, 0)) FRO struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -291,7 +291,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) + cast(1 as decimal(10, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -300,7 +300,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) + 
cast(1 as decimal(20, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -309,7 +309,7 @@ SELECT cast('2017-12-11 09:30:00' as date) + cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -318,7 +318,7 @@ SELECT cast('2017-12-11 09:30:00' as date) + cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -327,7 +327,7 @@ SELECT cast('2017-12-11 09:30:00' as date) + cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -336,7 +336,7 @@ SELECT cast('2017-12-11 09:30:00' as date) + cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -601,7 +601,7 @@ SELECT cast(1 as decimal(3, 0)) + cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) + CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) + CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -610,7 +610,7 @@ SELECT cast(1 as decimal(5, 0)) + cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) + CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) + 
CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -619,7 +619,7 @@ SELECT cast(1 as decimal(10, 0)) + cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) + CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) + CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -628,7 +628,7 @@ SELECT cast(1 as decimal(20, 0)) + cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) + CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) + CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -637,7 +637,7 @@ SELECT cast(1 as decimal(3, 0)) + cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) + CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) + CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -646,7 +646,7 @@ SELECT cast(1 as decimal(5, 0)) + cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) + CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) + CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -655,7 +655,7 @@ SELECT cast(1 as decimal(10, 0)) + cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) + CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) + CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -664,7 +664,7 @@ SELECT cast(1 as decimal(20, 0)) + cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) + CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) + CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -673,7 +673,7 @@ SELECT cast(1 as decimal(3, 0)) + cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 
7"],"queryContext":[]} -- !query @@ -682,7 +682,7 @@ SELECT cast(1 as decimal(5, 0)) + cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -691,7 +691,7 @@ SELECT cast(1 as decimal(10, 0)) + cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -700,7 +700,7 @@ SELECT cast(1 as decimal(20, 0)) + cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -709,7 +709,7 @@ SELECT cast(1 as decimal(3, 0)) + cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -718,7 +718,7 @@ SELECT cast(1 as decimal(5, 0)) + cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -727,7 +727,7 @@ SELECT cast(1 as decimal(10, 0)) + cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -736,7 +736,7 @@ SELECT cast(1 as decimal(20, 0)) + cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -969,7 +969,7 @@ SELECT cast('1' as binary) - cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -978,7 +978,7 @@ SELECT cast('1' as binary) - cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -987,7 +987,7 @@ SELECT cast('1' as binary) - cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -996,7 +996,7 @@ SELECT cast('1' as binary) - cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1005,7 +1005,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) - cast(1 as decimal(3, 0)) FRO struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -1014,7 +1014,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) - cast(1 as decimal(5, 0)) FRO struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -1023,7 +1023,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) - cast(1 
as decimal(10, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -1032,7 +1032,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) - cast(1 as decimal(20, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -1041,7 +1041,7 @@ SELECT cast('2017-12-11 09:30:00' as date) - cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'date_sub(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -1050,7 +1050,7 @@ SELECT cast('2017-12-11 09:30:00' as date) - cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'date_sub(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -1059,7 +1059,7 @@ SELECT cast('2017-12-11 09:30:00' as date) - cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'date_sub(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -1068,7 +1068,7 @@ SELECT cast('2017-12-11 09:30:00' as date) - cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'date_sub(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -1333,7 +1333,7 @@ SELECT cast(1 as decimal(3, 0)) - cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) - 
CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) - CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1342,7 +1342,7 @@ SELECT cast(1 as decimal(5, 0)) - cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) - CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) - CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1351,7 +1351,7 @@ SELECT cast(1 as decimal(10, 0)) - cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) - CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) - CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1360,7 +1360,7 @@ SELECT cast(1 as decimal(20, 0)) - cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) - CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) - CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1369,7 +1369,7 @@ SELECT cast(1 as decimal(3, 0)) - cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) - CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) - CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1378,7 +1378,7 @@ SELECT cast(1 as decimal(5, 0)) - cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) - CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) - CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1387,7 +1387,7 @@ SELECT cast(1 as decimal(10, 0)) - cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) - CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) - CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1396,7 +1396,7 @@ SELECT cast(1 as decimal(20, 0)) - cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) - CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) - CAST(1 AS BOOLEAN))' 
(decimal(20,0) and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1405,7 +1405,7 @@ SELECT cast(1 as decimal(3, 0)) - cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -1414,7 +1414,7 @@ SELECT cast(1 as decimal(5, 0)) - cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -1423,7 +1423,7 @@ SELECT cast(1 as decimal(10, 0)) - cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -1432,7 +1432,7 @@ SELECT cast(1 as decimal(20, 0)) - cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -1441,7 +1441,7 @@ SELECT cast(1 as decimal(3, 0)) - cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) - CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: argument 1 requires date type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -1450,7 +1450,7 @@ SELECT cast(1 as decimal(5, 0)) - cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) - CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: argument 1 requires date type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -1459,7 +1459,7 @@ SELECT cast(1 as decimal(10, 0)) - cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) - CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: argument 1 requires date type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -1468,7 +1468,7 @@ SELECT cast(1 as decimal(20, 0)) - cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) - CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: argument 1 requires date type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -1701,7 +1701,7 @@ SELECT cast('1' as binary) * cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1710,7 +1710,7 @@ SELECT cast('1' as binary) * cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1719,7 +1719,7 @@ SELECT cast('1' as binary) * cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1728,7 +1728,7 @@ SELECT cast('1' as binary) * cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1737,7 +1737,7 @@ SELECT cast('2017*12*11 09:30:00.0' as timestamp) * cast(1 as decimal(3, 0)) FRO struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1746,7 +1746,7 @@ SELECT cast('2017*12*11 09:30:00.0' as timestamp) * cast(1 as decimal(5, 0)) FRO struct<> -- !query output 
org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1755,7 +1755,7 @@ SELECT cast('2017*12*11 09:30:00.0' as timestamp) * cast(1 as decimal(10, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1764,7 +1764,7 @@ SELECT cast('2017*12*11 09:30:00.0' as timestamp) * cast(1 as decimal(20, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1773,7 +1773,7 @@ SELECT cast('2017*12*11 09:30:00' as date) * cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1782,7 +1782,7 @@ SELECT cast('2017*12*11 09:30:00' as date) * cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1791,7 +1791,7 @@ SELECT cast('2017*12*11 09:30:00' as date) * cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1800,7 +1800,7 @@ SELECT cast('2017*12*11 09:30:00' as date) * cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing 
types in '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2065,7 +2065,7 @@ SELECT cast(1 as decimal(3, 0)) * cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) * CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) * CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2074,7 +2074,7 @@ SELECT cast(1 as decimal(5, 0)) * cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) * CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) * CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2083,7 +2083,7 @@ SELECT cast(1 as decimal(10, 0)) * cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) * CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) * CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2092,7 +2092,7 @@ SELECT cast(1 as decimal(20, 0)) * cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) * CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) * CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2101,7 +2101,7 @@ SELECT cast(1 as decimal(3, 0)) * cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) * CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) * CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2110,7 +2110,7 @@ SELECT cast(1 as decimal(5, 0)) * cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) * CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) * CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2119,7 +2119,7 @@ SELECT cast(1 as decimal(10, 0)) * cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) * CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) * CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 
7"],"queryContext":[]} -- !query @@ -2128,7 +2128,7 @@ SELECT cast(1 as decimal(20, 0)) * cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) * CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) * CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2137,7 +2137,7 @@ SELECT cast(1 as decimal(3, 0)) * cast('2017*12*11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2146,7 +2146,7 @@ SELECT cast(1 as decimal(5, 0)) * cast('2017*12*11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2155,7 +2155,7 @@ SELECT cast(1 as decimal(10, 0)) * cast('2017*12*11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2164,7 +2164,7 @@ SELECT cast(1 as decimal(20, 0)) * cast('2017*12*11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2173,7 +2173,7 @@ SELECT cast(1 as decimal(3, 0)) * cast('2017*12*11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) * CAST('2017*12*11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) * CAST('2017*12*11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2182,7 +2182,7 @@ SELECT cast(1 as decimal(5, 0)) * cast('2017*12*11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} 
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) * CAST('2017*12*11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) * CAST('2017*12*11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2191,7 +2191,7 @@ SELECT cast(1 as decimal(10, 0)) * cast('2017*12*11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) * CAST('2017*12*11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) * CAST('2017*12*11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2200,7 +2200,7 @@ SELECT cast(1 as decimal(20, 0)) * cast('2017*12*11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) * CAST('2017*12*11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) * CAST('2017*12*11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2433,7 +2433,7 @@ SELECT cast('1' as binary) / cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2442,7 +2442,7 @@ SELECT cast('1' as binary) / cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2451,7 +2451,7 @@ SELECT cast('1' as binary) / cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2460,7 +2460,7 @@ SELECT cast('1' as binary) / cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2469,7 +2469,7 @@ SELECT cast('2017/12/11 09:30:00.0' as timestamp) / cast(1 as decimal(3, 0)) FRO struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2478,7 +2478,7 @@ SELECT cast('2017/12/11 09:30:00.0' as timestamp) / cast(1 as decimal(5, 0)) FRO struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2487,7 +2487,7 @@ SELECT cast('2017/12/11 09:30:00.0' as timestamp) / cast(1 as decimal(10, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2496,7 +2496,7 @@ SELECT cast('2017/12/11 09:30:00.0' as timestamp) / cast(1 as decimal(20, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2505,7 +2505,7 @@ SELECT cast('2017/12/11 09:30:00' as date) / cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2514,7 +2514,7 @@ SELECT cast('2017/12/11 09:30:00' as date) / cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2523,7 +2523,7 @@ SELECT cast('2017/12/11 09:30:00' as date) / cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in 
'(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2532,7 +2532,7 @@ SELECT cast('2017/12/11 09:30:00' as date) / cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2797,7 +2797,7 @@ SELECT cast(1 as decimal(3, 0)) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) / CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2806,7 +2806,7 @@ SELECT cast(1 as decimal(5, 0)) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) / CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2815,7 +2815,7 @@ SELECT cast(1 as decimal(10, 0)) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2824,7 +2824,7 @@ SELECT cast(1 as decimal(20, 0)) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) / CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2833,7 +2833,7 @@ SELECT cast(1 as decimal(3, 0)) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) / CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2842,7 +2842,7 @@ SELECT cast(1 as decimal(5, 0)) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) / CAST(1 AS BOOLEAN))' 
(decimal(5,0) and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2851,7 +2851,7 @@ SELECT cast(1 as decimal(10, 0)) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2860,7 +2860,7 @@ SELECT cast(1 as decimal(20, 0)) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) / CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2869,7 +2869,7 @@ SELECT cast(1 as decimal(3, 0)) / cast('2017/12/11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2878,7 +2878,7 @@ SELECT cast(1 as decimal(5, 0)) / cast('2017/12/11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2887,7 +2887,7 @@ SELECT cast(1 as decimal(10, 0)) / cast('2017/12/11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2896,7 +2896,7 @@ SELECT cast(1 as decimal(20, 0)) / cast('2017/12/11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2905,7 +2905,7 @@ SELECT cast(1 as decimal(3, 0)) / cast('2017/12/11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} 
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) / CAST('2017/12/11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) / CAST('2017/12/11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2914,7 +2914,7 @@ SELECT cast(1 as decimal(5, 0)) / cast('2017/12/11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) / CAST('2017/12/11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) / CAST('2017/12/11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2923,7 +2923,7 @@ SELECT cast(1 as decimal(10, 0)) / cast('2017/12/11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST('2017/12/11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST('2017/12/11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -2932,7 +2932,7 @@ SELECT cast(1 as decimal(20, 0)) / cast('2017/12/11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) / CAST('2017/12/11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) / CAST('2017/12/11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -3165,7 +3165,7 @@ SELECT cast('1' as binary) % cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -3174,7 +3174,7 @@ SELECT cast('1' as binary) % cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -3183,7 +3183,7 @@ SELECT cast('1' as binary) % cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -3192,7 +3192,7 @@ SELECT cast('1' as binary) % cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -3201,7 +3201,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) % cast(1 as decimal(3, 0)) FRO struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -3210,7 +3210,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) % cast(1 as decimal(5, 0)) FRO struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -3219,7 +3219,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) % cast(1 as decimal(10, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -3228,7 +3228,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) % cast(1 as decimal(20, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -3237,7 +3237,7 @@ SELECT cast('2017-12-11 09:30:00' as date) % cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -3246,7 +3246,7 @@ SELECT cast('2017-12-11 09:30:00' as date) % cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) 
% CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -3255,7 +3255,7 @@ SELECT cast('2017-12-11 09:30:00' as date) % cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -3264,7 +3264,7 @@ SELECT cast('2017-12-11 09:30:00' as date) % cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -3529,7 +3529,7 @@ SELECT cast(1 as decimal(3, 0)) % cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) % CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) % CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -3538,7 +3538,7 @@ SELECT cast(1 as decimal(5, 0)) % cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) % CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) % CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -3547,7 +3547,7 @@ SELECT cast(1 as decimal(10, 0)) % cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) % CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) % CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -3556,7 +3556,7 @@ SELECT cast(1 as decimal(20, 0)) % cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) % CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) % CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -3565,7 +3565,7 @@ SELECT cast(1 as decimal(3, 0)) % cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) % CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) % CAST(1 AS BOOLEAN))' 
(decimal(3,0) and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -3574,7 +3574,7 @@ SELECT cast(1 as decimal(5, 0)) % cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) % CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) % CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -3583,7 +3583,7 @@ SELECT cast(1 as decimal(10, 0)) % cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) % CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) % CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -3592,7 +3592,7 @@ SELECT cast(1 as decimal(20, 0)) % cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) % CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) % CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -3601,7 +3601,7 @@ SELECT cast(1 as decimal(3, 0)) % cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -3610,7 +3610,7 @@ SELECT cast(1 as decimal(5, 0)) % cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -3619,7 +3619,7 @@ SELECT cast(1 as decimal(10, 0)) % cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -3628,7 +3628,7 @@ SELECT cast(1 as decimal(20, 0)) % cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) % CAST('2017-12-11 
09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -3637,7 +3637,7 @@ SELECT cast(1 as decimal(3, 0)) % cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) % CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) % CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -3646,7 +3646,7 @@ SELECT cast(1 as decimal(5, 0)) % cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) % CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) % CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -3655,7 +3655,7 @@ SELECT cast(1 as decimal(10, 0)) % cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) % CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) % CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -3664,7 +3664,7 @@ SELECT cast(1 as decimal(20, 0)) % cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) % CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) % CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -3897,7 +3897,7 @@ SELECT pmod(cast('1' as binary), cast(1 as decimal(3, 0))) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -3906,7 +3906,7 @@ SELECT pmod(cast('1' as binary), cast(1 as decimal(5, 0))) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -3915,7 +3915,7 @@ SELECT pmod(cast('1' as binary), cast(1 as decimal(10, 0))) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -3924,7 +3924,7 @@ SELECT pmod(cast('1' as binary), cast(1 as decimal(20, 0))) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -3933,7 +3933,7 @@ SELECT pmod(cast('2017-12-11 09:30:00.0' as timestamp), cast(1 as decimal(3, 0)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -3942,7 +3942,7 @@ SELECT pmod(cast('2017-12-11 09:30:00.0' as timestamp), cast(1 as decimal(5, 0)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -3951,7 +3951,7 @@ SELECT pmod(cast('2017-12-11 09:30:00.0' as timestamp), cast(1 as decimal(10, 0) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -3960,7 +3960,7 @@ SELECT pmod(cast('2017-12-11 09:30:00.0' as timestamp), cast(1 as decimal(20, 0) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -3969,7 +3969,7 @@ SELECT pmod(cast('2017-12-11 09:30:00' as date), cast(1 as decimal(3, 0))) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00' 
AS DATE), CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -3978,7 +3978,7 @@ SELECT pmod(cast('2017-12-11 09:30:00' as date), cast(1 as decimal(5, 0))) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -3987,7 +3987,7 @@ SELECT pmod(cast('2017-12-11 09:30:00' as date), cast(1 as decimal(10, 0))) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -3996,7 +3996,7 @@ SELECT pmod(cast('2017-12-11 09:30:00' as date), cast(1 as decimal(20, 0))) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -4261,7 +4261,7 @@ SELECT pmod(cast(1 as decimal(3, 0)) , cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST(1 AS DECIMAL(3,0)), CAST('1' AS BINARY))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(3,0)), CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -4270,7 +4270,7 @@ SELECT pmod(cast(1 as decimal(5, 0)) , cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST(1 AS DECIMAL(5,0)), CAST('1' AS BINARY))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(5,0)), CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -4279,7 +4279,7 @@ SELECT pmod(cast(1 as decimal(10, 0)), cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST(1 AS DECIMAL(10,0)), CAST('1' AS BINARY))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(10,0)), CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -4288,7 +4288,7 @@ SELECT pmod(cast(1 as decimal(20, 0)), cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST(1 AS 
DECIMAL(20,0)), CAST('1' AS BINARY))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(20,0)), CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -4297,7 +4297,7 @@ SELECT pmod(cast(1 as decimal(3, 0)) , cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST(1 AS DECIMAL(3,0)), CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(3,0)), CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -4306,7 +4306,7 @@ SELECT pmod(cast(1 as decimal(5, 0)) , cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST(1 AS DECIMAL(5,0)), CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(5,0)), CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -4315,7 +4315,7 @@ SELECT pmod(cast(1 as decimal(10, 0)), cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST(1 AS DECIMAL(10,0)), CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(10,0)), CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -4324,7 +4324,7 @@ SELECT pmod(cast(1 as decimal(20, 0)), cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST(1 AS DECIMAL(20,0)), CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(20,0)), CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -4333,7 +4333,7 @@ SELECT pmod(cast(1 as decimal(3, 0)) , cast('2017-12-11 09:30:00.0' as timestamp struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST(1 AS DECIMAL(3,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(3,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -4342,7 +4342,7 @@ SELECT pmod(cast(1 as decimal(5, 0)) , cast('2017-12-11 09:30:00.0' as timestamp struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST(1 AS DECIMAL(5,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(5,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -4351,7 +4351,7 @@ SELECT pmod(cast(1 as decimal(10, 0)), cast('2017-12-11 09:30:00.0' as timestamp struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -4360,7 +4360,7 @@ SELECT pmod(cast(1 as decimal(20, 0)), cast('2017-12-11 09:30:00.0' as timestamp struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST(1 AS DECIMAL(20,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(20,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -4369,7 +4369,7 @@ SELECT pmod(cast(1 as decimal(3, 0)) , cast('2017-12-11 09:30:00' as date)) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST(1 AS DECIMAL(3,0)), CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(3,0)), CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -4378,7 +4378,7 @@ SELECT pmod(cast(1 as decimal(5, 0)) , cast('2017-12-11 09:30:00' as date)) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST(1 AS DECIMAL(5,0)), CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(5,0)), CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -4387,7 +4387,7 @@ SELECT pmod(cast(1 as decimal(10, 0)), cast('2017-12-11 09:30:00' as date)) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -4396,7 +4396,7 @@ SELECT pmod(cast(1 as decimal(20, 0)), cast('2017-12-11 09:30:00' as date)) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST(1 AS DECIMAL(20,0)), CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(20,0)), CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -4629,7 +4629,7 @@ SELECT cast('1' as binary) = cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS 
DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -4638,7 +4638,7 @@ SELECT cast('1' as binary) = cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -4647,7 +4647,7 @@ SELECT cast('1' as binary) = cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -4656,7 +4656,7 @@ SELECT cast('1' as binary) = cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -4665,7 +4665,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) = cast(1 as decimal(3, 0)) FRO
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -4674,7 +4674,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) = cast(1 as decimal(5, 0)) FRO
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -4683,7 +4683,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) = cast(1 as decimal(10, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -4692,7 +4692,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) = cast(1 as decimal(20, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -4701,7 +4701,7 @@ SELECT cast('2017-12-11 09:30:00' as date) = cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -4710,7 +4710,7 @@ SELECT cast('2017-12-11 09:30:00' as date) = cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -4719,7 +4719,7 @@ SELECT cast('2017-12-11 09:30:00' as date) = cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -4728,7 +4728,7 @@ SELECT cast('2017-12-11 09:30:00' as date) = cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -4993,7 +4993,7 @@ SELECT cast(1 as decimal(3, 0)) = cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) = CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -5002,7 +5002,7 @@ SELECT cast(1 as decimal(5, 0)) = cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) = CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -5011,7 +5011,7 @@ SELECT cast(1 as decimal(10, 0)) = cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) = CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -5020,7 +5020,7 @@ SELECT cast(1 as decimal(20, 0)) = cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) = CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -5061,7 +5061,7 @@ SELECT cast(1 as decimal(3, 0)) = cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -5070,7 +5070,7 @@ SELECT cast(1 as decimal(5, 0)) = cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -5079,7 +5079,7 @@ SELECT cast(1 as decimal(10, 0)) = cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -5088,7 +5088,7 @@ SELECT cast(1 as decimal(20, 0)) = cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -5097,7 +5097,7 @@ SELECT cast(1 as decimal(3, 0)) = cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -5106,7 +5106,7 @@ SELECT cast(1 as decimal(5, 0)) = cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -5115,7 +5115,7 @@ SELECT cast(1 as decimal(10, 0)) = cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -5124,7 +5124,7 @@ SELECT cast(1 as decimal(20, 0)) = cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -5357,7 +5357,7 @@ SELECT cast('1' as binary) <=> cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -5366,7 +5366,7 @@ SELECT cast('1' as binary) <=> cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -5375,7 +5375,7 @@ SELECT cast('1' as binary) <=> cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -5384,7 +5384,7 @@ SELECT cast('1' as binary) <=> cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -5393,7 +5393,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <=> cast(1 as decimal(3, 0)) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -5402,7 +5402,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <=> cast(1 as decimal(5, 0)) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -5411,7 +5411,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <=> cast(1 as decimal(10, 0))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -5420,7 +5420,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <=> cast(1 as decimal(20, 0))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -5429,7 +5429,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <=> cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -5438,7 +5438,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <=> cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -5447,7 +5447,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <=> cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -5456,7 +5456,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <=> cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -5721,7 +5721,7 @@ SELECT cast(1 as decimal(3, 0)) <=> cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) <=> CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) <=> CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -5730,7 +5730,7 @@ SELECT cast(1 as decimal(5, 0)) <=> cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) <=> CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <=> CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -5739,7 +5739,7 @@ SELECT cast(1 as decimal(10, 0)) <=> cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) <=> CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) <=> CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -5748,7 +5748,7 @@ SELECT cast(1 as decimal(20, 0)) <=> cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) <=> CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) <=> CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -5789,7 +5789,7 @@ SELECT cast(1 as decimal(3, 0)) <=> cast('2017-12-11 09:30:00.0' as timestamp)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -5798,7 +5798,7 @@ SELECT cast(1 as decimal(5, 0)) <=> cast('2017-12-11 09:30:00.0' as timestamp)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -5807,7 +5807,7 @@ SELECT cast(1 as decimal(10, 0)) <=> cast('2017-12-11 09:30:00.0' as timestamp)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -5816,7 +5816,7 @@ SELECT cast(1 as decimal(20, 0)) <=> cast('2017-12-11 09:30:00.0' as timestamp)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -5825,7 +5825,7 @@ SELECT cast(1 as decimal(3, 0)) <=> cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -5834,7 +5834,7 @@ SELECT cast(1 as decimal(5, 0)) <=> cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -5843,7 +5843,7 @@ SELECT cast(1 as decimal(10, 0)) <=> cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -5852,7 +5852,7 @@ SELECT cast(1 as decimal(20, 0)) <=> cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -6085,7 +6085,7 @@ SELECT cast('1' as binary) < cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -6094,7 +6094,7 @@ SELECT cast('1' as binary) < cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -6103,7 +6103,7 @@ SELECT cast('1' as binary) < cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -6112,7 +6112,7 @@ SELECT cast('1' as binary) < cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -6121,7 +6121,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) < cast(1 as decimal(3, 0)) FRO
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -6130,7 +6130,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) < cast(1 as decimal(5, 0)) FRO
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -6139,7 +6139,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) < cast(1 as decimal(10, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -6148,7 +6148,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) < cast(1 as decimal(20, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -6157,7 +6157,7 @@ SELECT cast('2017-12-11 09:30:00' as date) < cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -6166,7 +6166,7 @@ SELECT cast('2017-12-11 09:30:00' as date) < cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -6175,7 +6175,7 @@ SELECT cast('2017-12-11 09:30:00' as date) < cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -6184,7 +6184,7 @@ SELECT cast('2017-12-11 09:30:00' as date) < cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -6449,7 +6449,7 @@ SELECT cast(1 as decimal(3, 0)) < cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) < CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) < CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -6458,7 +6458,7 @@ SELECT cast(1 as decimal(5, 0)) < cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) < CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) < CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -6467,7 +6467,7 @@ SELECT cast(1 as decimal(10, 0)) < cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) < CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) < CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -6476,7 +6476,7 @@ SELECT cast(1 as decimal(20, 0)) < cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) < CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) < CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -6485,7 +6485,7 @@ SELECT cast(1 as decimal(3, 0)) < cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) < CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) < CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -6494,7 +6494,7 @@ SELECT cast(1 as decimal(5, 0)) < cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) < CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) < CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -6503,7 +6503,7 @@ SELECT cast(1 as decimal(10, 0)) < cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) < CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) < CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -6512,7 +6512,7 @@ SELECT cast(1 as decimal(20, 0)) < cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) < CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) < CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -6521,7 +6521,7 @@ SELECT cast(1 as decimal(3, 0)) < cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -6530,7 +6530,7 @@ SELECT cast(1 as decimal(5, 0)) < cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -6539,7 +6539,7 @@ SELECT cast(1 as decimal(10, 0)) < cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -6548,7 +6548,7 @@ SELECT cast(1 as decimal(20, 0)) < cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -6557,7 +6557,7 @@ SELECT cast(1 as decimal(3, 0)) < cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) < CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) < CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -6566,7 +6566,7 @@ SELECT cast(1 as decimal(5, 0)) < cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) < CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) < CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -6575,7 +6575,7 @@ SELECT cast(1 as decimal(10, 0)) < cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) < CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) < CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -6584,7 +6584,7 @@ SELECT cast(1 as decimal(20, 0)) < cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) < CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) < CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -6817,7 +6817,7 @@ SELECT cast('1' as binary) <= cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -6826,7 +6826,7 @@ SELECT cast('1' as binary) <= cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -6835,7 +6835,7 @@ SELECT cast('1' as binary) <= cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -6844,7 +6844,7 @@ SELECT cast('1' as binary) <= cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -6853,7 +6853,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <= cast(1 as decimal(3, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -6862,7 +6862,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <= cast(1 as decimal(5, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -6871,7 +6871,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <= cast(1 as decimal(10, 0)) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -6880,7 +6880,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <= cast(1 as decimal(20, 0)) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -6889,7 +6889,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <= cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -6898,7 +6898,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <= cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -6907,7 +6907,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <= cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -6916,7 +6916,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <= cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -7181,7 +7181,7 @@ SELECT cast(1 as decimal(3, 0)) <= cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) <= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) <= CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -7190,7 +7190,7 @@ SELECT cast(1 as decimal(5, 0)) <= cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) <= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <= CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -7199,7 +7199,7 @@ SELECT cast(1 as decimal(10, 0)) <= cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) <= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) <= CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -7208,7 +7208,7 @@ SELECT cast(1 as decimal(20, 0)) <= cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) <= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) <= CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -7217,7 +7217,7 @@ SELECT cast(1 as decimal(3, 0)) <= cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) <= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) <= CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -7226,7 +7226,7 @@ SELECT cast(1 as decimal(5, 0)) <= cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) <= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <= CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -7235,7 +7235,7 @@ SELECT cast(1 as decimal(10, 0)) <= cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) <= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) <= CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -7244,7 +7244,7 @@ SELECT cast(1 as decimal(20, 0)) <= cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) <= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) <= CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -7253,7 +7253,7 @@ SELECT cast(1 as decimal(3, 0)) <= cast('2017-12-11 09:30:00.0' as timestamp) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -7262,7 +7262,7 @@ SELECT cast(1 as decimal(5, 0)) <= cast('2017-12-11 09:30:00.0' as timestamp) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -7271,7 +7271,7 @@ SELECT cast(1 as decimal(10, 0)) <= cast('2017-12-11 09:30:00.0' as timestamp) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -7280,7 +7280,7 @@ SELECT cast(1 as decimal(20, 0)) <= cast('2017-12-11 09:30:00.0' as timestamp) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -7289,7 +7289,7 @@ SELECT cast(1 as decimal(3, 0)) <= cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -7298,7 +7298,7 @@ SELECT cast(1 as decimal(5, 0)) <= cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -7307,7 +7307,7 @@ SELECT cast(1 as decimal(10, 0)) <= cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -7316,7 +7316,7 @@ SELECT cast(1 as decimal(20, 0)) <= cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -7549,7 +7549,7 @@ SELECT cast('1' as binary) > cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -7558,7 +7558,7 @@ SELECT cast('1' as binary) > cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -7567,7 +7567,7 @@ SELECT cast('1' as binary) > cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -7576,7 +7576,7 @@ SELECT cast('1' as binary) > cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -7585,7 +7585,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) > cast(1 as decimal(3, 0)) FRO
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -7594,7 +7594,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) > cast(1 as decimal(5, 0)) FRO
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -7603,7 +7603,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) > cast(1 as decimal(10, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -7612,7 +7612,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) > cast(1 as decimal(20, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -7621,7 +7621,7 @@ SELECT cast('2017-12-11 09:30:00' as date) > cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -7630,7 +7630,7 @@ SELECT cast('2017-12-11 09:30:00' as date) > cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -7639,7 +7639,7 @@ SELECT cast('2017-12-11 09:30:00' as date) > cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -7648,7 +7648,7 @@ SELECT cast('2017-12-11 09:30:00' as date) > cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -7913,7 +7913,7 @@ SELECT cast(1 as decimal(3, 0)) > cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) > CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) > CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -7922,7 +7922,7 @@ SELECT cast(1 as decimal(5, 0)) > cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) > CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) > CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -7931,7 +7931,7 @@ SELECT cast(1 as decimal(10, 0)) > cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) > CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) > CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -7940,7 +7940,7 @@ SELECT cast(1 as decimal(20, 0)) > cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) > CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) > CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -7949,7 +7949,7 @@ SELECT cast(1 as decimal(3, 0)) > cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) > CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) > CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -7958,7 +7958,7 @@ SELECT cast(1 as decimal(5, 0)) > cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) > CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) > CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -7967,7 +7967,7 @@ SELECT cast(1 as decimal(10, 0)) > cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) > CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) > CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -7976,7 +7976,7 @@ SELECT cast(1 as decimal(20, 0)) > cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) > CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) > CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -7985,7 +7985,7 @@ SELECT cast(1 as decimal(3, 0)) > cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -7994,7 +7994,7 @@ SELECT cast(1 as decimal(5, 0)) > cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -8003,7 +8003,7 @@ SELECT cast(1 as decimal(10, 0)) > cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -8012,7 +8012,7 @@ SELECT cast(1 as decimal(20, 0)) > cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -8021,7 +8021,7 @@ SELECT cast(1 as decimal(3, 0)) > cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) > CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) > CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -8030,7 +8030,7 @@ SELECT cast(1 as decimal(5, 0)) > cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) > CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) > CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -8039,7 +8039,7 @@ SELECT cast(1 as decimal(10, 0)) > cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) > CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) > CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -8048,7 +8048,7 @@ SELECT cast(1 as decimal(20, 0)) > cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) > CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) > CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -8281,7 +8281,7 @@ SELECT cast('1' as binary) >= cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -8290,7 +8290,7 @@ SELECT cast('1' as binary) >= cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -8299,7 +8299,7 @@ SELECT cast('1' as binary) >= cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -8308,7 +8308,7 @@ SELECT cast('1' as binary) >= cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -8317,7 +8317,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) >= cast(1 as decimal(3, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -8326,7 +8326,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) >= cast(1 as decimal(5, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -8335,7 +8335,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) >= cast(1 as decimal(10, 0)) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -8344,7 +8344,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) >= cast(1 as decimal(20, 0)) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -8353,7 +8353,7 @@ SELECT cast('2017-12-11 09:30:00' as date) >= cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7"],"queryContext":[]}


 -- !query
@@ -8362,7 +8362,7 @@ SELECT cast('2017-12-11 09:30:00' as date) >= cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11
09:30:00' AS DATE) >= CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -8371,7 +8371,7 @@ SELECT cast('2017-12-11 09:30:00' as date) >= cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -8380,7 +8380,7 @@ SELECT cast('2017-12-11 09:30:00' as date) >= cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -8645,7 +8645,7 @@ SELECT cast(1 as decimal(3, 0)) >= cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) >= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) >= CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -8654,7 +8654,7 @@ SELECT cast(1 as decimal(5, 0)) >= cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) >= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) >= CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -8663,7 +8663,7 @@ SELECT cast(1 as decimal(10, 0)) >= cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) >= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) >= CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -8672,7 +8672,7 @@ SELECT cast(1 as decimal(20, 0)) >= cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) >= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) >= CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -8681,7 +8681,7 @@ SELECT cast(1 as decimal(3, 0)) >= cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) >= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 
AS DECIMAL(3,0)) >= CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -8690,7 +8690,7 @@ SELECT cast(1 as decimal(5, 0)) >= cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) >= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) >= CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -8699,7 +8699,7 @@ SELECT cast(1 as decimal(10, 0)) >= cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) >= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) >= CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -8708,7 +8708,7 @@ SELECT cast(1 as decimal(20, 0)) >= cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) >= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) >= CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -8717,7 +8717,7 @@ SELECT cast(1 as decimal(3, 0)) >= cast('2017-12-11 09:30:00.0' as timestamp) F struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -8726,7 +8726,7 @@ SELECT cast(1 as decimal(5, 0)) >= cast('2017-12-11 09:30:00.0' as timestamp) F struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -8735,7 +8735,7 @@ SELECT cast(1 as decimal(10, 0)) >= cast('2017-12-11 09:30:00.0' as timestamp) F struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -8744,7 +8744,7 @@ SELECT cast(1 as decimal(20, 0)) >= cast('2017-12-11 09:30:00.0' as timestamp) F struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} 
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -8753,7 +8753,7 @@ SELECT cast(1 as decimal(3, 0)) >= cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -8762,7 +8762,7 @@ SELECT cast(1 as decimal(5, 0)) >= cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -8771,7 +8771,7 @@ SELECT cast(1 as decimal(10, 0)) >= cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -8780,7 +8780,7 @@ SELECT cast(1 as decimal(20, 0)) >= cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -9013,7 +9013,7 @@ SELECT cast('1' as binary) <> cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -9022,7 +9022,7 @@ SELECT cast('1' as binary) <> cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -9031,7 +9031,7 @@ SELECT cast('1' as binary) <> cast(1 as decimal(10, 0)) 
FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -9040,7 +9040,7 @@ SELECT cast('1' as binary) <> cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -9049,7 +9049,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <> cast(1 as decimal(3, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -9058,7 +9058,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <> cast(1 as decimal(5, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -9067,7 +9067,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <> cast(1 as decimal(10, 0)) F struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -9076,7 +9076,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <> cast(1 as decimal(20, 0)) F struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -9085,7 +9085,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <> cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in 
'(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -9094,7 +9094,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <> cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -9103,7 +9103,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <> cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -9112,7 +9112,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <> cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -9377,7 +9377,7 @@ SELECT cast(1 as decimal(3, 0)) <> cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) = CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -9386,7 +9386,7 @@ SELECT cast(1 as decimal(5, 0)) <> cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) = CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -9395,7 +9395,7 @@ SELECT cast(1 as decimal(10, 0)) <> cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) = CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -9404,7 +9404,7 @@ SELECT cast(1 as decimal(20, 0)) <> cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) = CAST('1' AS BINARY))' due 
to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) = CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -9445,7 +9445,7 @@ SELECT cast(1 as decimal(3, 0)) <> cast('2017-12-11 09:30:00.0' as timestamp) F struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -9454,7 +9454,7 @@ SELECT cast(1 as decimal(5, 0)) <> cast('2017-12-11 09:30:00.0' as timestamp) F struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -9463,7 +9463,7 @@ SELECT cast(1 as decimal(10, 0)) <> cast('2017-12-11 09:30:00.0' as timestamp) F struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -9472,7 +9472,7 @@ SELECT cast(1 as decimal(20, 0)) <> cast('2017-12-11 09:30:00.0' as timestamp) F struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -9481,7 +9481,7 @@ SELECT cast(1 as decimal(3, 0)) <> cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -9490,7 +9490,7 @@ SELECT cast(1 as decimal(5, 0)) <> cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -9499,7 +9499,7 @@ SELECT cast(1 as decimal(10, 0)) <> cast('2017-12-11 
09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7"],"queryContext":[]}
 
 
 -- !query
@@ -9508,4 +9508,4 @@ SELECT cast(1 as decimal(20, 0)) <> cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7"],"queryContext":[]}
diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/division.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/division.sql.out
index f434b5201f33d..d66765a66d9c3 100644
--- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/division.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/division.sql.out
@@ -77,7 +77,7 @@ SELECT cast(1 as tinyint) / cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS TINYINT) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS TINYINT) / CAST('1' AS BINARY))' (tinyint and binary).; line 1 pos 7"],"queryContext":[]}
 
 
 -- !query
@@ -86,7 +86,7 @@ SELECT cast(1 as tinyint) / cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS TINYINT) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS TINYINT) / CAST(1 AS BOOLEAN))' (tinyint and boolean).; line 1 pos 7"],"queryContext":[]}
 
 
 -- !query
@@ -95,7 +95,7 @@ SELECT cast(1 as tinyint) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS TINYINT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS TINYINT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (tinyint and timestamp).; line 1 pos 7"],"queryContext":[]}
 
 
 -- !query
@@ -104,7 +104,7 @@ SELECT cast(1 as tinyint) / cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS TINYINT) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS TINYINT) / CAST('2017-12-11 09:30:00' AS DATE))' (tinyint and date).; line 1 pos 7"],"queryContext":[]}
 
 
 -- !query
@@ -177,7 +177,7 @@ SELECT cast(1 as smallint) / cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS SMALLINT) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS SMALLINT) / CAST('1' AS BINARY))' (smallint and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -186,7 +186,7 @@ SELECT cast(1 as smallint) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS SMALLINT) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS SMALLINT) / CAST(1 AS BOOLEAN))' (smallint and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -195,7 +195,7 @@ SELECT cast(1 as smallint) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS SMALLINT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS SMALLINT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (smallint and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -204,7 +204,7 @@ SELECT cast(1 as smallint) / cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS SMALLINT) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS SMALLINT) / CAST('2017-12-11 09:30:00' AS DATE))' (smallint and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -277,7 +277,7 @@ SELECT cast(1 as int) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS INT) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS INT) / CAST('1' AS BINARY))' (int and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -286,7 +286,7 @@ SELECT cast(1 as int) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS INT) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS INT) / CAST(1 AS BOOLEAN))' (int and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -295,7 +295,7 @@ SELECT cast(1 as int) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS INT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS INT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (int and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -304,7 +304,7 @@ SELECT cast(1 as int) / cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 
'(CAST(1 AS INT) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS INT) / CAST('2017-12-11 09:30:00' AS DATE))' (int and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -377,7 +377,7 @@ SELECT cast(1 as bigint) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BIGINT) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS BIGINT) / CAST('1' AS BINARY))' (bigint and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -386,7 +386,7 @@ SELECT cast(1 as bigint) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BIGINT) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS BIGINT) / CAST(1 AS BOOLEAN))' (bigint and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -395,7 +395,7 @@ SELECT cast(1 as bigint) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BIGINT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS BIGINT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (bigint and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -404,7 +404,7 @@ SELECT cast(1 as bigint) / cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BIGINT) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS BIGINT) / CAST('2017-12-11 09:30:00' AS DATE))' (bigint and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -477,7 +477,7 @@ SELECT cast(1 as float) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS FLOAT) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS FLOAT) / CAST('1' AS BINARY))' (float and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -486,7 +486,7 @@ SELECT cast(1 as float) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS FLOAT) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS FLOAT) / CAST(1 AS BOOLEAN))' (float and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -495,7 +495,7 @@ SELECT cast(1 as float) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS FLOAT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS FLOAT) / CAST('2017-12-11 09:30:00.0' AS 
TIMESTAMP))' (float and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -504,7 +504,7 @@ SELECT cast(1 as float) / cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS FLOAT) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS FLOAT) / CAST('2017-12-11 09:30:00' AS DATE))' (float and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -577,7 +577,7 @@ SELECT cast(1 as double) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DOUBLE) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DOUBLE) / CAST('1' AS BINARY))' (double and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -586,7 +586,7 @@ SELECT cast(1 as double) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DOUBLE) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DOUBLE) / CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -595,7 +595,7 @@ SELECT cast(1 as double) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DOUBLE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DOUBLE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -604,7 +604,7 @@ SELECT cast(1 as double) / cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DOUBLE) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DOUBLE) / CAST('2017-12-11 09:30:00' AS DATE))' (double and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -677,7 +677,7 @@ SELECT cast(1 as decimal(10, 0)) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -686,7 +686,7 @@ SELECT cast(1 as decimal(10, 0)) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -695,7 +695,7 @@ 
SELECT cast(1 as decimal(10, 0)) / cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -704,7 +704,7 @@ SELECT cast(1 as decimal(10, 0)) / cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -777,7 +777,7 @@ SELECT cast(1 as string) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST('1' AS BINARY))' (double and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -786,7 +786,7 @@ SELECT cast(1 as string) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -795,7 +795,7 @@ SELECT cast(1 as string) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -804,7 +804,7 @@ SELECT cast(1 as string) / cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST('2017-12-11 09:30:00' AS DATE))' (double and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -813,7 +813,7 @@ SELECT cast('1' as binary) / cast(1 as tinyint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS TINYINT))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 
AS TINYINT))' (binary and tinyint).; line 1 pos 7"],"queryContext":[]} -- !query @@ -822,7 +822,7 @@ SELECT cast('1' as binary) / cast(1 as smallint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS SMALLINT))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS SMALLINT))' (binary and smallint).; line 1 pos 7"],"queryContext":[]} -- !query @@ -831,7 +831,7 @@ SELECT cast('1' as binary) / cast(1 as int) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS INT))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS INT))' (binary and int).; line 1 pos 7"],"queryContext":[]} -- !query @@ -840,7 +840,7 @@ SELECT cast('1' as binary) / cast(1 as bigint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS BIGINT))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS BIGINT))' (binary and bigint).; line 1 pos 7"],"queryContext":[]} -- !query @@ -849,7 +849,7 @@ SELECT cast('1' as binary) / cast(1 as float) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS FLOAT))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS FLOAT))' (binary and float).; line 1 pos 7"],"queryContext":[]} -- !query @@ -858,7 +858,7 @@ SELECT cast('1' as binary) / cast(1 as double) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS DOUBLE))' (binary and double).; line 1 pos 7"],"queryContext":[]} -- !query @@ -867,7 +867,7 @@ SELECT cast('1' as binary) / cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -876,7 +876,7 @@ SELECT cast('1' as binary) / cast(1 as string) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) / CAST(CAST(1 AS STRING) AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(CAST(1 AS STRING) AS DOUBLE))' (binary and double).; line 1 pos 7"],"queryContext":[]} -- !query @@ -885,7 +885,7 @@ SELECT cast('1' as binary) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) / CAST('1' AS BINARY))' due to data type mismatch: '(CAST('1' AS BINARY) / CAST('1' AS BINARY))' requires (double or decimal) type, not binary; line 1 pos 7"],"queryContext":[]} -- !query @@ -894,7 +894,7 @@ SELECT cast('1' as binary) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS BOOLEAN))' (binary and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -903,7 +903,7 @@ SELECT cast('1' as binary) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (binary and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -912,7 +912,7 @@ SELECT cast('1' as binary) / cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST('2017-12-11 09:30:00' AS DATE))' (binary and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -921,7 +921,7 @@ SELECT cast(1 as boolean) / cast(1 as tinyint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS TINYINT))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS TINYINT))' (boolean and tinyint).; line 1 pos 7"],"queryContext":[]} -- !query @@ -930,7 +930,7 @@ SELECT cast(1 as boolean) / cast(1 as smallint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS SMALLINT))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS SMALLINT))' (boolean and smallint).; line 1 pos 7"],"queryContext":[]} -- !query @@ -939,7 +939,7 @@ SELECT cast(1 as boolean) / cast(1 as int) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS INT))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS INT))' (boolean and int).; line 1 pos 7"],"queryContext":[]} -- !query @@ -948,7 +948,7 @@ SELECT cast(1 as boolean) / cast(1 as bigint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS BIGINT))' due to data type 
mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS BIGINT))' (boolean and bigint).; line 1 pos 7"],"queryContext":[]} -- !query @@ -957,7 +957,7 @@ SELECT cast(1 as boolean) / cast(1 as float) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS FLOAT))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS FLOAT))' (boolean and float).; line 1 pos 7"],"queryContext":[]} -- !query @@ -966,7 +966,7 @@ SELECT cast(1 as boolean) / cast(1 as double) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS DOUBLE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS DOUBLE))' (boolean and double).; line 1 pos 7"],"queryContext":[]} -- !query @@ -975,7 +975,7 @@ SELECT cast(1 as boolean) / cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS DECIMAL(10,0)))' (boolean and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -984,7 +984,7 @@ SELECT cast(1 as boolean) / cast(1 as string) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BOOLEAN) / CAST(CAST(1 AS STRING) AS DOUBLE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(CAST(1 AS STRING) AS DOUBLE))' (boolean and double).; line 1 pos 7"],"queryContext":[]} -- !query @@ -993,7 +993,7 @@ SELECT cast(1 as boolean) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BOOLEAN) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST('1' AS BINARY))' (boolean and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1002,7 +1002,7 @@ SELECT cast(1 as boolean) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS BOOLEAN))' due to data type mismatch: '(CAST(1 AS BOOLEAN) / CAST(1 AS BOOLEAN))' requires (double or decimal) type, not boolean; line 1 pos 7"],"queryContext":[]} -- !query @@ -1011,7 +1011,7 @@ SELECT cast(1 as boolean) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BOOLEAN) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (boolean and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1020,7 +1020,7 @@ SELECT cast(1 as 
boolean) / cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BOOLEAN) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST('2017-12-11 09:30:00' AS DATE))' (boolean and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1029,7 +1029,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as tinyint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS TINYINT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS TINYINT))' (timestamp and tinyint).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1038,7 +1038,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as smallint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS SMALLINT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS SMALLINT))' (timestamp and smallint).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1047,7 +1047,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as int) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS INT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS INT))' (timestamp and int).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1056,7 +1056,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as bigint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS BIGINT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS BIGINT))' (timestamp and bigint).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1065,7 +1065,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as float) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS FLOAT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS FLOAT))' (timestamp and float).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1074,7 +1074,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as double) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DOUBLE))' due to data type mismatch: differing types in 
'(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DOUBLE))' (timestamp and double).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1083,7 +1083,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as decimal(10, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1092,7 +1092,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as string) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(CAST(1 AS STRING) AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(CAST(1 AS STRING) AS DOUBLE))' (timestamp and double).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1101,7 +1101,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('1' AS BINARY))' (timestamp and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1110,7 +1110,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS BOOLEAN))' (timestamp and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1119,7 +1119,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast('2017-12-11 09:30:00.0' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' requires (double or decimal) type, not timestamp; line 1 pos 7"],"queryContext":[]} -- !query @@ -1128,7 +1128,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast('2017-12-11 09:30:00' a struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('2017-12-11 09:30:00' AS DATE))' (timestamp and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1137,7 +1137,7 @@ SELECT 
cast('2017-12-11 09:30:00' as date) / cast(1 as tinyint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS TINYINT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS TINYINT))' (date and tinyint).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1146,7 +1146,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as smallint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS SMALLINT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS SMALLINT))' (date and smallint).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1155,7 +1155,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as int) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS INT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS INT))' (date and int).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1164,7 +1164,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as bigint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS BIGINT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS BIGINT))' (date and bigint).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1173,7 +1173,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as float) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS FLOAT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS FLOAT))' (date and float).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1182,7 +1182,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as double) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS DOUBLE))' (date and double).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1191,7 +1191,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 
7"],"queryContext":[]} -- !query @@ -1200,7 +1200,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as string) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(CAST(1 AS STRING) AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(CAST(1 AS STRING) AS DOUBLE))' (date and double).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1209,7 +1209,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('1' AS BINARY))' (date and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1218,7 +1218,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS BOOLEAN))' (date and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1227,7 +1227,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast('2017-12-11 09:30:00.0' as tim struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (date and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1236,4 +1236,4 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast('2017-12-11 09:30:00' as date) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('2017-12-11 09:30:00' AS DATE))' requires (double or decimal) type, not date; line 1 pos 7"],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/ifCoercion.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/ifCoercion.sql.out index d1f0c32c1e16f..167b4d0182496 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/ifCoercion.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/ifCoercion.sql.out @@ -77,7 +77,7 @@ SELECT IF(true, cast(1 as tinyint), cast('2' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS TINYINT), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS TINYINT), CAST('2' AS 
BINARY)))' (tinyint and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -86,7 +86,7 @@ SELECT IF(true, cast(1 as tinyint), cast(2 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS TINYINT), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS TINYINT), CAST(2 AS BOOLEAN)))' (tinyint and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -95,7 +95,7 @@ SELECT IF(true, cast(1 as tinyint), cast('2017-12-11 09:30:00.0' as timestamp)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS TINYINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS TINYINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (tinyint and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -104,7 +104,7 @@ SELECT IF(true, cast(1 as tinyint), cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS TINYINT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS TINYINT), CAST('2017-12-11 09:30:00' AS DATE)))' (tinyint and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -177,7 +177,7 @@ SELECT IF(true, cast(1 as smallint), cast('2' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS SMALLINT), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS SMALLINT), CAST('2' AS BINARY)))' (smallint and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -186,7 +186,7 @@ SELECT IF(true, cast(1 as smallint), cast(2 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS SMALLINT), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS SMALLINT), CAST(2 AS BOOLEAN)))' (smallint and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -195,7 +195,7 @@ SELECT IF(true, cast(1 as smallint), cast('2017-12-11 09:30:00.0' as timestamp)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS SMALLINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS SMALLINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (smallint and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -204,7 +204,7 @@ SELECT IF(true, cast(1 as smallint), cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS SMALLINT), CAST('2017-12-11 
09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS SMALLINT), CAST('2017-12-11 09:30:00' AS DATE)))' (smallint and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -277,7 +277,7 @@ SELECT IF(true, cast(1 as int), cast('2' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS INT), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS INT), CAST('2' AS BINARY)))' (int and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -286,7 +286,7 @@ SELECT IF(true, cast(1 as int), cast(2 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS INT), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS INT), CAST(2 AS BOOLEAN)))' (int and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -295,7 +295,7 @@ SELECT IF(true, cast(1 as int), cast('2017-12-11 09:30:00.0' as timestamp)) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS INT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS INT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (int and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -304,7 +304,7 @@ SELECT IF(true, cast(1 as int), cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS INT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS INT), CAST('2017-12-11 09:30:00' AS DATE)))' (int and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -377,7 +377,7 @@ SELECT IF(true, cast(1 as bigint), cast('2' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS BIGINT), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BIGINT), CAST('2' AS BINARY)))' (bigint and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -386,7 +386,7 @@ SELECT IF(true, cast(1 as bigint), cast(2 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS BIGINT), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BIGINT), CAST(2 AS BOOLEAN)))' (bigint and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -395,7 +395,7 @@ SELECT IF(true, cast(1 as bigint), cast('2017-12-11 09:30:00.0' as timestamp)) F struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS BIGINT), CAST('2017-12-11 09:30:00.0' AS 
TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BIGINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (bigint and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -404,7 +404,7 @@ SELECT IF(true, cast(1 as bigint), cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS BIGINT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BIGINT), CAST('2017-12-11 09:30:00' AS DATE)))' (bigint and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -477,7 +477,7 @@ SELECT IF(true, cast(1 as float), cast('2' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS FLOAT), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS FLOAT), CAST('2' AS BINARY)))' (float and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -486,7 +486,7 @@ SELECT IF(true, cast(1 as float), cast(2 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS FLOAT), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS FLOAT), CAST(2 AS BOOLEAN)))' (float and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -495,7 +495,7 @@ SELECT IF(true, cast(1 as float), cast('2017-12-11 09:30:00.0' as timestamp)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS FLOAT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS FLOAT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (float and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -504,7 +504,7 @@ SELECT IF(true, cast(1 as float), cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS FLOAT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS FLOAT), CAST('2017-12-11 09:30:00' AS DATE)))' (float and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -577,7 +577,7 @@ SELECT IF(true, cast(1 as double), cast('2' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS DOUBLE), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DOUBLE), CAST('2' AS BINARY)))' (double and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -586,7 +586,7 @@ SELECT IF(true, cast(1 as double), cast(2 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 
'(IF(true, CAST(1 AS DOUBLE), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DOUBLE), CAST(2 AS BOOLEAN)))' (double and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -595,7 +595,7 @@ SELECT IF(true, cast(1 as double), cast('2017-12-11 09:30:00.0' as timestamp)) F struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS DOUBLE), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DOUBLE), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (double and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -604,7 +604,7 @@ SELECT IF(true, cast(1 as double), cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS DOUBLE), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DOUBLE), CAST('2017-12-11 09:30:00' AS DATE)))' (double and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -677,7 +677,7 @@ SELECT IF(true, cast(1 as decimal(10, 0)), cast('2' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST('2' AS BINARY)))' (decimal(10,0) and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -686,7 +686,7 @@ SELECT IF(true, cast(1 as decimal(10, 0)), cast(2 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST(2 AS BOOLEAN)))' (decimal(10,0) and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -695,7 +695,7 @@ SELECT IF(true, cast(1 as decimal(10, 0)), cast('2017-12-11 09:30:00.0' as times struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (decimal(10,0) and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -704,7 +704,7 @@ SELECT IF(true, cast(1 as decimal(10, 0)), cast('2017-12-11 09:30:00' as date)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00' AS DATE)))' (decimal(10,0) and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -777,7 +777,7 @@ SELECT IF(true, cast(1 as string), cast('2' as binary)) 
FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS STRING), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS STRING), CAST('2' AS BINARY)))' (string and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -786,7 +786,7 @@ SELECT IF(true, cast(1 as string), cast(2 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS STRING), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS STRING), CAST(2 AS BOOLEAN)))' (string and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -811,7 +811,7 @@ SELECT IF(true, cast('1' as binary), cast(2 as tinyint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS TINYINT)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS TINYINT)))' (binary and tinyint).; line 1 pos 7"],"queryContext":[]} -- !query @@ -820,7 +820,7 @@ SELECT IF(true, cast('1' as binary), cast(2 as smallint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS SMALLINT)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS SMALLINT)))' (binary and smallint).; line 1 pos 7"],"queryContext":[]} -- !query @@ -829,7 +829,7 @@ SELECT IF(true, cast('1' as binary), cast(2 as int)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS INT)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS INT)))' (binary and int).; line 1 pos 7"],"queryContext":[]} -- !query @@ -838,7 +838,7 @@ SELECT IF(true, cast('1' as binary), cast(2 as bigint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS BIGINT)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS BIGINT)))' (binary and bigint).; line 1 pos 7"],"queryContext":[]} -- !query @@ -847,7 +847,7 @@ SELECT IF(true, cast('1' as binary), cast(2 as float)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS FLOAT)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS FLOAT)))' (binary and float).; line 1 pos 7"],"queryContext":[]} -- !query @@ -856,7 +856,7 @@ SELECT IF(true, cast('1' as binary), cast(2 as double)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} 
+{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS DOUBLE)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS DOUBLE)))' (binary and double).; line 1 pos 7"],"queryContext":[]} -- !query @@ -865,7 +865,7 @@ SELECT IF(true, cast('1' as binary), cast(2 as decimal(10, 0))) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS DECIMAL(10,0))))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS DECIMAL(10,0))))' (binary and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -874,7 +874,7 @@ SELECT IF(true, cast('1' as binary), cast(2 as string)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS STRING)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS STRING)))' (binary and string).; line 1 pos 7"],"queryContext":[]} -- !query @@ -891,7 +891,7 @@ SELECT IF(true, cast('1' as binary), cast(2 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS BOOLEAN)))' (binary and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -900,7 +900,7 @@ SELECT IF(true, cast('1' as binary), cast('2017-12-11 09:30:00.0' as timestamp)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('1' AS BINARY), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (binary and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -909,7 +909,7 @@ SELECT IF(true, cast('1' as binary), cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('1' AS BINARY), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST('2017-12-11 09:30:00' AS DATE)))' (binary and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -918,7 +918,7 @@ SELECT IF(true, cast(1 as boolean), cast(2 as tinyint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS TINYINT)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS TINYINT)))' (boolean and tinyint).; line 1 pos 7"],"queryContext":[]} -- !query @@ -927,7 +927,7 @@ SELECT IF(true, cast(1 as boolean), cast(2 as smallint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS SMALLINT)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS SMALLINT)))' (boolean and smallint).; line 1 pos 7"],"queryContext":[]} -- !query @@ -936,7 +936,7 @@ SELECT IF(true, cast(1 as boolean), cast(2 as int)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS INT)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS INT)))' (boolean and int).; line 1 pos 7"],"queryContext":[]} -- !query @@ -945,7 +945,7 @@ SELECT IF(true, cast(1 as boolean), cast(2 as bigint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS BIGINT)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS BIGINT)))' (boolean and bigint).; line 1 pos 7"],"queryContext":[]} -- !query @@ -954,7 +954,7 @@ SELECT IF(true, cast(1 as boolean), cast(2 as float)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS FLOAT)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS FLOAT)))' (boolean and float).; line 1 pos 7"],"queryContext":[]} -- !query @@ -963,7 +963,7 @@ SELECT IF(true, cast(1 as boolean), cast(2 as double)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS DOUBLE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS DOUBLE)))' (boolean and double).; line 1 pos 7"],"queryContext":[]} -- !query @@ -972,7 +972,7 @@ SELECT IF(true, cast(1 as boolean), cast(2 as decimal(10, 0))) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS DECIMAL(10,0))))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS DECIMAL(10,0))))' (boolean and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -981,7 +981,7 @@ SELECT IF(true, cast(1 as boolean), cast(2 as string)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS STRING)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS STRING)))' (boolean and string).; line 1 pos 7"],"queryContext":[]} -- !query @@ -990,7 +990,7 @@ SELECT IF(true, cast(1 as boolean), cast('2' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 
'(IF(true, CAST(1 AS BOOLEAN), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST('2' AS BINARY)))' (boolean and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1007,7 +1007,7 @@ SELECT IF(true, cast(1 as boolean), cast('2017-12-11 09:30:00.0' as timestamp)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (boolean and timestamp).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1016,7 +1016,7 @@ SELECT IF(true, cast(1 as boolean), cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST('2017-12-11 09:30:00' AS DATE)))' (boolean and date).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1025,7 +1025,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00.0' as timestamp), cast(2 as tinyint)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS TINYINT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS TINYINT)))' (timestamp and tinyint).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1034,7 +1034,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00.0' as timestamp), cast(2 as smallint)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS SMALLINT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS SMALLINT)))' (timestamp and smallint).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1043,7 +1043,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00.0' as timestamp), cast(2 as int)) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS INT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS INT)))' (timestamp and int).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1052,7 +1052,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00.0' as timestamp), cast(2 as bigint)) F struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS BIGINT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS BIGINT)))' (timestamp and bigint).; line 1 pos 7"],"queryContext":[]} -- !query 
@@ -1061,7 +1061,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00.0' as timestamp), cast(2 as float)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS FLOAT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS FLOAT)))' (timestamp and float).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1070,7 +1070,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00.0' as timestamp), cast(2 as double)) F struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS DOUBLE)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS DOUBLE)))' (timestamp and double).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1079,7 +1079,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00.0' as timestamp), cast(2 as decimal(10 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS DECIMAL(10,0))))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS DECIMAL(10,0))))' (timestamp and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1096,7 +1096,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00.0' as timestamp), cast('2' as binary)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST('2' AS BINARY)))' (timestamp and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1105,7 +1105,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00.0' as timestamp), cast(2 as boolean)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS BOOLEAN)))' (timestamp and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1130,7 +1130,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00' as date), cast(2 as tinyint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS TINYINT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS TINYINT)))' (date and tinyint).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1139,7 +1139,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00' as date), cast(2 as smallint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS SMALLINT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS SMALLINT)))' (date and smallint).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1148,7 +1148,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00' as date), cast(2 as int)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS INT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS INT)))' (date and int).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1157,7 +1157,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00' as date), cast(2 as bigint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS BIGINT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS BIGINT)))' (date and bigint).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1166,7 +1166,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00' as date), cast(2 as float)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS FLOAT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS FLOAT)))' (date and float).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1175,7 +1175,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00' as date), cast(2 as double)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS DOUBLE)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS DOUBLE)))' (date and double).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1184,7 +1184,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00' as date), cast(2 as decimal(10, 0))) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS DECIMAL(10,0))))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS DECIMAL(10,0))))' (date and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1201,7 +1201,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00' as date), cast('2' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST('2' AS BINARY)))' (date 
and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -1210,7 +1210,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00' as date), cast(2 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS BOOLEAN)))' (date and boolean).; line 1 pos 7"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/inConversion.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/inConversion.sql.out index 89db8e95cc593..7d6eccd61d262 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/inConversion.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/inConversion.sql.out @@ -77,7 +77,7 @@ SELECT cast(1 as tinyint) in (cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS TINYINT) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: tinyint != binary; line 1 pos 26"],"queryContext":[]} -- !query @@ -86,7 +86,7 @@ SELECT cast(1 as tinyint) in (cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS TINYINT) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: tinyint != boolean; line 1 pos 26"],"queryContext":[]} -- !query @@ -95,7 +95,7 @@ SELECT cast(1 as tinyint) in (cast('2017-12-11 09:30:00.0' as timestamp)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS TINYINT) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: tinyint != timestamp; line 1 pos 26"],"queryContext":[]} -- !query @@ -104,7 +104,7 @@ SELECT cast(1 as tinyint) in (cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS TINYINT) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: tinyint != date; line 1 pos 26"],"queryContext":[]} -- !query @@ -177,7 +177,7 @@ SELECT cast(1 as smallint) in (cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS SMALLINT) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: smallint != binary; line 1 pos 27"],"queryContext":[]} -- !query @@ -186,7 +186,7 @@ SELECT cast(1 as smallint) in (cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} 
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS SMALLINT) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: smallint != boolean; line 1 pos 27"],"queryContext":[]} -- !query @@ -195,7 +195,7 @@ SELECT cast(1 as smallint) in (cast('2017-12-11 09:30:00.0' as timestamp)) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS SMALLINT) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: smallint != timestamp; line 1 pos 27"],"queryContext":[]} -- !query @@ -204,7 +204,7 @@ SELECT cast(1 as smallint) in (cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS SMALLINT) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: smallint != date; line 1 pos 27"],"queryContext":[]} -- !query @@ -277,7 +277,7 @@ SELECT cast(1 as int) in (cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS INT) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: int != binary; line 1 pos 22"],"queryContext":[]} -- !query @@ -286,7 +286,7 @@ SELECT cast(1 as int) in (cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS INT) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: int != boolean; line 1 pos 22"],"queryContext":[]} -- !query @@ -295,7 +295,7 @@ SELECT cast(1 as int) in (cast('2017-12-11 09:30:00.0' as timestamp)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS INT) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: int != timestamp; line 1 pos 22"],"queryContext":[]} -- !query @@ -304,7 +304,7 @@ SELECT cast(1 as int) in (cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS INT) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: int != date; line 1 pos 22"],"queryContext":[]} -- !query @@ -377,7 +377,7 @@ SELECT cast(1 as bigint) in (cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BIGINT) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: bigint != binary; line 1 pos 25"],"queryContext":[]} -- !query @@ -386,7 +386,7 @@ SELECT cast(1 as bigint) in (cast(1 as boolean)) FROM t struct<> -- !query output 
org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BIGINT) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: bigint != boolean; line 1 pos 25"],"queryContext":[]} -- !query @@ -395,7 +395,7 @@ SELECT cast(1 as bigint) in (cast('2017-12-11 09:30:00.0' as timestamp)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BIGINT) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: bigint != timestamp; line 1 pos 25"],"queryContext":[]} -- !query @@ -404,7 +404,7 @@ SELECT cast(1 as bigint) in (cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BIGINT) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: bigint != date; line 1 pos 25"],"queryContext":[]} -- !query @@ -477,7 +477,7 @@ SELECT cast(1 as float) in (cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS FLOAT) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: float != binary; line 1 pos 24"],"queryContext":[]} -- !query @@ -486,7 +486,7 @@ SELECT cast(1 as float) in (cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS FLOAT) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: float != boolean; line 1 pos 24"],"queryContext":[]} -- !query @@ -495,7 +495,7 @@ SELECT cast(1 as float) in (cast('2017-12-11 09:30:00.0' as timestamp)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS FLOAT) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: float != timestamp; line 1 pos 24"],"queryContext":[]} -- !query @@ -504,7 +504,7 @@ SELECT cast(1 as float) in (cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS FLOAT) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: float != date; line 1 pos 24"],"queryContext":[]} -- !query @@ -577,7 +577,7 @@ SELECT cast(1 as double) in (cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DOUBLE) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: double != binary; line 1 pos 25"],"queryContext":[]} -- 
!query @@ -586,7 +586,7 @@ SELECT cast(1 as double) in (cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DOUBLE) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: double != boolean; line 1 pos 25"],"queryContext":[]} -- !query @@ -595,7 +595,7 @@ SELECT cast(1 as double) in (cast('2017-12-11 09:30:00.0' as timestamp)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DOUBLE) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: double != timestamp; line 1 pos 25"],"queryContext":[]} -- !query @@ -604,7 +604,7 @@ SELECT cast(1 as double) in (cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DOUBLE) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: double != date; line 1 pos 25"],"queryContext":[]} -- !query @@ -677,7 +677,7 @@ SELECT cast(1 as decimal(10, 0)) in (cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != binary; line 1 pos 33"],"queryContext":[]} -- !query @@ -686,7 +686,7 @@ SELECT cast(1 as decimal(10, 0)) in (cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != boolean; line 1 pos 33"],"queryContext":[]} -- !query @@ -695,7 +695,7 @@ SELECT cast(1 as decimal(10, 0)) in (cast('2017-12-11 09:30:00.0' as timestamp)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != timestamp; line 1 pos 33"],"queryContext":[]} -- !query @@ -704,7 +704,7 @@ SELECT cast(1 as decimal(10, 0)) in (cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != date; line 1 pos 33"],"queryContext":[]} -- !query @@ -777,7 +777,7 @@ SELECT cast(1 as string) in (cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} 
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS STRING) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: string != binary; line 1 pos 25"],"queryContext":[]} -- !query @@ -786,7 +786,7 @@ SELECT cast(1 as string) in (cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS STRING) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: string != boolean; line 1 pos 25"],"queryContext":[]} -- !query @@ -811,7 +811,7 @@ SELECT cast('1' as binary) in (cast(1 as tinyint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: binary != tinyint; line 1 pos 27"],"queryContext":[]} -- !query @@ -820,7 +820,7 @@ SELECT cast('1' as binary) in (cast(1 as smallint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: binary != smallint; line 1 pos 27"],"queryContext":[]} -- !query @@ -829,7 +829,7 @@ SELECT cast('1' as binary) in (cast(1 as int)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS INT)))' due to data type mismatch: Arguments must be same type but were: binary != int; line 1 pos 27"],"queryContext":[]} -- !query @@ -838,7 +838,7 @@ SELECT cast('1' as binary) in (cast(1 as bigint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: binary != bigint; line 1 pos 27"],"queryContext":[]} -- !query @@ -847,7 +847,7 @@ SELECT cast('1' as binary) in (cast(1 as float)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: binary != float; line 1 pos 27"],"queryContext":[]} -- !query @@ -856,7 +856,7 @@ SELECT cast('1' as binary) in (cast(1 as double)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: binary != double; line 1 pos 27"],"queryContext":[]} -- !query @@ -865,7 +865,7 @@ SELECT cast('1' as binary) in (cast(1 as decimal(10, 0))) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} 
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: binary != decimal(10,0); line 1 pos 27"],"queryContext":[]} -- !query @@ -874,7 +874,7 @@ SELECT cast('1' as binary) in (cast(1 as string)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS STRING)))' due to data type mismatch: Arguments must be same type but were: binary != string; line 1 pos 27"],"queryContext":[]} -- !query @@ -891,7 +891,7 @@ SELECT cast('1' as binary) in (cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: binary != boolean; line 1 pos 27"],"queryContext":[]} -- !query @@ -900,7 +900,7 @@ SELECT cast('1' as binary) in (cast('2017-12-11 09:30:00.0' as timestamp)) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: binary != timestamp; line 1 pos 27"],"queryContext":[]} -- !query @@ -909,7 +909,7 @@ SELECT cast('1' as binary) in (cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: binary != date; line 1 pos 27"],"queryContext":[]} -- !query @@ -918,7 +918,7 @@ SELECT true in (cast(1 as tinyint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(true IN (CAST(1 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: boolean != tinyint; line 1 pos 12"],"queryContext":[]} -- !query @@ -927,7 +927,7 @@ SELECT true in (cast(1 as smallint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(true IN (CAST(1 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: boolean != smallint; line 1 pos 12"],"queryContext":[]} -- !query @@ -936,7 +936,7 @@ SELECT true in (cast(1 as int)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(true IN (CAST(1 AS INT)))' due to data type mismatch: Arguments must be same type but were: boolean != int; line 1 pos 12"],"queryContext":[]} -- !query @@ -945,7 +945,7 @@ SELECT true in (cast(1 as bigint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} 
+{"errorClass":"legacy","messageParameters":["cannot resolve '(true IN (CAST(1 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: boolean != bigint; line 1 pos 12"],"queryContext":[]} -- !query @@ -954,7 +954,7 @@ SELECT true in (cast(1 as float)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(true IN (CAST(1 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: boolean != float; line 1 pos 12"],"queryContext":[]} -- !query @@ -963,7 +963,7 @@ SELECT true in (cast(1 as double)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(true IN (CAST(1 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: boolean != double; line 1 pos 12"],"queryContext":[]} -- !query @@ -972,7 +972,7 @@ SELECT true in (cast(1 as decimal(10, 0))) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(true IN (CAST(1 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: boolean != decimal(10,0); line 1 pos 12"],"queryContext":[]} -- !query @@ -981,7 +981,7 @@ SELECT true in (cast(1 as string)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(true IN (CAST(1 AS STRING)))' due to data type mismatch: Arguments must be same type but were: boolean != string; line 1 pos 12"],"queryContext":[]} -- !query @@ -990,7 +990,7 @@ SELECT true in (cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(true IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: boolean != binary; line 1 pos 12"],"queryContext":[]} -- !query @@ -1007,7 +1007,7 @@ SELECT true in (cast('2017-12-11 09:30:00.0' as timestamp)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(true IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: boolean != timestamp; line 1 pos 12"],"queryContext":[]} -- !query @@ -1016,7 +1016,7 @@ SELECT true in (cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(true IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: boolean != date; line 1 pos 12"],"queryContext":[]} -- !query @@ -1025,7 +1025,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast(2 as tinyint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN 
(CAST(2 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: timestamp != tinyint; line 1 pos 50"],"queryContext":[]} -- !query @@ -1034,7 +1034,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast(2 as smallint)) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: timestamp != smallint; line 1 pos 50"],"queryContext":[]} -- !query @@ -1043,7 +1043,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast(2 as int)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS INT)))' due to data type mismatch: Arguments must be same type but were: timestamp != int; line 1 pos 50"],"queryContext":[]} -- !query @@ -1052,7 +1052,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast(2 as bigint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: timestamp != bigint; line 1 pos 50"],"queryContext":[]} -- !query @@ -1061,7 +1061,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast(2 as float)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: timestamp != float; line 1 pos 50"],"queryContext":[]} -- !query @@ -1070,7 +1070,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast(2 as double)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: timestamp != double; line 1 pos 50"],"queryContext":[]} -- !query @@ -1079,7 +1079,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast(2 as decimal(10, 0))) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: timestamp != decimal(10,0); line 1 pos 50"],"queryContext":[]} -- !query @@ -1096,7 +1096,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast('2' as binary)) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: timestamp != 
binary; line 1 pos 50"],"queryContext":[]} -- !query @@ -1105,7 +1105,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast(2 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: timestamp != boolean; line 1 pos 50"],"queryContext":[]} -- !query @@ -1130,7 +1130,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast(2 as tinyint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: date != tinyint; line 1 pos 43"],"queryContext":[]} -- !query @@ -1139,7 +1139,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast(2 as smallint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: date != smallint; line 1 pos 43"],"queryContext":[]} -- !query @@ -1148,7 +1148,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast(2 as int)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS INT)))' due to data type mismatch: Arguments must be same type but were: date != int; line 1 pos 43"],"queryContext":[]} -- !query @@ -1157,7 +1157,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast(2 as bigint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: date != bigint; line 1 pos 43"],"queryContext":[]} -- !query @@ -1166,7 +1166,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast(2 as float)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: date != float; line 1 pos 43"],"queryContext":[]} -- !query @@ -1175,7 +1175,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast(2 as double)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: date != double; line 1 pos 43"],"queryContext":[]} -- !query @@ -1184,7 +1184,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast(2 as decimal(10, 0))) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: date != decimal(10,0); line 1 pos 43"],"queryContext":[]} -- !query @@ -1201,7 +1201,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast('2' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: date != binary; line 1 pos 43"],"queryContext":[]} -- !query @@ -1210,7 +1210,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast(2 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: date != boolean; line 1 pos 43"],"queryContext":[]} -- !query @@ -1299,7 +1299,7 @@ SELECT cast(1 as tinyint) in (cast(1 as tinyint), cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS TINYINT) IN (CAST(1 AS TINYINT), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: tinyint != binary; line 1 pos 26"],"queryContext":[]} -- !query @@ -1308,7 +1308,7 @@ SELECT cast(1 as tinyint) in (cast(1 as tinyint), cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS TINYINT) IN (CAST(1 AS TINYINT), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: tinyint != boolean; line 1 pos 26"],"queryContext":[]} -- !query @@ -1317,7 +1317,7 @@ SELECT cast(1 as tinyint) in (cast(1 as tinyint), cast('2017-12-11 09:30:00.0' a struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS TINYINT) IN (CAST(1 AS TINYINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: tinyint != timestamp; line 1 pos 26"],"queryContext":[]} -- !query @@ -1326,7 +1326,7 @@ SELECT cast(1 as tinyint) in (cast(1 as tinyint), cast('2017-12-11 09:30:00' as struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS TINYINT) IN (CAST(1 AS TINYINT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: tinyint != date; line 1 pos 26"],"queryContext":[]} -- !query @@ -1399,7 +1399,7 @@ SELECT cast(1 as smallint) in (cast(1 as smallint), cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 
'(CAST(1 AS SMALLINT) IN (CAST(1 AS SMALLINT), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: smallint != binary; line 1 pos 27"],"queryContext":[]} -- !query @@ -1408,7 +1408,7 @@ SELECT cast(1 as smallint) in (cast(1 as smallint), cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS SMALLINT) IN (CAST(1 AS SMALLINT), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: smallint != boolean; line 1 pos 27"],"queryContext":[]} -- !query @@ -1417,7 +1417,7 @@ SELECT cast(1 as smallint) in (cast(1 as smallint), cast('2017-12-11 09:30:00.0' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS SMALLINT) IN (CAST(1 AS SMALLINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: smallint != timestamp; line 1 pos 27"],"queryContext":[]} -- !query @@ -1426,7 +1426,7 @@ SELECT cast(1 as smallint) in (cast(1 as smallint), cast('2017-12-11 09:30:00' a struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS SMALLINT) IN (CAST(1 AS SMALLINT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: smallint != date; line 1 pos 27"],"queryContext":[]} -- !query @@ -1499,7 +1499,7 @@ SELECT cast(1 as int) in (cast(1 as int), cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS INT) IN (CAST(1 AS INT), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: int != binary; line 1 pos 22"],"queryContext":[]} -- !query @@ -1508,7 +1508,7 @@ SELECT cast(1 as int) in (cast(1 as int), cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS INT) IN (CAST(1 AS INT), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: int != boolean; line 1 pos 22"],"queryContext":[]} -- !query @@ -1517,7 +1517,7 @@ SELECT cast(1 as int) in (cast(1 as int), cast('2017-12-11 09:30:00.0' as timest struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS INT) IN (CAST(1 AS INT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: int != timestamp; line 1 pos 22"],"queryContext":[]} -- !query @@ -1526,7 +1526,7 @@ SELECT cast(1 as int) in (cast(1 as int), cast('2017-12-11 09:30:00' as date)) F struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS INT) IN (CAST(1 AS INT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be 
same type but were: int != date; line 1 pos 22"],"queryContext":[]} -- !query @@ -1599,7 +1599,7 @@ SELECT cast(1 as bigint) in (cast(1 as bigint), cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BIGINT) IN (CAST(1 AS BIGINT), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: bigint != binary; line 1 pos 25"],"queryContext":[]} -- !query @@ -1608,7 +1608,7 @@ SELECT cast(1 as bigint) in (cast(1 as bigint), cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BIGINT) IN (CAST(1 AS BIGINT), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: bigint != boolean; line 1 pos 25"],"queryContext":[]} -- !query @@ -1617,7 +1617,7 @@ SELECT cast(1 as bigint) in (cast(1 as bigint), cast('2017-12-11 09:30:00.0' as struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BIGINT) IN (CAST(1 AS BIGINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: bigint != timestamp; line 1 pos 25"],"queryContext":[]} -- !query @@ -1626,7 +1626,7 @@ SELECT cast(1 as bigint) in (cast(1 as bigint), cast('2017-12-11 09:30:00' as da struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BIGINT) IN (CAST(1 AS BIGINT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: bigint != date; line 1 pos 25"],"queryContext":[]} -- !query @@ -1699,7 +1699,7 @@ SELECT cast(1 as float) in (cast(1 as float), cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS FLOAT) IN (CAST(1 AS FLOAT), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: float != binary; line 1 pos 24"],"queryContext":[]} -- !query @@ -1708,7 +1708,7 @@ SELECT cast(1 as float) in (cast(1 as float), cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS FLOAT) IN (CAST(1 AS FLOAT), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: float != boolean; line 1 pos 24"],"queryContext":[]} -- !query @@ -1717,7 +1717,7 @@ SELECT cast(1 as float) in (cast(1 as float), cast('2017-12-11 09:30:00.0' as ti struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS FLOAT) IN (CAST(1 AS FLOAT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: float != timestamp; line 1 pos 24"],"queryContext":[]} -- !query @@ -1726,7 +1726,7 @@ SELECT cast(1 as float) in 
(cast(1 as float), cast('2017-12-11 09:30:00' as date struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS FLOAT) IN (CAST(1 AS FLOAT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: float != date; line 1 pos 24"],"queryContext":[]} -- !query @@ -1799,7 +1799,7 @@ SELECT cast(1 as double) in (cast(1 as double), cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DOUBLE) IN (CAST(1 AS DOUBLE), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: double != binary; line 1 pos 25"],"queryContext":[]} -- !query @@ -1808,7 +1808,7 @@ SELECT cast(1 as double) in (cast(1 as double), cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DOUBLE) IN (CAST(1 AS DOUBLE), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: double != boolean; line 1 pos 25"],"queryContext":[]} -- !query @@ -1817,7 +1817,7 @@ SELECT cast(1 as double) in (cast(1 as double), cast('2017-12-11 09:30:00.0' as struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DOUBLE) IN (CAST(1 AS DOUBLE), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: double != timestamp; line 1 pos 25"],"queryContext":[]} -- !query @@ -1826,7 +1826,7 @@ SELECT cast(1 as double) in (cast(1 as double), cast('2017-12-11 09:30:00' as da struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DOUBLE) IN (CAST(1 AS DOUBLE), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: double != date; line 1 pos 25"],"queryContext":[]} -- !query @@ -1899,7 +1899,7 @@ SELECT cast(1 as decimal(10, 0)) in (cast(1 as decimal(10, 0)), cast('1' as bina struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST(1 AS DECIMAL(10,0)), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != binary; line 1 pos 33"],"queryContext":[]} -- !query @@ -1908,7 +1908,7 @@ SELECT cast(1 as decimal(10, 0)) in (cast(1 as decimal(10, 0)), cast(1 as boolea struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST(1 AS DECIMAL(10,0)), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != boolean; line 1 pos 33"],"queryContext":[]} -- !query @@ -1917,7 +1917,7 @@ SELECT cast(1 as decimal(10, 0)) in (cast(1 as decimal(10, 0)), cast('2017-12-11 struct<> -- !query 
output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != timestamp; line 1 pos 33"],"queryContext":[]} -- !query @@ -1926,7 +1926,7 @@ SELECT cast(1 as decimal(10, 0)) in (cast(1 as decimal(10, 0)), cast('2017-12-11 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != date; line 1 pos 33"],"queryContext":[]} -- !query @@ -1999,7 +1999,7 @@ SELECT cast(1 as string) in (cast(1 as string), cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS STRING) IN (CAST(1 AS STRING), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: string != binary; line 1 pos 25"],"queryContext":[]} -- !query @@ -2008,7 +2008,7 @@ SELECT cast(1 as string) in (cast(1 as string), cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS STRING) IN (CAST(1 AS STRING), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: string != boolean; line 1 pos 25"],"queryContext":[]} -- !query @@ -2033,7 +2033,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast(1 as tinyint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: binary != tinyint; line 1 pos 27"],"queryContext":[]} -- !query @@ -2042,7 +2042,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast(1 as smallint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: binary != smallint; line 1 pos 27"],"queryContext":[]} -- !query @@ -2051,7 +2051,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast(1 as int)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS INT)))' due to data type mismatch: Arguments must be same type but were: binary != int; line 1 pos 27"],"queryContext":[]} -- !query @@ -2060,7 +2060,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast(1 as bigint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: binary != bigint; line 1 pos 27"],"queryContext":[]} -- !query @@ -2069,7 +2069,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast(1 as float)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: binary != float; line 1 pos 27"],"queryContext":[]} -- !query @@ -2078,7 +2078,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast(1 as double)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: binary != double; line 1 pos 27"],"queryContext":[]} -- !query @@ -2087,7 +2087,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast(1 as decimal(10, 0))) F struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: binary != decimal(10,0); line 1 pos 27"],"queryContext":[]} -- !query @@ -2096,7 +2096,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast(1 as string)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS STRING)))' due to data type mismatch: Arguments must be same type but were: binary != string; line 1 pos 27"],"queryContext":[]} -- !query @@ -2113,7 +2113,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: binary != boolean; line 1 pos 27"],"queryContext":[]} -- !query @@ -2122,7 +2122,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast('2017-12-11 09:30:00.0' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: binary != timestamp; line 1 pos 27"],"queryContext":[]} -- !query @@ -2131,7 +2131,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast('2017-12-11 09:30:00' a struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot 
resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: binary != date; line 1 pos 27"],"queryContext":[]} -- !query @@ -2140,7 +2140,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast(1 as tinyint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: boolean != tinyint; line 1 pos 28"],"queryContext":[]} -- !query @@ -2149,7 +2149,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast(1 as smallint)) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: boolean != smallint; line 1 pos 28"],"queryContext":[]} -- !query @@ -2158,7 +2158,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast(1 as int)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS INT)))' due to data type mismatch: Arguments must be same type but were: boolean != int; line 1 pos 28"],"queryContext":[]} -- !query @@ -2167,7 +2167,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast(1 as bigint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: boolean != bigint; line 1 pos 28"],"queryContext":[]} -- !query @@ -2176,7 +2176,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast(1 as float)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: boolean != float; line 1 pos 28"],"queryContext":[]} -- !query @@ -2185,7 +2185,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast(1 as double)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: boolean != double; line 1 pos 28"],"queryContext":[]} -- !query @@ -2194,7 +2194,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast(1 as decimal(10, 0))) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS DECIMAL(10,0))))' due to data type mismatch: Arguments 
must be same type but were: boolean != decimal(10,0); line 1 pos 28"],"queryContext":[]} -- !query @@ -2203,7 +2203,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast(1 as string)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS STRING)))' due to data type mismatch: Arguments must be same type but were: boolean != string; line 1 pos 28"],"queryContext":[]} -- !query @@ -2212,7 +2212,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast('1' as binary)) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: boolean != binary; line 1 pos 28"],"queryContext":[]} -- !query @@ -2229,7 +2229,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast('2017-12-11 09:30:00. struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: boolean != timestamp; line 1 pos 28"],"queryContext":[]} -- !query @@ -2238,7 +2238,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast('2017-12-11 09:30:00' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: boolean != date; line 1 pos 28"],"queryContext":[]} -- !query @@ -2247,7 +2247,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast('2017-12-12 09:30:00. struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: timestamp != tinyint; line 1 pos 50"],"queryContext":[]} -- !query @@ -2256,7 +2256,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast('2017-12-12 09:30:00. struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: timestamp != smallint; line 1 pos 50"],"queryContext":[]} -- !query @@ -2265,7 +2265,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast('2017-12-12 09:30:00. 
struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS INT)))' due to data type mismatch: Arguments must be same type but were: timestamp != int; line 1 pos 50"],"queryContext":[]} -- !query @@ -2274,7 +2274,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast('2017-12-12 09:30:00. struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: timestamp != bigint; line 1 pos 50"],"queryContext":[]} -- !query @@ -2283,7 +2283,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast('2017-12-12 09:30:00. struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: timestamp != float; line 1 pos 50"],"queryContext":[]} -- !query @@ -2292,7 +2292,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast('2017-12-12 09:30:00. struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: timestamp != double; line 1 pos 50"],"queryContext":[]} -- !query @@ -2301,7 +2301,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast('2017-12-12 09:30:00. struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: timestamp != decimal(10,0); line 1 pos 50"],"queryContext":[]} -- !query @@ -2318,7 +2318,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast('2017-12-12 09:30:00. struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: timestamp != binary; line 1 pos 50"],"queryContext":[]} -- !query @@ -2327,7 +2327,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast('2017-12-12 09:30:00. 
struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: timestamp != boolean; line 1 pos 50"],"queryContext":[]} -- !query @@ -2352,7 +2352,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast('2017-12-12 09:30:00' as dat struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: date != tinyint; line 1 pos 43"],"queryContext":[]} -- !query @@ -2361,7 +2361,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast('2017-12-12 09:30:00' as dat struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: date != smallint; line 1 pos 43"],"queryContext":[]} -- !query @@ -2370,7 +2370,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast('2017-12-12 09:30:00' as dat struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS INT)))' due to data type mismatch: Arguments must be same type but were: date != int; line 1 pos 43"],"queryContext":[]} -- !query @@ -2379,7 +2379,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast('2017-12-12 09:30:00' as dat struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: date != bigint; line 1 pos 43"],"queryContext":[]} -- !query @@ -2388,7 +2388,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast('2017-12-12 09:30:00' as dat struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: date != float; line 1 pos 43"],"queryContext":[]} -- !query @@ -2397,7 +2397,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast('2017-12-12 09:30:00' as dat struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: date != double; line 1 pos 43"],"queryContext":[]} -- !query @@ -2406,7 +2406,7 
@@ SELECT cast('2017-12-12 09:30:00' as date) in (cast('2017-12-12 09:30:00' as dat struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: date != decimal(10,0); line 1 pos 43"],"queryContext":[]} -- !query @@ -2423,7 +2423,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast('2017-12-12 09:30:00' as dat struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: date != binary; line 1 pos 43"],"queryContext":[]} -- !query @@ -2432,7 +2432,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast('2017-12-12 09:30:00' as dat struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: date != boolean; line 1 pos 43"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapZipWith.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapZipWith.sql.out index 4bd8da674c52c..db4b954ee06a1 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapZipWith.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapZipWith.sql.out @@ -82,7 +82,7 @@ FROM various_maps struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'map_zip_with(various_maps.decimal_map1, various_maps.decimal_map2, lambdafunction(struct(k, v1, v2), k, v1, v2))' due to argument data type mismatch: The input to function map_zip_with should have been two maps with compatible key types, but the key types are [decimal(36,0), decimal(36,35)].; line 1 pos 7"],"queryContext":[]} -- !query @@ -110,7 +110,7 @@ FROM various_maps struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'map_zip_with(various_maps.decimal_map2, various_maps.int_map, lambdafunction(struct(k, v1, v2), k, v1, v2))' due to argument data type mismatch: The input to function map_zip_with should have been two maps with compatible key types, but the key types are [decimal(36,35), int].; line 1 pos 7"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapconcat.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapconcat.sql.out index b67ed4e751776..3522516b2b5f1 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapconcat.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapconcat.sql.out @@ -91,7 +91,7 @@ FROM various_maps struct<> -- !query output 
org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'map_concat(various_maps.tinyint_map1, various_maps.array_map1)' due to data type mismatch: input to function map_concat should all be the same type, but it's [map, map,array>]; line 2 pos 4"],"queryContext":[]} -- !query @@ -102,7 +102,7 @@ FROM various_maps struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'map_concat(various_maps.boolean_map1, various_maps.int_map2)' due to data type mismatch: input to function map_concat should all be the same type, but it's [map, map]; line 2 pos 4"],"queryContext":[]} -- !query @@ -113,7 +113,7 @@ FROM various_maps struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'map_concat(various_maps.int_map1, various_maps.struct_map2)' due to data type mismatch: input to function map_concat should all be the same type, but it's [map, map,struct>]; line 2 pos 4"],"queryContext":[]} -- !query @@ -124,7 +124,7 @@ FROM various_maps struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'map_concat(various_maps.struct_map1, various_maps.array_map2)' due to data type mismatch: input to function map_concat should all be the same type, but it's [map,struct>, map,array>]; line 2 pos 4"],"queryContext":[]} -- !query @@ -135,4 +135,4 @@ FROM various_maps struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'map_concat(various_maps.int_map1, various_maps.array_map2)' due to data type mismatch: input to function map_concat should all be the same type, but it's [map, map,array>]; line 2 pos 4"],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out index 35ca14b6f3381..54429cc7a2f63 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out @@ -77,7 +77,7 @@ SELECT '1' + cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) + CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) + CAST('1' AS BINARY))' (double and binary).; line 1 pos 7"],"queryContext":[]} -- !query @@ -86,7 +86,7 @@ SELECT '1' + cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) + CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) + CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7"],"queryContext":[]} -- !query @@ -95,7 +95,7 @@ SELECT '1' + cast('2017-12-11 09:30:00.0' as timestamp) 
diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out
index 35ca14b6f3381..54429cc7a2f63 100644
--- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out
@@ -77,7 +77,7 @@ SELECT '1' + cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) + CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) + CAST('1' AS BINARY))' (double and binary).; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -86,7 +86,7 @@ SELECT '1' + cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) + CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) + CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -95,7 +95,7 @@ SELECT '1' + cast('2017-12-11 09:30:00.0' as timestamp) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -104,7 +104,7 @@ SELECT '1' + cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -177,7 +177,7 @@ SELECT '1' - cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) - CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) - CAST('1' AS BINARY))' (double and binary).; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -186,7 +186,7 @@ SELECT '1' - cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) - CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) - CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -195,7 +195,7 @@ SELECT '1' - cast('2017-12-11 09:30:00.0' as timestamp) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '('1' - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, ''1'' is of string type.; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -276,7 +276,7 @@ SELECT '1' * cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) * CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) * CAST('1' AS BINARY))' (double and binary).; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -285,7 +285,7 @@ SELECT '1' * cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) * CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) * CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -294,7 +294,7 @@ SELECT '1' * cast('2017-12-11 09:30:00.0' as timestamp) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) * CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) * CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -303,7 +303,7 @@ SELECT '1' * cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) * CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) * CAST('2017-12-11 09:30:00' AS DATE))' (double and date).; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -376,7 +376,7 @@ SELECT '1' / cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) / CAST('1' AS BINARY))' (double and binary).; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -385,7 +385,7 @@ SELECT '1' / cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) / CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -394,7 +394,7 @@ SELECT '1' / cast('2017-12-11 09:30:00.0' as timestamp) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -403,7 +403,7 @@ SELECT '1' / cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) / CAST('2017-12-11 09:30:00' AS DATE))' (double and date).; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -476,7 +476,7 @@ SELECT '1' % cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) % CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) % CAST('1' AS BINARY))' (double and binary).; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -485,7 +485,7 @@ SELECT '1' % cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) % CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) % CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -494,7 +494,7 @@ SELECT '1' % cast('2017-12-11 09:30:00.0' as timestamp) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -503,7 +503,7 @@ SELECT '1' % cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) % CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) % CAST('2017-12-11 09:30:00' AS DATE))' (double and date).; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -576,7 +576,7 @@ SELECT pmod('1', cast('1' as binary)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST('1' AS DOUBLE), CAST('1' AS BINARY))' due to data type mismatch: differing types in 'pmod(CAST('1' AS DOUBLE), CAST('1' AS BINARY))' (double and binary).; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -585,7 +585,7 @@ SELECT pmod('1', cast(1 as boolean)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST('1' AS DOUBLE), CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in 'pmod(CAST('1' AS DOUBLE), CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -594,7 +594,7 @@ SELECT pmod('1', cast('2017-12-11 09:30:00.0' as timestamp)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST('1' AS DOUBLE), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in 'pmod(CAST('1' AS DOUBLE), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -603,7 +603,7 @@ SELECT pmod('1', cast('2017-12-11 09:30:00' as date)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST('1' AS DOUBLE), CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in 'pmod(CAST('1' AS DOUBLE), CAST('2017-12-11 09:30:00' AS DATE))' (double and date).; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -668,7 +668,7 @@ SELECT cast('1' as binary) + '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) + CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) + CAST('1' AS DOUBLE))' (binary and double).; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -677,7 +677,7 @@ SELECT cast(1 as boolean) + '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BOOLEAN) + CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) + CAST('1' AS DOUBLE))' (boolean and double).; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -686,7 +686,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) + '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -695,7 +695,7 @@ SELECT cast('2017-12-11 09:30:00' as date) + '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -760,7 +760,7 @@ SELECT cast('1' as binary) - '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) - CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) - CAST('1' AS DOUBLE))' (binary and double).; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -769,7 +769,7 @@ SELECT cast(1 as boolean) - '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BOOLEAN) - CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) - CAST('1' AS DOUBLE))' (boolean and double).; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -778,7 +778,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) - '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - '1')' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, ''1'' is of string type.; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -787,7 +787,7 @@ SELECT cast('2017-12-11 09:30:00' as date) - '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'date_sub(CAST('2017-12-11 09:30:00' AS DATE), CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -852,7 +852,7 @@ SELECT cast('1' as binary) * '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) * CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) * CAST('1' AS DOUBLE))' (binary and double).; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -861,7 +861,7 @@ SELECT cast(1 as boolean) * '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BOOLEAN) * CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) * CAST('1' AS DOUBLE))' (boolean and double).; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -870,7 +870,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) * '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) * CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) * CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -879,7 +879,7 @@ SELECT cast('2017-12-11 09:30:00' as date) * '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) * CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) * CAST('1' AS DOUBLE))' (date and double).; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -944,7 +944,7 @@ SELECT cast('1' as binary) / '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) / CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST('1' AS DOUBLE))' (binary and double).; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -953,7 +953,7 @@ SELECT cast(1 as boolean) / '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BOOLEAN) / CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST('1' AS DOUBLE))' (boolean and double).; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -962,7 +962,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -971,7 +971,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('1' AS DOUBLE))' (date and double).; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -1036,7 +1036,7 @@ SELECT cast('1' as binary) % '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) % CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) % CAST('1' AS DOUBLE))' (binary and double).; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -1045,7 +1045,7 @@ SELECT cast(1 as boolean) % '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BOOLEAN) % CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) % CAST('1' AS DOUBLE))' (boolean and double).; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -1054,7 +1054,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) % '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -1063,7 +1063,7 @@ SELECT cast('2017-12-11 09:30:00' as date) % '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) % CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) % CAST('1' AS DOUBLE))' (date and double).; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -1128,7 +1128,7 @@ SELECT pmod(cast('1' as binary), '1') FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST('1' AS BINARY), CAST('1' AS DOUBLE))' due to data type mismatch: differing types in 'pmod(CAST('1' AS BINARY), CAST('1' AS DOUBLE))' (binary and double).; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -1137,7 +1137,7 @@ SELECT pmod(cast(1 as boolean), '1') FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST(1 AS BOOLEAN), CAST('1' AS DOUBLE))' due to data type mismatch: differing types in 'pmod(CAST(1 AS BOOLEAN), CAST('1' AS DOUBLE))' (boolean and double).; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -1146,7 +1146,7 @@ SELECT pmod(cast('2017-12-11 09:30:00.0' as timestamp), '1') FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST('1' AS DOUBLE))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -1155,7 +1155,7 @@ SELECT pmod(cast('2017-12-11 09:30:00' as date), '1') FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST('1' AS DOUBLE))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST('1' AS DOUBLE))' (date and double).; line 1 pos 7"],"queryContext":[]}
 -- !query
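Every promoteStrings hunk above follows the same pattern: the assertion target is no longer a free-form message but the JSON projection of the thrown exception. That projection can be reproduced interactively; a sketch, assuming a local SparkSession and a Spark version where AnalysisException implements SparkThrowable:

    import org.apache.spark.sql.{AnalysisException, SparkSession}

    val spark = SparkSession.builder().appName("sketch").master("local[1]").getOrCreate()
    try {
      // '1' is implicitly cast to DOUBLE, which cannot be added to BINARY,
      // exactly as recorded in the @@ -77 hunk above.
      spark.sql("SELECT '1' + cast('1' as binary)").collect()
    } catch {
      case e: AnalysisException =>
        println(e.getErrorClass) // null for a legacy, not-yet-migrated error
        println(e.getMessage)    // the text that lands in messageParameters
    }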
diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/stringCastAndExpressions.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/stringCastAndExpressions.sql.out
index b5055eaed94fd..cbd1d288bcd6e 100644
--- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/stringCastAndExpressions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/stringCastAndExpressions.sql.out
@@ -101,7 +101,7 @@ select cast(a as array) from t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 't.a' due to data type mismatch: cannot cast string to array; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -110,7 +110,7 @@ select cast(a as struct) from t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 't.a' due to data type mismatch: cannot cast string to struct; line 1 pos 7"],"queryContext":[]}
 -- !query
@@ -119,7 +119,7 @@ select cast(a as map) from t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["cannot resolve 't.a' due to data type mismatch: cannot cast string to map; line 1 pos 7"],"queryContext":[]}
 -- !query
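The widenSetOperationTypes hunks below all exercise one analyzer rule: UNION needs a common wider type for each column pair, and binary, boolean, datetime, and the numeric types do not widen into each other. One entry can be reproduced with the same session as above; the temporary view definition is an assumption modeled on the test script convention:

    spark.sql("CREATE OR REPLACE TEMPORARY VIEW t AS SELECT 1")
    try {
      // Mirrors the @@ -85 hunk below: tinyint vs binary has no common type.
      spark.sql(
        "SELECT cast(1 as tinyint) FROM t UNION SELECT cast('2' as binary) FROM t"
      ).collect()
    } catch {
      case e: AnalysisException =>
        // Prints the "Union can only be performed on tables with the
        // compatible column types. ..." text seen in messageParameters below.
        println(e.getMessage)
    }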
diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/widenSetOperationTypes.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/widenSetOperationTypes.sql.out
index 602c86025febd..96aff310ab25a 100644
--- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/widenSetOperationTypes.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/widenSetOperationTypes.sql.out
@@ -85,7 +85,7 @@ SELECT cast(1 as tinyint) FROM t UNION SELECT cast('2' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with tinyint at same column of first table"],"queryContext":[]}
 -- !query
@@ -94,7 +94,7 @@ SELECT cast(1 as tinyint) FROM t UNION SELECT cast(2 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with tinyint at same column of first table"],"queryContext":[]}
 -- !query
@@ -103,7 +103,7 @@ SELECT cast(1 as tinyint) FROM t UNION SELECT cast('2017-12-11 09:30:00.0' as ti
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with tinyint at same column of first table"],"queryContext":[]}
 -- !query
@@ -112,7 +112,7 @@ SELECT cast(1 as tinyint) FROM t UNION SELECT cast('2017-12-11 09:30:00' as date
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with tinyint at same column of first table"],"queryContext":[]}
 -- !query
@@ -193,7 +193,7 @@ SELECT cast(1 as smallint) FROM t UNION SELECT cast('2' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with smallint at same column of first table"],"queryContext":[]}
 -- !query
@@ -202,7 +202,7 @@ SELECT cast(1 as smallint) FROM t UNION SELECT cast(2 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with smallint at same column of first table"],"queryContext":[]}
 -- !query
@@ -211,7 +211,7 @@ SELECT cast(1 as smallint) FROM t UNION SELECT cast('2017-12-11 09:30:00.0' as t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with smallint at same column of first table"],"queryContext":[]}
 -- !query
@@ -220,7 +220,7 @@ SELECT cast(1 as smallint) FROM t UNION SELECT cast('2017-12-11 09:30:00' as dat
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with smallint at same column of first table"],"queryContext":[]}
 -- !query
@@ -301,7 +301,7 @@ SELECT cast(1 as int) FROM t UNION SELECT cast('2' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with int at same column of first table"],"queryContext":[]}
 -- !query
@@ -310,7 +310,7 @@ SELECT cast(1 as int) FROM t UNION SELECT cast(2 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with int at same column of first table"],"queryContext":[]}
 -- !query
@@ -319,7 +319,7 @@ SELECT cast(1 as int) FROM t UNION SELECT cast('2017-12-11 09:30:00.0' as timest
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with int at same column of first table"],"queryContext":[]}
 -- !query
@@ -328,7 +328,7 @@ SELECT cast(1 as int) FROM t UNION SELECT cast('2017-12-11 09:30:00' as date) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with int at same column of first table"],"queryContext":[]}
 -- !query
@@ -409,7 +409,7 @@ SELECT cast(1 as bigint) FROM t UNION SELECT cast('2' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with bigint at same column of first table"],"queryContext":[]}
 -- !query
@@ -418,7 +418,7 @@ SELECT cast(1 as bigint) FROM t UNION SELECT cast(2 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with bigint at same column of first table"],"queryContext":[]}
 -- !query
@@ -427,7 +427,7 @@ SELECT cast(1 as bigint) FROM t UNION SELECT cast('2017-12-11 09:30:00.0' as tim
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with bigint at same column of first table"],"queryContext":[]}
 -- !query
@@ -436,7 +436,7 @@ SELECT cast(1 as bigint) FROM t UNION SELECT cast('2017-12-11 09:30:00' as date)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with bigint at same column of first table"],"queryContext":[]}
 -- !query
@@ -517,7 +517,7 @@ SELECT cast(1 as float) FROM t UNION SELECT cast('2' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with float at same column of first table"],"queryContext":[]}
 -- !query
@@ -526,7 +526,7 @@ SELECT cast(1 as float) FROM t UNION SELECT cast(2 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with float at same column of first table"],"queryContext":[]}
 -- !query
@@ -535,7 +535,7 @@ SELECT cast(1 as float) FROM t UNION SELECT cast('2017-12-11 09:30:00.0' as time
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with float at same column of first table"],"queryContext":[]}
 -- !query
@@ -544,7 +544,7 @@ SELECT cast(1 as float) FROM t UNION SELECT cast('2017-12-11 09:30:00' as date)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with float at same column of first table"],"queryContext":[]}
 -- !query
@@ -625,7 +625,7 @@ SELECT cast(1 as double) FROM t UNION SELECT cast('2' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with double at same column of first table"],"queryContext":[]}
 -- !query
@@ -634,7 +634,7 @@ SELECT cast(1 as double) FROM t UNION SELECT cast(2 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with double at same column of first table"],"queryContext":[]}
 -- !query
@@ -643,7 +643,7 @@ SELECT cast(1 as double) FROM t UNION SELECT cast('2017-12-11 09:30:00.0' as tim
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with double at same column of first table"],"queryContext":[]}
 -- !query
@@ -652,7 +652,7 @@ SELECT cast(1 as double) FROM t UNION SELECT cast('2017-12-11 09:30:00' as date)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with double at same column of first table"],"queryContext":[]}
 -- !query
@@ -733,7 +733,7 @@ SELECT cast(1 as decimal(10, 0)) FROM t UNION SELECT cast('2' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with decimal(10,0) at same column of first table"],"queryContext":[]}
 -- !query
@@ -742,7 +742,7 @@ SELECT cast(1 as decimal(10, 0)) FROM t UNION SELECT cast(2 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with decimal(10,0) at same column of first table"],"queryContext":[]}
 -- !query
@@ -751,7 +751,7 @@ SELECT cast(1 as decimal(10, 0)) FROM t UNION SELECT cast('2017-12-11 09:30:00.0
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with decimal(10,0) at same column of first table"],"queryContext":[]}
 -- !query
@@ -760,7 +760,7 @@ SELECT cast(1 as decimal(10, 0)) FROM t UNION SELECT cast('2017-12-11 09:30:00'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with decimal(10,0) at same column of first table"],"queryContext":[]}
 -- !query
@@ -841,7 +841,7 @@ SELECT cast(1 as string) FROM t UNION SELECT cast('2' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with string at same column of first table"],"queryContext":[]}
 -- !query
@@ -850,7 +850,7 @@ SELECT cast(1 as string) FROM t UNION SELECT cast(2 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with string at same column of first table"],"queryContext":[]}
 -- !query
@@ -877,7 +877,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast(2 as tinyint) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is tinyint type which is not compatible with binary at same column of first table"],"queryContext":[]}
 -- !query
@@ -886,7 +886,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast(2 as smallint) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is smallint type which is not compatible with binary at same column of first table"],"queryContext":[]}
 -- !query
@@ -895,7 +895,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast(2 as int) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is int type which is not compatible with binary at same column of first table"],"queryContext":[]}
 -- !query
@@ -904,7 +904,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast(2 as bigint) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is bigint type which is not compatible with binary at same column of first table"],"queryContext":[]}
 -- !query
@@ -913,7 +913,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast(2 as float) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is float type which is not compatible with binary at same column of first table"],"queryContext":[]}
 -- !query
@@ -922,7 +922,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast(2 as double) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is double type which is not compatible with binary at same column of first table"],"queryContext":[]}
 -- !query
@@ -931,7 +931,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast(2 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is decimal(10,0) type which is not compatible with binary at same column of first table"],"queryContext":[]}
 -- !query
@@ -940,7 +940,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast(2 as string) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is string type which is not compatible with binary at same column of first table"],"queryContext":[]}
 -- !query
@@ -958,7 +958,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast(2 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with binary at same column of first table"],"queryContext":[]}
 -- !query
@@ -967,7 +967,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast('2017-12-11 09:30:00.0' as t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with binary at same column of first table"],"queryContext":[]}
 -- !query
@@ -976,7 +976,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast('2017-12-11 09:30:00' as dat
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with binary at same column of first table"],"queryContext":[]}
 -- !query
@@ -985,7 +985,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast(2 as tinyint) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is tinyint type which is not compatible with boolean at same column of first table"],"queryContext":[]}
 -- !query
@@ -994,7 +994,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast(2 as smallint) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is smallint type which is not compatible with boolean at same column of first table"],"queryContext":[]}
 -- !query
@@ -1003,7 +1003,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast(2 as int) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is int type which is not compatible with boolean at same column of first table"],"queryContext":[]}
 -- !query
@@ -1012,7 +1012,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast(2 as bigint) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is bigint type which is not compatible with boolean at same column of first table"],"queryContext":[]}
 -- !query
@@ -1021,7 +1021,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast(2 as float) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is float type which is not compatible with boolean at same column of first table"],"queryContext":[]}
 -- !query
@@ -1030,7 +1030,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast(2 as double) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is double type which is not compatible with boolean at same column of first table"],"queryContext":[]}
 -- !query
@@ -1039,7 +1039,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast(2 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is decimal(10,0) type which is not compatible with boolean at same column of first table"],"queryContext":[]}
 -- !query
@@ -1048,7 +1048,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast(2 as string) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is string type which is not compatible with boolean at same column of first table"],"queryContext":[]}
 -- !query
@@ -1057,7 +1057,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast('2' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with boolean at same column of first table"],"queryContext":[]}
 -- !query
@@ -1074,7 +1074,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast('2017-12-11 09:30:00.0' as ti
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with boolean at same column of first table"],"queryContext":[]}
 -- !query
@@ -1083,7 +1083,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast('2017-12-11 09:30:00' as date
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with boolean at same column of first table"],"queryContext":[]}
 -- !query
@@ -1092,7 +1092,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) FROM t UNION SELECT cast(2 as
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is tinyint type which is not compatible with timestamp at same column of first table"],"queryContext":[]}
 -- !query
@@ -1101,7 +1101,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) FROM t UNION SELECT cast(2 as
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is smallint type which is not compatible with timestamp at same column of first table"],"queryContext":[]}
 -- !query
@@ -1110,7 +1110,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) FROM t UNION SELECT cast(2 as
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is int type which is not compatible with timestamp at same column of first table"],"queryContext":[]}
 -- !query
@@ -1119,7 +1119,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) FROM t UNION SELECT cast(2 as
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is bigint type which is not compatible with timestamp at same column of first table"],"queryContext":[]}
 -- !query
@@ -1128,7 +1128,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) FROM t UNION SELECT cast(2 as
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is float type which is not compatible with timestamp at same column of first table"],"queryContext":[]}
 -- !query
@@ -1137,7 +1137,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) FROM t UNION SELECT cast(2 as
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is double type which is not compatible with timestamp at same column of first table"],"queryContext":[]}
 -- !query
@@ -1146,7 +1146,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) FROM t UNION SELECT cast(2 as
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is decimal(10,0) type which is not compatible with timestamp at same column of first table"],"queryContext":[]}
 -- !query
@@ -1164,7 +1164,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) FROM t UNION SELECT cast('2' a
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with timestamp at same column of first table"],"queryContext":[]}
 -- !query
@@ -1173,7 +1173,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) FROM t UNION SELECT cast(2 as
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with timestamp at same column of first table"],"queryContext":[]}
 -- !query
@@ -1200,7 +1200,7 @@ SELECT cast('2017-12-12 09:30:00' as date) FROM t UNION SELECT cast(2 as tinyint
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is tinyint type which is not compatible with date at same column of first table"],"queryContext":[]}
 -- !query
@@ -1209,7 +1209,7 @@ SELECT cast('2017-12-12 09:30:00' as date) FROM t UNION SELECT cast(2 as smallin
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is smallint type which is not compatible with date at same column of first table"],"queryContext":[]}
 -- !query
@@ -1218,7 +1218,7 @@ SELECT cast('2017-12-12 09:30:00' as date) FROM t UNION SELECT cast(2 as int) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is int type which is not compatible with date at same column of first table"],"queryContext":[]}
 -- !query
@@ -1227,7 +1227,7 @@ SELECT cast('2017-12-12 09:30:00' as date) FROM t UNION SELECT cast(2 as bigint)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is bigint type which is not compatible with date at same column of first table"],"queryContext":[]}
 -- !query
@@ -1236,7 +1236,7 @@ SELECT cast('2017-12-12 09:30:00' as date) FROM t UNION SELECT cast(2 as float)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is float type which is not compatible with date at same column of first table"],"queryContext":[]}
 -- !query
@@ -1245,7 +1245,7 @@ SELECT cast('2017-12-12 09:30:00' as date) FROM t UNION SELECT cast(2 as double)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is double type which is not compatible with date at same column of first table"],"queryContext":[]}
 -- !query
@@ -1254,7 +1254,7 @@ SELECT cast('2017-12-12 09:30:00' as date) FROM t UNION SELECT cast(2 as decimal
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is decimal(10,0) type which is not compatible with date at same column of first table"],"queryContext":[]}
 -- !query
@@ -1272,7 +1272,7 @@ SELECT cast('2017-12-12 09:30:00' as date) FROM t UNION SELECT cast('2' as binar
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with date at same column of first table"],"queryContext":[]}
 -- !query
@@ -1281,7 +1281,7 @@ SELECT cast('2017-12-12 09:30:00' as date) FROM t UNION SELECT cast(2 as boolean
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":null,"messageParameters":[],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with date at same column of first table"],"queryContext":[]}
 -- !query
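The windowFrameCoercion entries that follow check RANGE frame bounds: a bound must be numeric or an interval type, and the frame type must agree with the ORDER BY column. The query below is reconstructed from the truncated hunk header plus the message text, so treat it as illustrative; it reuses the session and imports from the earlier sketches:

    try {
      // Rejected during analysis: a string upper bound in a RANGE frame,
      // matching the 'upper bound' message in the @@ -165 hunk below.
      spark.sql(
        "SELECT COUNT(*) OVER (PARTITION BY 1 ORDER BY cast(1 as string) DESC " +
        "RANGE BETWEEN CURRENT ROW AND cast(1 as string) FOLLOWING) FROM t"
      ).collect()
    } catch {
      case e: AnalysisException => println(e.getMessage)
    }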
The first column of the second table is boolean type which is not compatible with date at same column of first table"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/windowFrameCoercion.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/windowFrameCoercion.sql.out index ff11f2a3b4b31..3ef0c23a2b27a 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/windowFrameCoercion.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/windowFrameCoercion.sql.out @@ -165,7 +165,7 @@ SELECT COUNT(*) OVER (PARTITION BY 1 ORDER BY cast(1 as string) DESC RANGE BETWE struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'RANGE BETWEEN CURRENT ROW AND CAST(1 AS STRING) FOLLOWING' due to data type mismatch: The data type of the upper bound 'string' does not match the expected data type '(numeric or interval day to second or interval year to month or interval)'.; line 1 pos 21"],"queryContext":[]} -- !query @@ -174,7 +174,7 @@ SELECT COUNT(*) OVER (PARTITION BY 1 ORDER BY cast('1' as binary) DESC RANGE BET struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'RANGE BETWEEN CURRENT ROW AND CAST(1 AS BINARY) FOLLOWING' due to data type mismatch: The data type of the upper bound 'binary' does not match the expected data type '(numeric or interval day to second or interval year to month or interval)'.; line 1 pos 21"],"queryContext":[]} -- !query @@ -183,7 +183,7 @@ SELECT COUNT(*) OVER (PARTITION BY 1 ORDER BY cast(1 as boolean) DESC RANGE BETW struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'RANGE BETWEEN CURRENT ROW AND CAST(1 AS BOOLEAN) FOLLOWING' due to data type mismatch: The data type of the upper bound 'boolean' does not match the expected data type '(numeric or interval day to second or interval year to month or interval)'.; line 1 pos 21"],"queryContext":[]} -- !query @@ -192,7 +192,7 @@ SELECT COUNT(*) OVER (PARTITION BY 1 ORDER BY cast('2017-12-11 09:30:00.0' as ti struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(PARTITION BY 1 ORDER BY CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) DESC NULLS LAST RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: The data type 'timestamp' used in the order specification does not match the data type 'int' which is used in the range frame.; line 1 pos 21"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udaf.sql.out b/sql/core/src/test/resources/sql-tests/results/udaf.sql.out index 9d948f6ee1290..e76bfab8b950b 100644 --- a/sql/core/src/test/resources/sql-tests/results/udaf.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udaf.sql.out @@ -31,7 +31,7 @@ SELECT default.myDoubleAvg(int_col1, 3) as my_avg from t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Invalid number of arguments for function 
spark_catalog.default.myDoubleAvg. Expected: 1; Found: 2; line 1 pos 7"],"queryContext":[]} -- !query @@ -48,7 +48,7 @@ SELECT default.udaf1(int_col1) as udaf1 from t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Can not load class 'test.non.existent.udaf' when registering the function 'spark_catalog.default.udaf1', please make sure it is on the classpath; line 1 pos 7"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-aggregates_part1.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-aggregates_part1.sql.out index c18dce09f73a9..980904ff90467 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-aggregates_part1.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-aggregates_part1.sql.out @@ -475,7 +475,7 @@ having exists (select 1 from onek b struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Found an aggregate function in a correlated predicate that has both outer and local references, which is not supported: sum(DISTINCT (outer(a.four) + b.four))"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-aggregates_part3.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-aggregates_part3.sql.out index d9691fd6f45ba..dc93cee640741 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-aggregates_part3.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-aggregates_part3.sql.out @@ -5,7 +5,7 @@ select udf(max(min(unique1))) from tenk1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["It is not allowed to use an aggregate function in the argument of another aggregate function. 
Please use the inner aggregate function in a sub-query."],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-join.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-join.sql.out index 5d5ffa1311b74..00dba9f0336ea 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-join.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-join.sql.out @@ -546,7 +546,7 @@ SELECT udf('') AS `xxx`, udf(i) AS i, udf(k), udf(t) AS t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Reference 'i' is ambiguous, could be: spark_catalog.default.j1_tbl.i, spark_catalog.default.j2_tbl.i.; line 1 pos 29"],"queryContext":[]} -- !query @@ -3263,7 +3263,7 @@ select * from struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Reference 'f1' is ambiguous, could be: j.f1, j.f1.; line 2 pos 72"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_having.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_having.sql.out index 8589d74709bcd..1d665b7d0e892 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_having.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_having.sql.out @@ -140,7 +140,7 @@ SELECT udf(a) FROM test_having HAVING udf(min(a)) < udf(max(a)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["grouping expressions sequence is empty, and 'spark_catalog.default.test_having.a' is not an aggregate function. 
Wrap '(min(spark_catalog.default.test_having.a) AS `min(a#x)`, max(spark_catalog.default.test_having.a) AS `max(a#x)`)' in windowing function(s) or wrap 'spark_catalog.default.test_having.a' in first() (or first_value) if you don't care which value you get."],"queryContext":[]} -- !query @@ -173,8 +173,8 @@ SELECT 1 AS one FROM test_having WHERE 1/udf(a) = 1 HAVING 1 < 2 -- !query schema struct<> -- !query output -org.apache.spark.SparkException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +org.apache.spark.SparkArithmeticException +{"errorClass":"DIVIDE_BY_ZERO","sqlState":"22012","messageParameters":["\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":39,"stopIndex":46,"fragment":"1/udf(a"}]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_implicit.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_implicit.sql.out index 5e1a31205f6ab..02ada1190b8e3 100755 --- a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_implicit.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_implicit.sql.out @@ -203,7 +203,7 @@ SELECT udf(c), udf(count(*)) FROM test_missing_target GROUP BY 3 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["GROUP BY position 3 is not in select list (valid range is [1, 2]); line 1 pos 63"],"queryContext":[]} -- !query @@ -214,7 +214,7 @@ SELECT udf(count(*)) FROM test_missing_target x, test_missing_target y struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Reference 'b' is ambiguous, could be: x.b, y.b.; line 3 pos 14"],"queryContext":[]} -- !query @@ -390,7 +390,7 @@ SELECT udf(count(udf(x.a))) FROM test_missing_target x, test_missing_target y struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Reference 'b' is ambiguous, could be: x.b, y.b.; line 3 pos 14"],"queryContext":[]} -- !query @@ -415,7 +415,7 @@ SELECT udf(count(udf(b))) FROM test_missing_target x, test_missing_target y struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Reference 'b' is ambiguous, could be: x.b, y.b.; line 1 pos 21"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-except-all.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-except-all.sql.out index 6394e6cf0ab9c..38d551625b98a 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/udf-except-all.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-except-all.sql.out @@ -138,7 +138,7 @@ SELECT array(1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["ExceptAll can only be performed on tables with the compatible column types. 
The first column of the second table is array type which is not compatible with int at same column of first table"],"queryContext":[]} -- !query @@ -210,7 +210,7 @@ SELECT k, v FROM tab4 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["ExceptAll can only be performed on tables with the same number of columns, but the first table has 1 columns and the second table has 2 columns"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-except.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-except.sql.out index bc8536c67e350..05638b055ec9b 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/udf-except.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-except.sql.out @@ -100,4 +100,4 @@ WHERE udf(t1.v) >= (SELECT min(udf(t2.v)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Correlated column is not allowed in predicate (CAST(udf(cast(k as string)) AS STRING) = CAST(udf(cast(outer(k#x) as string)) AS STRING)):\nAggregate [cast(udf(cast(max(cast(udf(cast(v#x as string)) as int)) as string)) as int) AS udf(max(udf(v)))#x]\n+- Filter (cast(udf(cast(k#x as string)) as string) = cast(udf(cast(outer(k#x) as string)) as string))\n +- SubqueryAlias t2\n +- View (`t2`, [k#x,v#x])\n +- Project [cast(k#x as string) AS k#x, cast(v#x as int) AS v#x]\n +- Project [k#x, v#x]\n +- SubqueryAlias t2\n +- LocalRelation [k#x, v#x]\n"],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-group-analytics.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-group-analytics.sql.out index 490a1f56ff6bf..597a30d222ce2 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/udf-group-analytics.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-group-analytics.sql.out @@ -207,7 +207,7 @@ SELECT course, udf(year), GROUPING(course) FROM courseSales GROUP BY course, udf struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["grouping() can only be used with GroupingSets/Cube/Rollup"],"queryContext":[]} -- !query @@ -216,7 +216,7 @@ SELECT course, udf(year), GROUPING_ID(course, year) FROM courseSales GROUP BY ud struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["grouping_id() can only be used with GroupingSets/Cube/Rollup"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-group-by.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-group-by.sql.out index fb31cf222071d..1eae366102452 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/udf-group-by.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-group-by.sql.out @@ -15,7 +15,7 @@ SELECT udf(a), udf(COUNT(b)) FROM testData struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["grouping expressions sequence is empty, and 'testdata.a' is not an aggregate function. 
Wrap '(CAST(udf(cast(count(b) as string)) AS BIGINT) AS `udf(count(b))`)' in windowing function(s) or wrap 'testdata.a' in first() (or first_value) if you don't care which value you get."],"queryContext":[]} -- !query @@ -43,7 +43,7 @@ SELECT udf(a), udf(COUNT(udf(b))) FROM testData GROUP BY b struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["expression 'testdata.a' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get."],"queryContext":[]} -- !query @@ -107,7 +107,7 @@ SELECT udf(a + 2), udf(COUNT(b)) FROM testData GROUP BY a + 1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["expression 'testdata.a' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get."],"queryContext":[]} -- !query @@ -164,7 +164,7 @@ SELECT udf(COUNT(b)) AS k FROM testData GROUP BY k struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["aggregate functions are not allowed in GROUP BY, but found CAST(udf(cast(count(b) as string)) AS BIGINT)"],"queryContext":[]} -- !query @@ -182,7 +182,7 @@ SELECT k AS a, udf(COUNT(udf(v))) FROM testDataHasSameNameWithAlias GROUP BY udf struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["expression 'testdatahassamenamewithalias.k' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get."],"queryContext":[]} -- !query @@ -271,7 +271,7 @@ SELECT udf(id) FROM range(10) HAVING id > 0 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["grouping expressions sequence is empty, and 'id' is not an aggregate function. 
Wrap '()' in windowing function(s) or wrap 'id' in first() (or first_value) if you don't care which value you get."],"queryContext":[]} -- !query @@ -377,7 +377,7 @@ SELECT every(udf(1)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'every(CAST(udf(cast(1 as string)) AS INT))' due to data type mismatch: argument 1 requires boolean type, however, 'CAST(udf(cast(1 as string)) AS INT)' is of int type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -386,7 +386,7 @@ SELECT some(udf(1S)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'some(CAST(udf(cast(1 as string)) AS SMALLINT))' due to data type mismatch: argument 1 requires boolean type, however, 'CAST(udf(cast(1 as string)) AS SMALLINT)' is of smallint type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -395,7 +395,7 @@ SELECT any(udf(1L)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'any(CAST(udf(cast(1 as string)) AS BIGINT))' due to data type mismatch: argument 1 requires boolean type, however, 'CAST(udf(cast(1 as string)) AS BIGINT)' is of bigint type.; line 1 pos 7"],"queryContext":[]} -- !query @@ -404,7 +404,7 @@ SELECT udf(every("true")) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'every('true')' due to data type mismatch: argument 1 requires boolean type, however, ''true'' is of string type.; line 1 pos 11"],"queryContext":[]} -- !query @@ -490,7 +490,7 @@ SELECT udf(count(*)) FROM test_agg WHERE count(*) > 1L struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nAggregate/Window/Generate expressions are not valid in where clause of the query.\nExpression in where clause: [(count(1) > 1L)]\nInvalid expressions: [count(1)]"],"queryContext":[]} -- !query @@ -499,7 +499,7 @@ SELECT udf(count(*)) FROM test_agg WHERE count(*) + 1L > 1L struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nAggregate/Window/Generate expressions are not valid in where clause of the query.\nExpression in where clause: [((count(1) + 1L) > 1L)]\nInvalid expressions: [count(1)]"],"queryContext":[]} -- !query @@ -508,4 +508,4 @@ SELECT udf(count(*)) FROM test_agg WHERE k = 1 or k = 2 or count(*) + 1L > 1L or struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nAggregate/Window/Generate expressions are not valid in where clause of the query.\nExpression in where clause: [(((test_agg.k = 1) OR (test_agg.k = 2)) OR (((count(1) + 1L) > 1L) OR (max(test_agg.k) > 1)))]\nInvalid expressions: [count(1), max(test_agg.k)]"],"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-inline-table.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-inline-table.sql.out index 2218f2082b04b..3c4060abad878 100644 --- 
a/sql/core/src/test/resources/sql-tests/results/udf/udf-inline-table.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-inline-table.sql.out @@ -94,7 +94,7 @@ select udf(a), b from values ("one", rand(5)), ("two", 3.0D) as data(a, b) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot evaluate expression rand(5) in inline table definition; line 1 pos 37"],"queryContext":[]} -- !query @@ -103,7 +103,7 @@ select udf(a), udf(b) from values ("one", 2.0), ("two") as data(a, b) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["expected 2 columns but found 1 columns in row 1; line 1 pos 27"],"queryContext":[]} -- !query @@ -112,7 +112,7 @@ select udf(a), udf(b) from values ("one", array(0, 1)), ("two", struct(1, 2)) as struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["incompatible types found in column b for inline table; line 1 pos 27"],"queryContext":[]} -- !query @@ -121,7 +121,7 @@ select udf(a), udf(b) from values ("one"), ("two") as data(a, b) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["expected 2 columns but found 1 columns in row 0; line 1 pos 27"],"queryContext":[]} -- !query @@ -130,7 +130,7 @@ select udf(a), udf(b) from values ("one", random_not_exist_func(1)), ("two", 2) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Undefined function: random_not_exist_func. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.random_not_exist_func.; line 1 pos 42"],"queryContext":[]} -- !query @@ -139,7 +139,7 @@ select udf(a), udf(b) from values ("one", count(1)), ("two", 2) as data(a, b) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot evaluate expression count(1) in inline table definition; line 1 pos 42"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-intersect-all.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-intersect-all.sql.out index 5b241c5e8cb1b..a42c4d162531d 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/udf-intersect-all.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-intersect-all.sql.out @@ -95,7 +95,7 @@ SELECT array(1), udf(2) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["IntersectAll can only be performed on tables with the compatible column types. 
The first column of the second table is array type which is not compatible with int at same column of first table"],"queryContext":[]} -- !query @@ -106,7 +106,7 @@ SELECT udf(k), udf(v) FROM tab2 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["IntersectAll can only be performed on tables with the same number of columns, but the first table has 1 columns and the second table has 2 columns"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-pivot.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-pivot.sql.out index 99a6602306bb7..ef7d02985b05c 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/udf-pivot.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-pivot.sql.out @@ -199,7 +199,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Aggregate expression required for pivot, but 'coursesales.earnings' did not appear in any aggregate function."],"queryContext":[]} -- !query @@ -214,7 +214,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Aggregate expression required for pivot, but '__auto_generated_subquery_name.year' did not appear in any aggregate function."],"queryContext":[]} -- !query @@ -259,7 +259,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["It is not allowed to use an aggregate function in the argument of another aggregate function. Please use the inner aggregate function in a sub-query."],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-udaf.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-udaf.sql.out index 6a4ff02d5efc7..a7441bb82e04e 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/udf-udaf.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-udaf.sql.out @@ -31,7 +31,7 @@ SELECT default.myDoubleAvg(udf(int_col1), udf(3)) as my_avg from t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Invalid number of arguments for function spark_catalog.default.myDoubleAvg. 
Expected: 1; Found: 2; line 1 pos 7"],"queryContext":[]} -- !query @@ -48,7 +48,7 @@ SELECT default.udaf1(udf(int_col1)) as udaf1, udf(default.udaf1(udf(int_col1))) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Can not load class 'test.non.existent.udaf' when registering the function 'spark_catalog.default.udaf1', please make sure it is on the classpath; line 1 pos 94"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-window.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-window.sql.out index 5b47d3633e22a..80ef954652169 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/udf-window.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-window.sql.out @@ -58,7 +58,7 @@ ROWS BETWEEN CURRENT ROW AND 2147483648 FOLLOWING) FROM testData ORDER BY udf(ca struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'ROWS BETWEEN CURRENT ROW AND 2147483648L FOLLOWING' due to data type mismatch: The data type of the upper bound 'bigint' does not match the expected data type 'int'.; line 1 pos 46"],"queryContext":[]} -- !query @@ -188,7 +188,7 @@ ROWS BETWEEN UNBOUNDED FOLLOWING AND 1 FOLLOWING) FROM testData ORDER BY cate, u struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'ROWS BETWEEN UNBOUNDED FOLLOWING AND 1 FOLLOWING' due to data type mismatch: Window frame upper bound '1' does not follow the lower bound 'unboundedfollowing$()'.; line 1 pos 38"],"queryContext":[]} -- !query @@ -198,7 +198,7 @@ RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING) FROM testData ORDER BY cate, udf(val) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(PARTITION BY CAST(udf(cast(cate as string)) AS STRING) RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: A range window frame cannot be used in an unordered window specification.; line 1 pos 38"],"queryContext":[]} -- !query @@ -208,7 +208,7 @@ RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING) FROM testData ORDER BY cate, udf(val) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(PARTITION BY CAST(udf(cast(cate as string)) AS STRING) ORDER BY CAST(udf(cast(val as string)) AS INT) ASC NULLS FIRST, testdata.cate ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: A range window frame with value boundaries cannot be used in a window specification with multiple order by expressions: cast(udf(cast(val#x as string)) as int) ASC NULLS FIRST,cate#x ASC NULLS FIRST; line 1 pos 38"],"queryContext":[]} -- !query @@ -218,7 +218,7 @@ RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING) FROM testData ORDER BY cate, udf(val) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(PARTITION BY CAST(udf(cast(cate as string)) AS STRING) ORDER BY current_timestamp() ASC NULLS 
FIRST RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: The data type 'timestamp' used in the order specification does not match the data type 'int' which is used in the range frame.; line 1 pos 38"],"queryContext":[]} -- !query @@ -228,7 +228,7 @@ RANGE BETWEEN 1 FOLLOWING AND 1 PRECEDING) FROM testData ORDER BY udf(cate), val struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'RANGE BETWEEN 1 FOLLOWING AND 1 PRECEDING' due to data type mismatch: The lower bound of a window frame must be less than or equal to the upper bound; line 1 pos 38"],"queryContext":[]} -- !query @@ -238,7 +238,7 @@ RANGE BETWEEN CURRENT ROW AND current_date PRECEDING) FROM testData ORDER BY cat struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nFrame bound value must be a literal.(line 2, pos 30)\n\n== SQL ==\nSELECT udf(val), cate, count(val) OVER(PARTITION BY udf(cate) ORDER BY udf(val)\nRANGE BETWEEN CURRENT ROW AND current_date PRECEDING) FROM testData ORDER BY cate, val(val)\n------------------------------^^^\n"],"queryContext":[]} -- !query @@ -315,7 +315,7 @@ SELECT udf(val), cate, row_number() OVER(PARTITION BY cate) FROM testData ORDER struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Window function row_number() requires window to be ordered, please add ORDER BY clause. For example SELECT row_number()(value_expr) OVER (PARTITION BY window_partition ORDER BY window_ordering) from table"],"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/window.sql.out b/sql/core/src/test/resources/sql-tests/results/window.sql.out index dd1f3cdd36a87..afd16c8998ceb 100644 --- a/sql/core/src/test/resources/sql-tests/results/window.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/window.sql.out @@ -102,7 +102,7 @@ ROWS BETWEEN CURRENT ROW AND 2147483648 FOLLOWING) FROM testData ORDER BY cate, struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'ROWS BETWEEN CURRENT ROW AND 2147483648L FOLLOWING' due to data type mismatch: The data type of the upper bound 'bigint' does not match the expected data type 'int'.; line 1 pos 41"],"queryContext":[]} -- !query @@ -324,7 +324,7 @@ ORDER BY cate, val_date struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(PARTITION BY testdata.cate ORDER BY testdata.val_date ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND INTERVAL '1 02:03:04.001' DAY TO SECOND FOLLOWING)' due to data type mismatch: The data type 'date' used in the order specification does not match the data type 'interval day to second' which is used in the range frame.; line 1 pos 46"],"queryContext":[]} -- !query @@ -351,7 +351,7 @@ ROWS BETWEEN UNBOUNDED FOLLOWING AND 1 FOLLOWING) FROM testData ORDER BY cate, v struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'ROWS 
BETWEEN UNBOUNDED FOLLOWING AND 1 FOLLOWING' due to data type mismatch: Window frame upper bound '1' does not follow the lower bound 'unboundedfollowing$()'.; line 1 pos 33"],"queryContext":[]} -- !query @@ -361,7 +361,7 @@ RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING) FROM testData ORDER BY cate, val struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(PARTITION BY testdata.cate RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: A range window frame cannot be used in an unordered window specification.; line 1 pos 33"],"queryContext":[]} -- !query @@ -371,7 +371,7 @@ RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING) FROM testData ORDER BY cate, val struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(PARTITION BY testdata.cate ORDER BY testdata.val ASC NULLS FIRST, testdata.cate ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: A range window frame with value boundaries cannot be used in a window specification with multiple order by expressions: val#x ASC NULLS FIRST,cate#x ASC NULLS FIRST; line 1 pos 33"],"queryContext":[]} -- !query @@ -381,7 +381,7 @@ RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING) FROM testData ORDER BY cate, val struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve '(PARTITION BY testdata.cate ORDER BY current_timestamp() ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: The data type 'timestamp' used in the order specification does not match the data type 'int' which is used in the range frame.; line 1 pos 33"],"queryContext":[]} -- !query @@ -391,7 +391,7 @@ RANGE BETWEEN 1 FOLLOWING AND 1 PRECEDING) FROM testData ORDER BY cate, val struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["cannot resolve 'RANGE BETWEEN 1 FOLLOWING AND 1 PRECEDING' due to data type mismatch: The lower bound of a window frame must be less than or equal to the upper bound; line 1 pos 33"],"queryContext":[]} -- !query @@ -401,7 +401,7 @@ RANGE BETWEEN CURRENT ROW AND current_date PRECEDING) FROM testData ORDER BY cat struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["\nFrame bound value must be a literal.(line 2, pos 30)\n\n== SQL ==\nSELECT val, cate, count(val) OVER(PARTITION BY cate ORDER BY val\nRANGE BETWEEN CURRENT ROW AND current_date PRECEDING) FROM testData ORDER BY cate, val\n------------------------------^^^\n"],"queryContext":[]} -- !query @@ -478,7 +478,7 @@ SELECT val, cate, row_number() OVER(PARTITION BY cate) FROM testData ORDER BY ca struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Window function row_number() requires window to be ordered, please add ORDER BY clause. 
For example SELECT row_number()(value_expr) OVER (PARTITION BY window_partition ORDER BY window_ordering) from table"],"queryContext":[]} -- !query @@ -550,7 +550,7 @@ FROM testData ORDER BY cate, val struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["window aggregate function with filter predicate is not supported yet."],"queryContext":[]} -- !query @@ -1185,7 +1185,7 @@ FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Window specification w is not defined in the WINDOW clause."],"queryContext":[]} -- !query @@ -1197,4 +1197,4 @@ FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":null,"messageParameters":[],"queryContext":[]} +{"errorClass":"legacy","messageParameters":["Window specification w is not defined in the WINDOW clause."],"queryContext":[]}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestHelper.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestHelper.scala
index b40e7ffafbaae..4b8a038c5127b 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestHelper.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestHelper.scala
@@ -24,7 +24,7 @@ import org.json4s.JsonAST.{JArray, JObject}
 import org.json4s.JsonDSL._
 import org.json4s.jackson.JsonMethods.{compact, render}
 
-import org.apache.spark.SparkThrowable
+import org.apache.spark.{SparkException, SparkThrowable}
 import org.apache.spark.sql.catalyst.planning.PhysicalOperation
 import org.apache.spark.sql.catalyst.plans.logical._
 import org.apache.spark.sql.execution.HiveResult.hiveResultString
@@ -93,6 +93,13 @@ trait SQLQueryTestHelper {
     compact(render(jValue))
   }
 
+  private def toLegacyJson(msg: String): String = {
+    val jValue = ("errorClass" -> "legacy") ~
+      ("messageParameters" -> JArray(List(JString(msg)))) ~
+      ("queryContext" -> JArray(List.empty))
+    compact(render(jValue))
+  }
+
   /**
    * This method handles exceptions that occur during query execution, as they may need special
    * care to become comparable to the expected output.
@@ -103,14 +110,27 @@ trait SQLQueryTestHelper {
     try {
       result
     } catch {
-      case e: SparkThrowable =>
+      case e: SparkThrowable if e.getErrorClass != null =>
+        (emptySchema, Seq(e.getClass.getName, toJson(e)))
+      case a: AnalysisException =>
         // Do not output the logical plan tree which contains expression IDs.
         // Also implement a crude way of masking expression IDs in the error message
         // with the generic pattern "#x".
-        (emptySchema, Seq(e.getClass.getName, toJson(e).replaceAll("#\\d+", "#x")))
+        val msg = if (a.plan.nonEmpty) a.getSimpleMessage else a.getMessage
+        (emptySchema, Seq(a.getClass.getName, toLegacyJson(msg.replaceAll("#\\d+", "#x"))))
+      case s: SparkException if s.getCause != null =>
+        // For a runtime exception, it is hard to match because its message contains
+        // information of stage, task ID, etc.
+        // To make result matching simpler, here we match the cause of the exception if it exists.
+        s.getCause match {
+          case e: SparkThrowable if e.getErrorClass != null =>
+            (emptySchema, Seq(e.getClass.getName, toJson(e)))
+          case cause =>
+            (emptySchema, Seq(cause.getClass.getName, toLegacyJson(cause.getMessage)))
+        }
       case NonFatal(e) =>
        // If there is an exception, put the exception class followed by the message.
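        // Illustrative aside, inferred only from toJson/toLegacyJson above and the
        // golden files earlier in this patch (example values, not from any one test):
        // whichever branch fires, the golden file records a single JSON object, e.g.
        //   {"errorClass":"DIVIDE_BY_ZERO","sqlState":"22012","messageParameters":[...],"queryContext":[...]}
        // for a SparkThrowable that carries an error class, and
        //   {"errorClass":"legacy","messageParameters":["<message>"],"queryContext":[]}
        // for every other exception, so error output is uniformly machine-parsable.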
-        (emptySchema, Seq(e.getClass.getName, e.getMessage))
+        (emptySchema, Seq(e.getClass.getName, toLegacyJson(e.getMessage)))
     }
   }
 }

From f8db16fdc408d6529d71db3481266e71f1e5407c Mon Sep 17 00:00:00 2001
From: Max Gekk
Date: Wed, 10 Aug 2022 22:15:18 +0500
Subject: [PATCH 3/4] Change the type of messageParameters from array to map

---
 .../sql-tests/results/ansi/array.sql.out | 34 +- .../sql-tests/results/ansi/cast.sql.out | 100 +-- .../sql-tests/results/ansi/date.sql.out | 44 +- .../ansi/datetime-parsing-invalid.sql.out | 56 +- .../ansi/decimalArithmeticOperations.sql.out | 20 +- .../ansi/higher-order-functions.sql.out | 2 +- .../sql-tests/results/ansi/interval.sql.out | 240 +++--- .../sql-tests/results/ansi/literals.sql.out | 44 +- .../sql-tests/results/ansi/map.sql.out | 12 +- .../results/ansi/string-functions.sql.out | 26 +- .../sql-tests/results/ansi/timestamp.sql.out | 56 +- .../results/ansi/try_arithmetic.sql.out | 2 +- .../ansi/try_datetime_functions.sql.out | 2 +- .../results/ansi/try_element_at.sql.out | 2 +- .../resources/sql-tests/results/array.sql.out | 8 +- .../sql-tests/results/bitwise.sql.out | 8 +- .../resources/sql-tests/results/cast.sql.out | 14 +- .../ceil-floor-with-scale-param.sql.out | 12 +- .../sql-tests/results/change-column.sql.out | 16 +- .../sql-tests/results/charvarchar.sql.out | 4 +- .../results/columnresolution-negative.sql.out | 28 +- .../sql-tests/results/comments.sql.out | 4 +- .../resources/sql-tests/results/count.sql.out | 4 +- .../sql-tests/results/csv-functions.sql.out | 18 +- .../sql-tests/results/cte-legacy.sql.out | 2 +- .../sql-tests/results/cte-nested.sql.out | 16 +- .../resources/sql-tests/results/cte.sql.out | 10 +- .../resources/sql-tests/results/date.sql.out | 42 +- .../datetime-formatting-invalid.sql.out | 74 +- .../datetime-formatting-legacy.sql.out | 12 +- .../sql-tests/results/datetime-legacy.sql.out | 68 +- .../results/datetime-parsing-invalid.sql.out | 18 +- .../sql-tests/results/describe-query.sql.out | 6 +- .../sql-tests/results/describe.sql.out | 10 +- .../sql-tests/results/except-all.sql.out | 4 +- .../sql-tests/results/extract.sql.out | 22 +- .../sql-tests/results/group-analytics.sql.out | 16 +- .../sql-tests/results/group-by-filter.sql.out | 6 +- .../results/group-by-ordinal.sql.out | 20 +- .../sql-tests/results/group-by.sql.out | 36 +- .../sql-tests/results/grouping_set.sql.out | 6 +- .../sql-tests/results/having.sql.out | 2 +- .../results/higher-order-functions.sql.out | 2 +- .../sql-tests/results/ilike-all.sql.out | 2 +- .../sql-tests/results/ilike-any.sql.out | 2 +- .../sql-tests/results/inline-table.sql.out | 12 +- .../sql-tests/results/intersect-all.sql.out | 4 +- .../sql-tests/results/interval.sql.out | 218 +++--- .../sql-tests/results/join-lateral.sql.out | 20 +- .../sql-tests/results/json-functions.sql.out | 32 +- .../sql-tests/results/like-all.sql.out | 2 +- .../sql-tests/results/like-any.sql.out | 2 +- .../resources/sql-tests/results/limit.sql.out | 12 +- .../sql-tests/results/literals.sql.out | 44 +- .../resources/sql-tests/results/map.sql.out | 4 +- .../sql-tests/results/misc-functions.sql.out | 14 +- .../sql-tests/results/natural-join.sql.out | 2 +- .../results/order-by-ordinal.sql.out | 6 +- .../sql-tests/results/percentiles.sql.out | 24 +- .../resources/sql-tests/results/pivot.sql.out | 18 +- .../postgreSQL/aggregates_part1.sql.out | 4 +- .../postgreSQL/aggregates_part3.sql.out | 2 +- .../results/postgreSQL/boolean.sql.out | 32 +- .../results/postgreSQL/create_view.sql.out | 36 +-
.../sql-tests/results/postgreSQL/date.sql.out | 30 +- .../results/postgreSQL/float4.sql.out | 14 +- .../results/postgreSQL/float8.sql.out | 10 +- .../sql-tests/results/postgreSQL/int4.sql.out | 12 +- .../sql-tests/results/postgreSQL/int8.sql.out | 24 +- .../results/postgreSQL/interval.sql.out | 20 +- .../sql-tests/results/postgreSQL/join.sql.out | 20 +- .../results/postgreSQL/limit.sql.out | 4 +- .../results/postgreSQL/numeric.sql.out | 12 +- .../results/postgreSQL/select_having.sql.out | 6 +- .../postgreSQL/select_implicit.sql.out | 12 +- .../results/postgreSQL/strings.sql.out | 16 +- .../sql-tests/results/postgreSQL/text.sql.out | 14 +- .../results/postgreSQL/union.sql.out | 20 +- .../results/postgreSQL/window_part2.sql.out | 6 +- .../results/postgreSQL/window_part3.sql.out | 24 +- .../results/postgreSQL/window_part4.sql.out | 2 +- .../sql-tests/results/postgreSQL/with.sql.out | 8 +- .../results/query_regex_column.sql.out | 16 +- .../sql-tests/results/random.sql.out | 4 +- .../results/regexp-functions.sql.out | 34 +- .../sql-tests/results/show-tables.sql.out | 12 +- .../sql-tests/results/show-views.sql.out | 2 +- .../sql-tests/results/show_columns.sql.out | 10 +- .../sql-compatibility-functions.sql.out | 2 +- .../results/string-functions.sql.out | 18 +- .../subquery/in-subquery/in-basic.sql.out | 2 +- .../invalid-correlation.sql.out | 12 +- .../subq-input-typecheck.sql.out | 10 +- .../sql-tests/results/table-aliases.sql.out | 6 +- .../results/table-valued-functions.sql.out | 10 +- .../results/tablesample-negative.sql.out | 4 +- .../sql-tests/results/timestamp-ntz.sql.out | 2 +- .../sql-tests/results/timestamp.sql.out | 44 +- .../timestampNTZ/timestamp-ansi.sql.out | 54 +- .../results/timestampNTZ/timestamp.sql.out | 40 +- .../sql-tests/results/timezone.sql.out | 14 +- .../sql-tests/results/transform.sql.out | 12 +- .../sql-tests/results/try_arithmetic.sql.out | 2 +- .../results/try_datetime_functions.sql.out | 2 +- .../sql-tests/results/try_element_at.sql.out | 2 +- .../native/booleanEquality.sql.out | 48 +- .../native/caseWhenCoercion.sql.out | 140 ++-- .../native/dateTimeOperations.sql.out | 54 +- .../native/decimalPrecision.sql.out | 704 +++++++++--------- .../typeCoercion/native/division.sql.out | 160 ++-- .../typeCoercion/native/ifCoercion.sql.out | 140 ++-- .../typeCoercion/native/inConversion.sql.out | 280 +++---- .../typeCoercion/native/mapZipWith.sql.out | 4 +- .../typeCoercion/native/mapconcat.sql.out | 10 +- .../native/promoteStrings.sql.out | 94 +-- .../native/stringCastAndExpressions.sql.out | 12 +- .../native/widenSetOperationTypes.sql.out | 140 ++-- .../native/windowFrameCoercion.sql.out | 8 +- .../resources/sql-tests/results/udaf.sql.out | 4 +- .../postgreSQL/udf-aggregates_part1.sql.out | 4 +- .../postgreSQL/udf-aggregates_part3.sql.out | 2 +- .../results/udf/postgreSQL/udf-join.sql.out | 20 +- .../udf/postgreSQL/udf-select_having.sql.out | 6 +- .../postgreSQL/udf-select_implicit.sql.out | 12 +- .../results/udf/udf-except-all.sql.out | 4 +- .../sql-tests/results/udf/udf-except.sql.out | 2 +- .../results/udf/udf-group-analytics.sql.out | 12 +- .../results/udf/udf-group-by.sql.out | 28 +- .../results/udf/udf-inline-table.sql.out | 12 +- .../results/udf/udf-intersect-all.sql.out | 4 +- .../sql-tests/results/udf/udf-pivot.sql.out | 18 +- .../sql-tests/results/udf/udf-udaf.sql.out | 4 +- .../sql-tests/results/udf/udf-window.sql.out | 16 +- .../sql-tests/results/window.sql.out | 26 +- .../apache/spark/sql/SQLQueryTestHelper.scala | 6 +- 135 files changed, 2082 insertions(+), 
2080 deletions(-) diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/array.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/array.sql.out index 18ac5bfbd618d..30aec1a265925 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/array.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/array.sql.out @@ -128,7 +128,7 @@ select sort_array(array('b', 'd'), '1') struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'sort_array(array('b', 'd'), '1')' due to data type mismatch: Sort order in second argument requires a boolean literal.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'sort_array(array('b', 'd'), '1')' due to data type mismatch: Sort order in second argument requires a boolean literal.; line 1 pos 7"},"queryContext":[]} -- !query @@ -137,7 +137,7 @@ select sort_array(array('b', 'd'), cast(NULL as boolean)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'sort_array(array('b', 'd'), CAST(NULL AS BOOLEAN))' due to data type mismatch: Sort order in second argument requires a boolean literal.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'sort_array(array('b', 'd'), CAST(NULL AS BOOLEAN))' due to data type mismatch: Sort order in second argument requires a boolean literal.; line 1 pos 7"},"queryContext":[]} -- !query @@ -165,7 +165,7 @@ select element_at(array(1, 2, 3), 5) struct<> -- !query output org.apache.spark.SparkArrayIndexOutOfBoundsException -{"errorClass":"INVALID_ARRAY_INDEX_IN_ELEMENT_AT","messageParameters":["5","3","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":35,"fragment":"element_at(array(1, 2, 3), 5"}]} +{"errorClass":"INVALID_ARRAY_INDEX_IN_ELEMENT_AT","messageParameters":{"indexValue":"5","arraySize":"3","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":35,"fragment":"element_at(array(1, 2, 3), 5"}]} -- !query @@ -174,7 +174,7 @@ select element_at(array(1, 2, 3), -5) struct<> -- !query output org.apache.spark.SparkArrayIndexOutOfBoundsException -{"errorClass":"INVALID_ARRAY_INDEX_IN_ELEMENT_AT","messageParameters":["-5","3","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":36,"fragment":"element_at(array(1, 2, 3), -5"}]} +{"errorClass":"INVALID_ARRAY_INDEX_IN_ELEMENT_AT","messageParameters":{"indexValue":"-5","arraySize":"3","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":36,"fragment":"element_at(array(1, 2, 3), -5"}]} -- !query @@ -183,7 +183,7 @@ select element_at(array(1, 2, 3), 0) struct<> -- !query output org.apache.spark.SparkRuntimeException -{"errorClass":"ELEMENT_AT_BY_INDEX_ZERO","messageParameters":[],"queryContext":[]} +{"errorClass":"ELEMENT_AT_BY_INDEX_ZERO","messageParameters":{},"queryContext":[]} -- !query @@ -192,7 +192,7 @@ select elt(4, '123', '456') struct<> -- !query output org.apache.spark.SparkArrayIndexOutOfBoundsException -{"errorClass":"INVALID_ARRAY_INDEX","messageParameters":["4","2","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":26,"fragment":"elt(4, '123', '456'"}]} 
+{"errorClass":"INVALID_ARRAY_INDEX","messageParameters":{"indexValue":"4","arraySize":"2","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":26,"fragment":"elt(4, '123', '456'"}]} -- !query @@ -201,7 +201,7 @@ select elt(0, '123', '456') struct<> -- !query output org.apache.spark.SparkArrayIndexOutOfBoundsException -{"errorClass":"INVALID_ARRAY_INDEX","messageParameters":["0","2","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":26,"fragment":"elt(0, '123', '456'"}]} +{"errorClass":"INVALID_ARRAY_INDEX","messageParameters":{"indexValue":"0","arraySize":"2","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":26,"fragment":"elt(0, '123', '456'"}]} -- !query @@ -210,7 +210,7 @@ select elt(-1, '123', '456') struct<> -- !query output org.apache.spark.SparkArrayIndexOutOfBoundsException -{"errorClass":"INVALID_ARRAY_INDEX","messageParameters":["-1","2","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":27,"fragment":"elt(-1, '123', '456'"}]} +{"errorClass":"INVALID_ARRAY_INDEX","messageParameters":{"indexValue":"-1","arraySize":"2","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":27,"fragment":"elt(-1, '123', '456'"}]} -- !query @@ -251,7 +251,7 @@ select array(1, 2, 3)[5] struct<> -- !query output org.apache.spark.SparkArrayIndexOutOfBoundsException -{"errorClass":"INVALID_ARRAY_INDEX","messageParameters":["5","3","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":23,"fragment":"array(1, 2, 3)[5"}]} +{"errorClass":"INVALID_ARRAY_INDEX","messageParameters":{"indexValue":"5","arraySize":"3","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":23,"fragment":"array(1, 2, 3)[5"}]} -- !query @@ -260,7 +260,7 @@ select array(1, 2, 3)[-1] struct<> -- !query output org.apache.spark.SparkArrayIndexOutOfBoundsException -{"errorClass":"INVALID_ARRAY_INDEX","messageParameters":["-1","3","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":24,"fragment":"array(1, 2, 3)[-1"}]} +{"errorClass":"INVALID_ARRAY_INDEX","messageParameters":{"indexValue":"-1","arraySize":"3","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":24,"fragment":"array(1, 2, 3)[-1"}]} -- !query @@ -301,7 +301,7 @@ select array_size(map('a', 1, 'b', 2)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'array_size(map('a', 1, 'b', 2))' due to data type mismatch: argument 1 requires array type, however, 'map('a', 1, 'b', 2)' is of map type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'array_size(map('a', 1, 'b', 2))' due to data type mismatch: argument 1 requires array type, however, 'map('a', 1, 'b', 2)' is of map type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -334,7 +334,7 @@ select element_at(array(1, 2, 3), 5) struct<> -- !query output org.apache.spark.SparkArrayIndexOutOfBoundsException 
-{"errorClass":"INVALID_ARRAY_INDEX_IN_ELEMENT_AT","messageParameters":["5","3","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":35,"fragment":"element_at(array(1, 2, 3), 5"}]} +{"errorClass":"INVALID_ARRAY_INDEX_IN_ELEMENT_AT","messageParameters":{"indexValue":"5","arraySize":"3","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":35,"fragment":"element_at(array(1, 2, 3), 5"}]} -- !query @@ -343,7 +343,7 @@ select element_at(array(1, 2, 3), -5) struct<> -- !query output org.apache.spark.SparkArrayIndexOutOfBoundsException -{"errorClass":"INVALID_ARRAY_INDEX_IN_ELEMENT_AT","messageParameters":["-5","3","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":36,"fragment":"element_at(array(1, 2, 3), -5"}]} +{"errorClass":"INVALID_ARRAY_INDEX_IN_ELEMENT_AT","messageParameters":{"indexValue":"-5","arraySize":"3","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":36,"fragment":"element_at(array(1, 2, 3), -5"}]} -- !query @@ -352,7 +352,7 @@ select element_at(array(1, 2, 3), 0) struct<> -- !query output org.apache.spark.SparkRuntimeException -{"errorClass":"ELEMENT_AT_BY_INDEX_ZERO","messageParameters":[],"queryContext":[]} +{"errorClass":"ELEMENT_AT_BY_INDEX_ZERO","messageParameters":{},"queryContext":[]} -- !query @@ -361,7 +361,7 @@ select elt(4, '123', '456') struct<> -- !query output org.apache.spark.SparkArrayIndexOutOfBoundsException -{"errorClass":"INVALID_ARRAY_INDEX","messageParameters":["4","2","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":26,"fragment":"elt(4, '123', '456'"}]} +{"errorClass":"INVALID_ARRAY_INDEX","messageParameters":{"indexValue":"4","arraySize":"2","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":26,"fragment":"elt(4, '123', '456'"}]} -- !query @@ -370,7 +370,7 @@ select elt(0, '123', '456') struct<> -- !query output org.apache.spark.SparkArrayIndexOutOfBoundsException -{"errorClass":"INVALID_ARRAY_INDEX","messageParameters":["0","2","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":26,"fragment":"elt(0, '123', '456'"}]} +{"errorClass":"INVALID_ARRAY_INDEX","messageParameters":{"indexValue":"0","arraySize":"2","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":26,"fragment":"elt(0, '123', '456'"}]} -- !query @@ -379,4 +379,4 @@ select elt(-1, '123', '456') struct<> -- !query output org.apache.spark.SparkArrayIndexOutOfBoundsException -{"errorClass":"INVALID_ARRAY_INDEX","messageParameters":["-1","2","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":27,"fragment":"elt(-1, '123', '456'"}]} +{"errorClass":"INVALID_ARRAY_INDEX","messageParameters":{"indexValue":"-1","arraySize":"2","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":27,"fragment":"elt(-1, '123', '456'"}]} diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out index 2ea4fededd3d2..d6beaee02a2eb 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out +++ 
b/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out @@ -5,7 +5,7 @@ SELECT CAST('1.23' AS int) struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'1.23'","\"STRING\"","\"INT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":25,"fragment":"CAST('1.23' AS int"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'1.23'","sourceType":"\"STRING\"","targetType":"\"INT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":25,"fragment":"CAST('1.23' AS int"}]} -- !query @@ -14,7 +14,7 @@ SELECT CAST('1.23' AS long) struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'1.23'","\"STRING\"","\"BIGINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":26,"fragment":"CAST('1.23' AS long"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'1.23'","sourceType":"\"STRING\"","targetType":"\"BIGINT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":26,"fragment":"CAST('1.23' AS long"}]} -- !query @@ -23,7 +23,7 @@ SELECT CAST('-4.56' AS int) struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'-4.56'","\"STRING\"","\"INT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":26,"fragment":"CAST('-4.56' AS int"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'-4.56'","sourceType":"\"STRING\"","targetType":"\"INT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":26,"fragment":"CAST('-4.56' AS int"}]} -- !query @@ -32,7 +32,7 @@ SELECT CAST('-4.56' AS long) struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'-4.56'","\"STRING\"","\"BIGINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":27,"fragment":"CAST('-4.56' AS long"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'-4.56'","sourceType":"\"STRING\"","targetType":"\"BIGINT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":27,"fragment":"CAST('-4.56' AS long"}]} -- !query @@ -41,7 +41,7 @@ SELECT CAST('abc' AS int) struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'abc'","\"STRING\"","\"INT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":24,"fragment":"CAST('abc' AS int"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'abc'","sourceType":"\"STRING\"","targetType":"\"INT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":24,"fragment":"CAST('abc' AS int"}]} -- !query @@ -50,7 +50,7 @@ SELECT CAST('abc' AS long) struct<> -- 
!query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'abc'","\"STRING\"","\"BIGINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":25,"fragment":"CAST('abc' AS long"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'abc'","sourceType":"\"STRING\"","targetType":"\"BIGINT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":25,"fragment":"CAST('abc' AS long"}]} -- !query @@ -59,7 +59,7 @@ SELECT CAST('abc' AS float) struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'abc'","\"STRING\"","\"FLOAT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":26,"fragment":"CAST('abc' AS float"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'abc'","sourceType":"\"STRING\"","targetType":"\"FLOAT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":26,"fragment":"CAST('abc' AS float"}]} -- !query @@ -68,7 +68,7 @@ SELECT CAST('abc' AS double) struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'abc'","\"STRING\"","\"DOUBLE\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":27,"fragment":"CAST('abc' AS double"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'abc'","sourceType":"\"STRING\"","targetType":"\"DOUBLE\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":27,"fragment":"CAST('abc' AS double"}]} -- !query @@ -77,7 +77,7 @@ SELECT CAST('1234567890123' AS int) struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'1234567890123'","\"STRING\"","\"INT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":34,"fragment":"CAST('1234567890123' AS int"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'1234567890123'","sourceType":"\"STRING\"","targetType":"\"INT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":34,"fragment":"CAST('1234567890123' AS int"}]} -- !query @@ -86,7 +86,7 @@ SELECT CAST('12345678901234567890123' AS long) struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'12345678901234567890123'","\"STRING\"","\"BIGINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":45,"fragment":"CAST('12345678901234567890123' AS long"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'12345678901234567890123'","sourceType":"\"STRING\"","targetType":"\"BIGINT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":45,"fragment":"CAST('12345678901234567890123' AS long"}]} -- !query @@ -95,7 +95,7 @@ SELECT CAST('' AS 
int) struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["''","\"STRING\"","\"INT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":21,"fragment":"CAST('' AS int"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"''","sourceType":"\"STRING\"","targetType":"\"INT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":21,"fragment":"CAST('' AS int"}]} -- !query @@ -104,7 +104,7 @@ SELECT CAST('' AS long) struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["''","\"STRING\"","\"BIGINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":22,"fragment":"CAST('' AS long"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"''","sourceType":"\"STRING\"","targetType":"\"BIGINT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":22,"fragment":"CAST('' AS long"}]} -- !query @@ -113,7 +113,7 @@ SELECT CAST('' AS float) struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["''","\"STRING\"","\"FLOAT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":23,"fragment":"CAST('' AS float"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"''","sourceType":"\"STRING\"","targetType":"\"FLOAT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":23,"fragment":"CAST('' AS float"}]} -- !query @@ -122,7 +122,7 @@ SELECT CAST('' AS double) struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["''","\"STRING\"","\"DOUBLE\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":24,"fragment":"CAST('' AS double"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"''","sourceType":"\"STRING\"","targetType":"\"DOUBLE\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":24,"fragment":"CAST('' AS double"}]} -- !query @@ -147,7 +147,7 @@ SELECT CAST('123.a' AS int) struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'123.a'","\"STRING\"","\"INT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":26,"fragment":"CAST('123.a' AS int"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'123.a'","sourceType":"\"STRING\"","targetType":"\"INT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":26,"fragment":"CAST('123.a' AS int"}]} -- !query @@ -156,7 +156,7 @@ SELECT CAST('123.a' AS long) struct<> -- !query output org.apache.spark.SparkNumberFormatException 
-{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'123.a'","\"STRING\"","\"BIGINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":27,"fragment":"CAST('123.a' AS long"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'123.a'","sourceType":"\"STRING\"","targetType":"\"BIGINT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":27,"fragment":"CAST('123.a' AS long"}]} -- !query @@ -165,7 +165,7 @@ SELECT CAST('123.a' AS float) struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'123.a'","\"STRING\"","\"FLOAT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":28,"fragment":"CAST('123.a' AS float"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'123.a'","sourceType":"\"STRING\"","targetType":"\"FLOAT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":28,"fragment":"CAST('123.a' AS float"}]} -- !query @@ -174,7 +174,7 @@ SELECT CAST('123.a' AS double) struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'123.a'","\"STRING\"","\"DOUBLE\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":29,"fragment":"CAST('123.a' AS double"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'123.a'","sourceType":"\"STRING\"","targetType":"\"DOUBLE\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":29,"fragment":"CAST('123.a' AS double"}]} -- !query @@ -191,7 +191,7 @@ SELECT CAST('-2147483649' AS int) struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'-2147483649'","\"STRING\"","\"INT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":32,"fragment":"CAST('-2147483649' AS int"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'-2147483649'","sourceType":"\"STRING\"","targetType":"\"INT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":32,"fragment":"CAST('-2147483649' AS int"}]} -- !query @@ -208,7 +208,7 @@ SELECT CAST('2147483648' AS int) struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'2147483648'","\"STRING\"","\"INT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":31,"fragment":"CAST('2147483648' AS int"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'2147483648'","sourceType":"\"STRING\"","targetType":"\"INT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":31,"fragment":"CAST('2147483648' AS int"}]} -- !query @@ -225,7 +225,7 @@ SELECT CAST('-9223372036854775809' AS long) struct<> -- !query output org.apache.spark.SparkNumberFormatException 
-{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'-9223372036854775809'","\"STRING\"","\"BIGINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":42,"fragment":"CAST('-9223372036854775809' AS long"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'-9223372036854775809'","sourceType":"\"STRING\"","targetType":"\"BIGINT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":42,"fragment":"CAST('-9223372036854775809' AS long"}]} -- !query @@ -242,7 +242,7 @@ SELECT CAST('9223372036854775808' AS long) struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'9223372036854775808'","\"STRING\"","\"BIGINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":41,"fragment":"CAST('9223372036854775808' AS long"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'9223372036854775808'","sourceType":"\"STRING\"","targetType":"\"BIGINT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":41,"fragment":"CAST('9223372036854775808' AS long"}]} -- !query @@ -259,7 +259,7 @@ SELECT HEX(CAST(CAST(123 AS byte) AS binary)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(CAST(123 AS TINYINT) AS BINARY)' due to data type mismatch: \n cannot cast tinyint to binary with ANSI mode on.\n If you have to cast tinyint to binary, you can set spark.sql.ansi.enabled as false.\n; line 1 pos 11"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CAST(CAST(123 AS TINYINT) AS BINARY)' due to data type mismatch: \n cannot cast tinyint to binary with ANSI mode on.\n If you have to cast tinyint to binary, you can set spark.sql.ansi.enabled as false.\n; line 1 pos 11"},"queryContext":[]} -- !query @@ -268,7 +268,7 @@ SELECT HEX(CAST(CAST(-123 AS byte) AS binary)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(CAST(-123 AS TINYINT) AS BINARY)' due to data type mismatch: \n cannot cast tinyint to binary with ANSI mode on.\n If you have to cast tinyint to binary, you can set spark.sql.ansi.enabled as false.\n; line 1 pos 11"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CAST(CAST(-123 AS TINYINT) AS BINARY)' due to data type mismatch: \n cannot cast tinyint to binary with ANSI mode on.\n If you have to cast tinyint to binary, you can set spark.sql.ansi.enabled as false.\n; line 1 pos 11"},"queryContext":[]} -- !query @@ -277,7 +277,7 @@ SELECT HEX(CAST(123S AS binary)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(123S AS BINARY)' due to data type mismatch: \n cannot cast smallint to binary with ANSI mode on.\n If you have to cast smallint to binary, you can set spark.sql.ansi.enabled as false.\n; line 1 pos 11"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CAST(123S AS BINARY)' due to data type mismatch: \n cannot cast smallint to binary with ANSI mode on.\n If you have to cast smallint to binary, you can 
set spark.sql.ansi.enabled as false.\n; line 1 pos 11"},"queryContext":[]} -- !query @@ -286,7 +286,7 @@ SELECT HEX(CAST(-123S AS binary)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(-123S AS BINARY)' due to data type mismatch: \n cannot cast smallint to binary with ANSI mode on.\n If you have to cast smallint to binary, you can set spark.sql.ansi.enabled as false.\n; line 1 pos 11"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CAST(-123S AS BINARY)' due to data type mismatch: \n cannot cast smallint to binary with ANSI mode on.\n If you have to cast smallint to binary, you can set spark.sql.ansi.enabled as false.\n; line 1 pos 11"},"queryContext":[]} -- !query @@ -295,7 +295,7 @@ SELECT HEX(CAST(123 AS binary)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(123 AS BINARY)' due to data type mismatch: \n cannot cast int to binary with ANSI mode on.\n If you have to cast int to binary, you can set spark.sql.ansi.enabled as false.\n; line 1 pos 11"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CAST(123 AS BINARY)' due to data type mismatch: \n cannot cast int to binary with ANSI mode on.\n If you have to cast int to binary, you can set spark.sql.ansi.enabled as false.\n; line 1 pos 11"},"queryContext":[]} -- !query @@ -304,7 +304,7 @@ SELECT HEX(CAST(-123 AS binary)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(-123 AS BINARY)' due to data type mismatch: \n cannot cast int to binary with ANSI mode on.\n If you have to cast int to binary, you can set spark.sql.ansi.enabled as false.\n; line 1 pos 11"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CAST(-123 AS BINARY)' due to data type mismatch: \n cannot cast int to binary with ANSI mode on.\n If you have to cast int to binary, you can set spark.sql.ansi.enabled as false.\n; line 1 pos 11"},"queryContext":[]} -- !query @@ -313,7 +313,7 @@ SELECT HEX(CAST(123L AS binary)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(123L AS BINARY)' due to data type mismatch: \n cannot cast bigint to binary with ANSI mode on.\n If you have to cast bigint to binary, you can set spark.sql.ansi.enabled as false.\n; line 1 pos 11"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CAST(123L AS BINARY)' due to data type mismatch: \n cannot cast bigint to binary with ANSI mode on.\n If you have to cast bigint to binary, you can set spark.sql.ansi.enabled as false.\n; line 1 pos 11"},"queryContext":[]} -- !query @@ -322,7 +322,7 @@ SELECT HEX(CAST(-123L AS binary)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(-123L AS BINARY)' due to data type mismatch: \n cannot cast bigint to binary with ANSI mode on.\n If you have to cast bigint to binary, you can set spark.sql.ansi.enabled as false.\n; line 1 pos 11"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CAST(-123L AS BINARY)' due to data type mismatch: \n cannot cast bigint to binary with ANSI mode on.\n If you have to cast bigint to binary, you can set spark.sql.ansi.enabled 
as false.\n; line 1 pos 11"},"queryContext":[]} -- !query @@ -380,7 +380,7 @@ SELECT CAST(interval 3 month 1 hour AS string) struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot mix year-month and day-time fields: interval 3 month 1 hour(line 1, pos 12)\n\n== SQL ==\nSELECT CAST(interval 3 month 1 hour AS string)\n------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot mix year-month and day-time fields: interval 3 month 1 hour(line 1, pos 12)\n\n== SQL ==\nSELECT CAST(interval 3 month 1 hour AS string)\n------------^^^\n"},"queryContext":[]} -- !query @@ -469,7 +469,7 @@ select cast('1中文' as tinyint) struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'1中文'","\"STRING\"","\"TINYINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":28,"fragment":"cast('1中文' as tinyint"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'1中文'","sourceType":"\"STRING\"","targetType":"\"TINYINT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":28,"fragment":"cast('1中文' as tinyint"}]} -- !query @@ -478,7 +478,7 @@ select cast('1中文' as smallint) struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'1中文'","\"STRING\"","\"SMALLINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":29,"fragment":"cast('1中文' as smallint"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'1中文'","sourceType":"\"STRING\"","targetType":"\"SMALLINT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":29,"fragment":"cast('1中文' as smallint"}]} -- !query @@ -487,7 +487,7 @@ select cast('1中文' as INT) struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'1中文'","\"STRING\"","\"INT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":24,"fragment":"cast('1中文' as INT"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'1中文'","sourceType":"\"STRING\"","targetType":"\"INT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":24,"fragment":"cast('1中文' as INT"}]} -- !query @@ -496,7 +496,7 @@ select cast('中文1' as bigint) struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'中文1'","\"STRING\"","\"BIGINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":27,"fragment":"cast('中文1' as bigint"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'中文1'","sourceType":"\"STRING\"","targetType":"\"BIGINT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":27,"fragment":"cast('中文1' as bigint"}]} -- !query @@ -505,7 +505,7 @@ select cast('1中文' as bigint) struct<> -- !query output 
org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'1中文'","\"STRING\"","\"BIGINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":27,"fragment":"cast('1中文' as bigint"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'1中文'","sourceType":"\"STRING\"","targetType":"\"BIGINT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":27,"fragment":"cast('1中文' as bigint"}]} -- !query @@ -532,7 +532,7 @@ select cast('\t\n xyz \t\r' as boolean) struct<> -- !query output org.apache.spark.SparkRuntimeException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'\t\n xyz \t\r'","\"STRING\"","\"BOOLEAN\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":38,"fragment":"cast('\\t\\n xyz \\t\\r' as boolean"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'\t\n xyz \t\r'","sourceType":"\"STRING\"","targetType":"\"BOOLEAN\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":38,"fragment":"cast('\\t\\n xyz \\t\\r' as boolean"}]} -- !query @@ -549,7 +549,7 @@ select cast('123.45' as decimal(4, 2)) struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":["Decimal(expanded, 123.45, 5, 2)","4","2","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":37,"fragment":"cast('123.45' as decimal(4, 2)"}]} +{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":{"value":"Decimal(expanded, 123.45, 5, 2)","precision":"4","scale":"2","config":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":37,"fragment":"cast('123.45' as decimal(4, 2)"}]} -- !query @@ -558,7 +558,7 @@ select cast('xyz' as decimal(4, 2)) struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'xyz'","\"STRING\"","\"DECIMAL(4,2)\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":34,"fragment":"cast('xyz' as decimal(4, 2)"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'xyz'","sourceType":"\"STRING\"","targetType":"\"DECIMAL(4,2)\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":34,"fragment":"cast('xyz' as decimal(4, 2)"}]} -- !query @@ -575,7 +575,7 @@ select cast('a' as date) struct<> -- !query output org.apache.spark.SparkDateTimeException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'a'","\"STRING\"","\"DATE\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":23,"fragment":"cast('a' as date"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'a'","sourceType":"\"STRING\"","targetType":"\"DATE\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":23,"fragment":"cast('a' as date"}]} -- !query @@ -592,7 +592,7 @@ select cast('a' as 
timestamp) struct<> -- !query output org.apache.spark.SparkDateTimeException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'a'","\"STRING\"","\"TIMESTAMP\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":28,"fragment":"cast('a' as timestamp"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'a'","sourceType":"\"STRING\"","targetType":"\"TIMESTAMP\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":28,"fragment":"cast('a' as timestamp"}]} -- !query @@ -609,7 +609,7 @@ select cast('a' as timestamp_ntz) struct<> -- !query output org.apache.spark.SparkDateTimeException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'a'","\"STRING\"","\"TIMESTAMP_NTZ\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":32,"fragment":"cast('a' as timestamp_ntz"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'a'","sourceType":"\"STRING\"","targetType":"\"TIMESTAMP_NTZ\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":32,"fragment":"cast('a' as timestamp_ntz"}]} -- !query @@ -618,7 +618,7 @@ select cast(cast('inf' as double) as timestamp) struct<> -- !query output org.apache.spark.SparkDateTimeException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["Infinity","\"DOUBLE\"","\"TIMESTAMP\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":46,"fragment":"cast(cast('inf' as double) as timestamp"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"Infinity","sourceType":"\"DOUBLE\"","targetType":"\"TIMESTAMP\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":46,"fragment":"cast(cast('inf' as double) as timestamp"}]} -- !query @@ -627,7 +627,7 @@ select cast(cast('inf' as float) as timestamp) struct<> -- !query output org.apache.spark.SparkDateTimeException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["Infinity","\"DOUBLE\"","\"TIMESTAMP\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":45,"fragment":"cast(cast('inf' as float) as timestamp"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"Infinity","sourceType":"\"DOUBLE\"","targetType":"\"TIMESTAMP\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":45,"fragment":"cast(cast('inf' as float) as timestamp"}]} -- !query @@ -668,7 +668,7 @@ select cast(interval '23:59:59' hour to second as smallint) struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":["INTERVAL '23:59:59' HOUR TO SECOND","\"INTERVAL HOUR TO SECOND\"","\"SMALLINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":{"value":"INTERVAL '23:59:59' HOUR TO SECOND","sourceType":"\"INTERVAL HOUR TO SECOND\"","targetType":"\"SMALLINT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query @@ -693,7 +693,7 @@ select cast(interval '-1000' month as 
tinyint) struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":["INTERVAL '-1000' MONTH","\"INTERVAL MONTH\"","\"TINYINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":{"value":"INTERVAL '-1000' MONTH","sourceType":"\"INTERVAL MONTH\"","targetType":"\"TINYINT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query @@ -702,7 +702,7 @@ select cast(interval '1000000' second as smallint) struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":["INTERVAL '1000000' SECOND","\"INTERVAL SECOND\"","\"SMALLINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":{"value":"INTERVAL '1000000' SECOND","sourceType":"\"INTERVAL SECOND\"","targetType":"\"SMALLINT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query @@ -767,7 +767,7 @@ select cast(2147483647 as interval year) struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":["2147483647","\"INT\"","\"INTERVAL YEAR\"","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":{"value":"2147483647","sourceType":"\"INT\"","targetType":"\"INTERVAL YEAR\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query @@ -776,7 +776,7 @@ select cast(-9223372036854775808L as interval day) struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":["-9223372036854775808L","\"BIGINT\"","\"INTERVAL DAY\"","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":{"value":"-9223372036854775808L","sourceType":"\"BIGINT\"","targetType":"\"INTERVAL DAY\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query @@ -841,4 +841,4 @@ select cast(interval '10.123' second as decimal(1, 0)) struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":["Decimal(compact, 10, 18, 6)","1","0","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":53,"fragment":"cast(interval '10.123' second as decimal(1, 0)"}]} +{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":{"value":"Decimal(compact, 10, 18, 6)","precision":"1","scale":"0","config":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":53,"fragment":"cast(interval '10.123' second as decimal(1, 0)"}]} diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out index 924f2244d40b3..f58432c69b64f 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out @@ -21,7 +21,7 @@ select date '2020-01-01中文' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 2020-01-01中文(line 1, pos 7)\n\n== SQL ==\nselect date '2020-01-01中文'\n-------^^^\n"],"queryContext":[]} 
+{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the DATE value: 2020-01-01中文(line 1, pos 7)\n\n== SQL ==\nselect date '2020-01-01中文'\n-------^^^\n"},"queryContext":[]} -- !query @@ -38,7 +38,7 @@ select make_date(2000, 13, 1) struct<> -- !query output java.time.DateTimeException -{"errorClass":"legacy","messageParameters":["Invalid value for MonthOfYear (valid values 1 - 12): 13. If necessary set spark.sql.ansi.enabled to false to bypass this error."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Invalid value for MonthOfYear (valid values 1 - 12): 13. If necessary set spark.sql.ansi.enabled to false to bypass this error."},"queryContext":[]} -- !query @@ -47,7 +47,7 @@ select make_date(2000, 1, 33) struct<> -- !query output java.time.DateTimeException -{"errorClass":"legacy","messageParameters":["Invalid value for DayOfMonth (valid values 1 - 28/31): 33. If necessary set spark.sql.ansi.enabled to false to bypass this error."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Invalid value for DayOfMonth (valid values 1 - 28/31): 33. If necessary set spark.sql.ansi.enabled to false to bypass this error."},"queryContext":[]} -- !query @@ -56,7 +56,7 @@ select date'015' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 015(line 1, pos 7)\n\n== SQL ==\nselect date'015'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the DATE value: 015(line 1, pos 7)\n\n== SQL ==\nselect date'015'\n-------^^^\n"},"queryContext":[]} -- !query @@ -65,7 +65,7 @@ select date'2021-4294967297-11' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 2021-4294967297-11(line 1, pos 7)\n\n== SQL ==\nselect date'2021-4294967297-11'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the DATE value: 2021-4294967297-11(line 1, pos 7)\n\n== SQL ==\nselect date'2021-4294967297-11'\n-------^^^\n"},"queryContext":[]} -- !query @@ -122,7 +122,7 @@ select to_date("02-29", "MM-dd") struct<> -- !query output org.apache.spark.SparkDateTimeException -{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Invalid date 'February 29' as '1970' is not a leap year","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":{"message":"Invalid date 'February 29' as '1970' is not a leap year","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query @@ -181,7 +181,7 @@ select next_day("2015-07-23", "xx") struct<> -- !query output java.lang.IllegalArgumentException -{"errorClass":"legacy","messageParameters":["Illegal input for day of week: xx. If necessary set spark.sql.ansi.enabled to false to bypass this error."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Illegal input for day of week: xx. 
If necessary set spark.sql.ansi.enabled to false to bypass this error."},"queryContext":[]} -- !query @@ -214,7 +214,7 @@ select next_day("xx", "Mon") struct<> -- !query output org.apache.spark.SparkDateTimeException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'xx'","\"STRING\"","\"DATE\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":27,"fragment":"next_day(\"xx\", \"Mon\""}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'xx'","sourceType":"\"STRING\"","targetType":"\"DATE\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":27,"fragment":"next_day(\"xx\", \"Mon\""}]} -- !query @@ -271,7 +271,7 @@ select date_add('2011-11-11', 1L) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1L)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1L' is of bigint type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1L)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1L' is of bigint type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -280,7 +280,7 @@ select date_add('2011-11-11', 1.0) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1.0BD)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1.0BD)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -289,7 +289,7 @@ select date_add('2011-11-11', 1E1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -306,7 +306,7 @@ select date_add('2011-11-11', '1.2') struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'1.2'","\"STRING\"","\"INT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":35,"fragment":"date_add('2011-11-11', '1.2'"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'1.2'","sourceType":"\"STRING\"","targetType":"\"INT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":35,"fragment":"date_add('2011-11-11', '1.2'"}]} -- !query @@ -379,7 +379,7 @@ select date_sub('2011-11-11', 1L) struct<> -- 
!query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 1L)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1L' is of bigint type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 1L)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1L' is of bigint type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -388,7 +388,7 @@ select date_sub('2011-11-11', 1.0) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 1.0BD)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 1.0BD)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -397,7 +397,7 @@ select date_sub('2011-11-11', 1E1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -414,7 +414,7 @@ select date_sub(date'2011-11-11', '1.2') struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'1.2'","\"STRING\"","\"INT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":39,"fragment":"date_sub(date'2011-11-11', '1.2'"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'1.2'","sourceType":"\"STRING\"","targetType":"\"INT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":39,"fragment":"date_sub(date'2011-11-11', '1.2'"}]} -- !query @@ -487,7 +487,7 @@ select date '2011-11-11' + 1E1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(DATE '2011-11-11', 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_add(DATE '2011-11-11', 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -576,7 +576,7 @@ select date'2011-11-11' + '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(DATE '2011-11-11', CAST('1' AS DATE))' due to data type mismatch: argument 2 requires (int or 
smallint or tinyint) type, however, 'CAST('1' AS DATE)' is of date type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_add(DATE '2011-11-11', CAST('1' AS DATE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DATE)' is of date type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -585,7 +585,7 @@ select '1' + date'2011-11-11' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('1' AS DATE), DATE '2011-11-11')' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'DATE '2011-11-11'' is of date type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_add(CAST('1' AS DATE), DATE '2011-11-11')' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'DATE '2011-11-11'' is of date type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -623,7 +623,7 @@ select to_date('26/October/2015', 'dd/MMMMM/yyyy') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'dd/MMMMM/yyyy'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'dd/MMMMM/yyyy'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -632,7 +632,7 @@ select from_json('{"d":"26/October/2015"}', 'd Date', map('dateFormat', 'dd/MMMM struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'dd/MMMMM/yyyy'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'dd/MMMMM/yyyy'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -641,7 +641,7 @@ select from_csv('26/October/2015', 'd Date', map('dateFormat', 'dd/MMMMM/yyyy')) struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'dd/MMMMM/yyyy'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'dd/MMMMM/yyyy'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out index 333663a2308f3..c82bb590960e0 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out @@ -5,7 +5,7 @@ select to_timestamp('294248', 'y') struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"long overflow"},"queryContext":[]} -- !query @@ -14,7 +14,7 @@ select 
to_timestamp('1', 'yy') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":["'1'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":{"datetime":"'1'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -23,7 +23,7 @@ select to_timestamp('-12', 'yy') struct<> -- !query output org.apache.spark.SparkDateTimeException -{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text '-12' could not be parsed at index 0","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":{"message":"Text '-12' could not be parsed at index 0","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query @@ -32,7 +32,7 @@ select to_timestamp('123', 'yy') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":["'123'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":{"datetime":"'123'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -41,7 +41,7 @@ select to_timestamp('1', 'yyy') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":["'1'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":{"datetime":"'1'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -50,7 +50,7 @@ select to_timestamp('1234567', 'yyyyyyy') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'yyyyyyy'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'yyyyyyy'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -59,7 +59,7 @@ select to_timestamp('366', 'D') struct<> -- !query output org.apache.spark.SparkDateTimeException -{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Invalid date 'DayOfYear 366' as '1970' is not a leap year","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":{"message":"Invalid date 'DayOfYear 366' as '1970' is not a leap year","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query @@ -68,7 +68,7 @@ select to_timestamp('9', 'DD') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":["'9'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} 
+{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":{"datetime":"'9'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -77,7 +77,7 @@ select to_timestamp('9', 'DDD') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":["'9'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":{"datetime":"'9'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -86,7 +86,7 @@ select to_timestamp('99', 'DDD') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":["'99'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":{"datetime":"'99'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -95,7 +95,7 @@ select to_timestamp('30-365', 'dd-DDD') struct<> -- !query output org.apache.spark.SparkDateTimeException -{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Conflict found: Field DayOfMonth 30 differs from DayOfMonth 31 derived from 1970-12-31","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":{"message":"Conflict found: Field DayOfMonth 30 differs from DayOfMonth 31 derived from 1970-12-31","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query @@ -104,7 +104,7 @@ select to_timestamp('11-365', 'MM-DDD') struct<> -- !query output org.apache.spark.SparkDateTimeException -{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Conflict found: Field MonthOfYear 11 differs from MonthOfYear 12 derived from 1970-12-31","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":{"message":"Conflict found: Field MonthOfYear 11 differs from MonthOfYear 12 derived from 1970-12-31","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query @@ -113,7 +113,7 @@ select to_timestamp('2019-366', 'yyyy-DDD') struct<> -- !query output org.apache.spark.SparkDateTimeException -{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text '2019-366' could not be parsed: Invalid date 'DayOfYear 366' as '2019' is not a leap year","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":{"message":"Text '2019-366' could not be parsed: Invalid date 'DayOfYear 366' as '2019' is not a leap year","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query @@ -122,7 +122,7 @@ select to_timestamp('12-30-365', 'MM-dd-DDD') struct<> -- !query output org.apache.spark.SparkDateTimeException -{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Conflict found: Field DayOfMonth 30 differs from DayOfMonth 31 derived from 1970-12-31","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":{"message":"Conflict found: Field DayOfMonth 30 differs from 
DayOfMonth 31 derived from 1970-12-31","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query @@ -131,7 +131,7 @@ select to_timestamp('2020-01-365', 'yyyy-dd-DDD') struct<> -- !query output org.apache.spark.SparkDateTimeException -{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text '2020-01-365' could not be parsed: Conflict found: Field DayOfMonth 30 differs from DayOfMonth 1 derived from 2020-12-30","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":{"message":"Text '2020-01-365' could not be parsed: Conflict found: Field DayOfMonth 30 differs from DayOfMonth 1 derived from 2020-12-30","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query @@ -140,7 +140,7 @@ select to_timestamp('2020-10-350', 'yyyy-MM-DDD') struct<> -- !query output org.apache.spark.SparkDateTimeException -{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text '2020-10-350' could not be parsed: Conflict found: Field MonthOfYear 12 differs from MonthOfYear 10 derived from 2020-12-15","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":{"message":"Text '2020-10-350' could not be parsed: Conflict found: Field MonthOfYear 12 differs from MonthOfYear 10 derived from 2020-12-15","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query @@ -149,7 +149,7 @@ select to_timestamp('2020-11-31-366', 'yyyy-MM-dd-DDD') struct<> -- !query output org.apache.spark.SparkDateTimeException -{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text '2020-11-31-366' could not be parsed: Invalid date 'NOVEMBER 31'","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":{"message":"Text '2020-11-31-366' could not be parsed: Invalid date 'NOVEMBER 31'","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query @@ -158,7 +158,7 @@ select from_csv('2018-366', 'date Date', map('dateFormat', 'yyyy-DDD')) struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":["'2018-366'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":{"datetime":"'2018-366'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -167,7 +167,7 @@ select to_date("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS") struct<> -- !query output org.apache.spark.SparkDateTimeException -{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text '2020-01-27T20:06:11.847' could not be parsed at index 10","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":{"message":"Text '2020-01-27T20:06:11.847' could not be parsed at index 10","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query @@ -176,7 +176,7 @@ select to_date("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS") struct<> -- !query output org.apache.spark.SparkDateTimeException -{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text 'Unparseable' could not be parsed at index 0","\"spark.sql.ansi.enabled\""],"queryContext":[]} 
+{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":{"message":"Text 'Unparseable' could not be parsed at index 0","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query @@ -185,7 +185,7 @@ select to_timestamp("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS") struct<> -- !query output org.apache.spark.SparkDateTimeException -{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text '2020-01-27T20:06:11.847' could not be parsed at index 10","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":{"message":"Text '2020-01-27T20:06:11.847' could not be parsed at index 10","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query @@ -194,7 +194,7 @@ select to_timestamp("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS") struct<> -- !query output org.apache.spark.SparkDateTimeException -{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text 'Unparseable' could not be parsed at index 0","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":{"message":"Text 'Unparseable' could not be parsed at index 0","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query @@ -203,7 +203,7 @@ select unix_timestamp("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS") struct<> -- !query output org.apache.spark.SparkDateTimeException -{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text '2020-01-27T20:06:11.847' could not be parsed at index 10","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":{"message":"Text '2020-01-27T20:06:11.847' could not be parsed at index 10","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query @@ -212,7 +212,7 @@ select unix_timestamp("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS") struct<> -- !query output org.apache.spark.SparkDateTimeException -{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text 'Unparseable' could not be parsed at index 0","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":{"message":"Text 'Unparseable' could not be parsed at index 0","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query @@ -221,7 +221,7 @@ select to_unix_timestamp("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS") struct<> -- !query output org.apache.spark.SparkDateTimeException -{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text '2020-01-27T20:06:11.847' could not be parsed at index 10","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":{"message":"Text '2020-01-27T20:06:11.847' could not be parsed at index 10","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query @@ -230,7 +230,7 @@ select to_unix_timestamp("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS") struct<> -- !query output org.apache.spark.SparkDateTimeException -{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text 'Unparseable' could not be parsed at index 0","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":{"message":"Text 'Unparseable' could not be parsed at index 
0","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query @@ -239,7 +239,7 @@ select cast("Unparseable" as timestamp) struct<> -- !query output org.apache.spark.SparkDateTimeException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'Unparseable'","\"STRING\"","\"TIMESTAMP\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":38,"fragment":"cast(\"Unparseable\" as timestamp"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'Unparseable'","sourceType":"\"STRING\"","targetType":"\"TIMESTAMP\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":38,"fragment":"cast(\"Unparseable\" as timestamp"}]} -- !query @@ -248,4 +248,4 @@ select cast("Unparseable" as date) struct<> -- !query output org.apache.spark.SparkDateTimeException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'Unparseable'","\"STRING\"","\"DATE\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":33,"fragment":"cast(\"Unparseable\" as date"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'Unparseable'","sourceType":"\"STRING\"","targetType":"\"DATE\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":33,"fragment":"cast(\"Unparseable\" as date"}]} diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/decimalArithmeticOperations.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/decimalArithmeticOperations.sql.out index 3dabee1ab640d..21caac7ef9d4f 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/decimalArithmeticOperations.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/decimalArithmeticOperations.sql.out @@ -73,7 +73,7 @@ select (5e36BD + 0.1) + 5e36BD struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":["Decimal(expanded, 10000000000000000000000000000000000000.1, 39, 1)","38","1","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":29,"fragment":"(5e36BD + 0.1) + 5e36B"}]} +{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":{"value":"Decimal(expanded, 10000000000000000000000000000000000000.1, 39, 1)","precision":"38","scale":"1","config":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":29,"fragment":"(5e36BD + 0.1) + 5e36B"}]} -- !query @@ -82,7 +82,7 @@ select (-4e36BD - 0.1) - 7e36BD struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":["Decimal(expanded, -11000000000000000000000000000000000000.1, 39, 1)","38","1","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":30,"fragment":"(-4e36BD - 0.1) - 7e36B"}]} +{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":{"value":"Decimal(expanded, -11000000000000000000000000000000000000.1, 39, 1)","precision":"38","scale":"1","config":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":30,"fragment":"(-4e36BD - 0.1) - 7e36B"}]} -- !query @@ -91,7 
+91,7 @@ select 12345678901234567890.0 * 12345678901234567890.0 struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":["Decimal(expanded, 152415787532388367501905199875019052100, 39, 0)","38","2","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":53,"fragment":"12345678901234567890.0 * 12345678901234567890."}]} +{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":{"value":"Decimal(expanded, 152415787532388367501905199875019052100, 39, 0)","precision":"38","scale":"2","config":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":53,"fragment":"12345678901234567890.0 * 12345678901234567890."}]} -- !query @@ -100,7 +100,7 @@ select 1e35BD / 0.1 struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":["Decimal(expanded, 1000000000000000000000000000000000000.00000000000000000000000000000000000000, 75, 38)","38","6","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":18,"fragment":"1e35BD / 0."}]} +{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":{"value":"Decimal(expanded, 1000000000000000000000000000000000000.00000000000000000000000000000000000000, 75, 38)","precision":"38","scale":"6","config":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":18,"fragment":"1e35BD / 0."}]} -- !query @@ -133,7 +133,7 @@ select 1.0123456789012345678901234567890123456e36BD / 0.1 struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":["Decimal(expanded, 10123456789012345678901234567890123456.00000000000000000000000000000000000000, 76, 38)","38","6","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":56,"fragment":"1.0123456789012345678901234567890123456e36BD / 0."}]} +{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":{"value":"Decimal(expanded, 10123456789012345678901234567890123456.00000000000000000000000000000000000000, 76, 38)","precision":"38","scale":"6","config":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":56,"fragment":"1.0123456789012345678901234567890123456e36BD / 0."}]} -- !query @@ -142,7 +142,7 @@ select 1.0123456789012345678901234567890123456e35BD / 1.0 struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":["Decimal(expanded, 101234567890123456789012345678901234.56000000000000000000000000000000000000, 74, 38)","38","6","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":56,"fragment":"1.0123456789012345678901234567890123456e35BD / 1."}]} +{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":{"value":"Decimal(expanded, 101234567890123456789012345678901234.56000000000000000000000000000000000000, 74, 
38)","precision":"38","scale":"6","config":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":56,"fragment":"1.0123456789012345678901234567890123456e35BD / 1."}]} -- !query @@ -151,7 +151,7 @@ select 1.0123456789012345678901234567890123456e34BD / 1.0 struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":["Decimal(expanded, 10123456789012345678901234567890123.45600000000000000000000000000000000000, 73, 38)","38","6","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":56,"fragment":"1.0123456789012345678901234567890123456e34BD / 1."}]} +{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":{"value":"Decimal(expanded, 10123456789012345678901234567890123.45600000000000000000000000000000000000, 73, 38)","precision":"38","scale":"6","config":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":56,"fragment":"1.0123456789012345678901234567890123456e34BD / 1."}]} -- !query @@ -160,7 +160,7 @@ select 1.0123456789012345678901234567890123456e33BD / 1.0 struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":["Decimal(expanded, 1012345678901234567890123456789012.34560000000000000000000000000000000000, 72, 38)","38","6","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":56,"fragment":"1.0123456789012345678901234567890123456e33BD / 1."}]} +{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":{"value":"Decimal(expanded, 1012345678901234567890123456789012.34560000000000000000000000000000000000, 72, 38)","precision":"38","scale":"6","config":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":56,"fragment":"1.0123456789012345678901234567890123456e33BD / 1."}]} -- !query @@ -169,7 +169,7 @@ select 1.0123456789012345678901234567890123456e32BD / 1.0 struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":["Decimal(expanded, 101234567890123456789012345678901.23456000000000000000000000000000000000, 71, 38)","38","6","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":56,"fragment":"1.0123456789012345678901234567890123456e32BD / 1."}]} +{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":{"value":"Decimal(expanded, 101234567890123456789012345678901.23456000000000000000000000000000000000, 71, 38)","precision":"38","scale":"6","config":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":56,"fragment":"1.0123456789012345678901234567890123456e32BD / 1."}]} -- !query @@ -186,7 +186,7 @@ select 1.0123456789012345678901234567890123456e31BD / 0.1 struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":["Decimal(expanded, 101234567890123456789012345678901.23456000000000000000000000000000000000, 71, 
38)","38","6","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":56,"fragment":"1.0123456789012345678901234567890123456e31BD / 0."}]} +{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":{"value":"Decimal(expanded, 101234567890123456789012345678901.23456000000000000000000000000000000000, 71, 38)","precision":"38","scale":"6","config":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":56,"fragment":"1.0123456789012345678901234567890123456e31BD / 0."}]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/higher-order-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/higher-order-functions.sql.out index ba2603a50e443..368bea9c3a07a 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/higher-order-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/higher-order-functions.sql.out @@ -17,7 +17,7 @@ select upper(x -> x) as v struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["A lambda function should only be used in a higher order function. However, its class is org.apache.spark.sql.catalyst.expressions.Upper, which is not a higher order function.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"A lambda function should only be used in a higher order function. However, its class is org.apache.spark.sql.catalyst.expressions.Upper, which is not a higher order function.; line 1 pos 7"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out index 7593d47bf931a..936e53416da77 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out @@ -13,7 +13,7 @@ select interval 4 month 2 weeks 3 microseconds * 1.5 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot mix year-month and day-time fields: interval 4 month 2 weeks 3 microseconds(line 1, pos 7)\n\n== SQL ==\nselect interval 4 month 2 weeks 3 microseconds * 1.5\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot mix year-month and day-time fields: interval 4 month 2 weeks 3 microseconds(line 1, pos 7)\n\n== SQL ==\nselect interval 4 month 2 weeks 3 microseconds * 1.5\n-------^^^\n"},"queryContext":[]} -- !query @@ -46,7 +46,7 @@ select interval 2147483647 month * 2 struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["integer overflow"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"integer overflow"},"queryContext":[]} -- !query @@ -55,7 +55,7 @@ select interval 2147483647 month / 0.5 struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["Overflow"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Overflow"},"queryContext":[]} -- !query @@ -64,7 +64,7 @@ select interval 2147483647 day * 2 struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"long overflow"},"queryContext":[]} -- !query @@ -73,7 +73,7 @@ select 
interval 2147483647 day / 0.5 struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"long overflow"},"queryContext":[]} -- !query @@ -114,7 +114,7 @@ select interval 2 second * 'a' struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'a'","\"STRING\"","\"DOUBLE\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":29,"fragment":"interval 2 second * 'a"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'a'","sourceType":"\"STRING\"","targetType":"\"DOUBLE\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":29,"fragment":"interval 2 second * 'a"}]} -- !query @@ -123,7 +123,7 @@ select interval 2 second / 'a' struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'a'","\"STRING\"","\"DOUBLE\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":29,"fragment":"interval 2 second / 'a"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'a'","sourceType":"\"STRING\"","targetType":"\"DOUBLE\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":29,"fragment":"interval 2 second / 'a"}]} -- !query @@ -132,7 +132,7 @@ select interval 2 year * 'a' struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'a'","\"STRING\"","\"DOUBLE\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":27,"fragment":"interval 2 year * 'a"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'a'","sourceType":"\"STRING\"","targetType":"\"DOUBLE\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":27,"fragment":"interval 2 year * 'a"}]} -- !query @@ -141,7 +141,7 @@ select interval 2 year / 'a' struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'a'","\"STRING\"","\"DOUBLE\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":27,"fragment":"interval 2 year / 'a"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'a'","sourceType":"\"STRING\"","targetType":"\"DOUBLE\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":27,"fragment":"interval 2 year / 'a"}]} -- !query @@ -166,7 +166,7 @@ select 'a' * interval 2 second struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'a'","\"STRING\"","\"DOUBLE\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":29,"fragment":"'a' * interval 2 secon"}]} 
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'a'","sourceType":"\"STRING\"","targetType":"\"DOUBLE\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":29,"fragment":"'a' * interval 2 secon"}]} -- !query @@ -175,7 +175,7 @@ select 'a' * interval 2 year struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'a'","\"STRING\"","\"DOUBLE\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":27,"fragment":"'a' * interval 2 yea"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'a'","sourceType":"\"STRING\"","targetType":"\"DOUBLE\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":27,"fragment":"'a' * interval 2 yea"}]} -- !query @@ -184,7 +184,7 @@ select '2' / interval 2 second struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '('2' / INTERVAL '02' SECOND)' due to data type mismatch: differing types in '('2' / INTERVAL '02' SECOND)' (string and interval second).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '('2' / INTERVAL '02' SECOND)' due to data type mismatch: differing types in '('2' / INTERVAL '02' SECOND)' (string and interval second).; line 1 pos 7"},"queryContext":[]} -- !query @@ -193,7 +193,7 @@ select '2' / interval 2 year struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '('2' / INTERVAL '2' YEAR)' due to data type mismatch: differing types in '('2' / INTERVAL '2' YEAR)' (string and interval year).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '('2' / INTERVAL '2' YEAR)' due to data type mismatch: differing types in '('2' / INTERVAL '2' YEAR)' (string and interval year).; line 1 pos 7"},"queryContext":[]} -- !query @@ -202,7 +202,7 @@ select interval '2 seconds' / 0 struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"INTERVAL_DIVIDED_BY_ZERO","sqlState":"22012","messageParameters":[],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":30,"fragment":"interval '2 seconds' / "}]} +{"errorClass":"INTERVAL_DIVIDED_BY_ZERO","sqlState":"22012","messageParameters":{},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":30,"fragment":"interval '2 seconds' / "}]} -- !query @@ -235,7 +235,7 @@ select interval '2' year / 0 struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"INTERVAL_DIVIDED_BY_ZERO","sqlState":"22012","messageParameters":[],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":27,"fragment":"interval '2' year / "}]} +{"errorClass":"INTERVAL_DIVIDED_BY_ZERO","sqlState":"22012","messageParameters":{},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":27,"fragment":"interval '2' year / "}]} -- !query @@ -268,7 +268,7 @@ select 2 / interval '2' year struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(2 / INTERVAL '2' YEAR)' due to data type mismatch: differing types in '(2 / INTERVAL '2' YEAR)' (int and interval 
year).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(2 / INTERVAL '2' YEAR)' due to data type mismatch: differing types in '(2 / INTERVAL '2' YEAR)' (int and interval year).; line 1 pos 7"},"queryContext":[]} -- !query @@ -277,7 +277,7 @@ select 2 / interval '2' hour struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(2 / INTERVAL '02' HOUR)' due to data type mismatch: differing types in '(2 / INTERVAL '02' HOUR)' (int and interval hour).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(2 / INTERVAL '02' HOUR)' due to data type mismatch: differing types in '(2 / INTERVAL '02' HOUR)' (int and interval hour).; line 1 pos 7"},"queryContext":[]} -- !query @@ -286,7 +286,7 @@ select null / interval '2' year struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(NULL / INTERVAL '2' YEAR)' due to data type mismatch: differing types in '(NULL / INTERVAL '2' YEAR)' (void and interval year).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(NULL / INTERVAL '2' YEAR)' due to data type mismatch: differing types in '(NULL / INTERVAL '2' YEAR)' (void and interval year).; line 1 pos 7"},"queryContext":[]} -- !query @@ -295,7 +295,7 @@ select null / interval '2' hour struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(NULL / INTERVAL '02' HOUR)' due to data type mismatch: differing types in '(NULL / INTERVAL '02' HOUR)' (void and interval hour).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(NULL / INTERVAL '02' HOUR)' due to data type mismatch: differing types in '(NULL / INTERVAL '02' HOUR)' (void and interval hour).; line 1 pos 7"},"queryContext":[]} -- !query @@ -304,7 +304,7 @@ select -interval '-1 month 1 day -1 second' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot mix year-month and day-time fields: interval '-1 month 1 day -1 second'(line 1, pos 8)\n\n== SQL ==\nselect -interval '-1 month 1 day -1 second'\n--------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot mix year-month and day-time fields: interval '-1 month 1 day -1 second'(line 1, pos 8)\n\n== SQL ==\nselect -interval '-1 month 1 day -1 second'\n--------^^^\n"},"queryContext":[]} -- !query @@ -329,7 +329,7 @@ select -interval -1 month 1 day -1 second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot mix year-month and day-time fields: interval -1 month 1 day -1 second(line 1, pos 8)\n\n== SQL ==\nselect -interval -1 month 1 day -1 second\n--------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot mix year-month and day-time fields: interval -1 month 1 day -1 second(line 1, pos 8)\n\n== SQL ==\nselect -interval -1 month 1 day -1 second\n--------^^^\n"},"queryContext":[]} -- !query @@ -354,7 +354,7 @@ select +interval '-1 month 1 day -1 second' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot mix year-month and day-time fields: interval '-1 month 1 day 
-1 second'(line 1, pos 8)\n\n== SQL ==\nselect +interval '-1 month 1 day -1 second'\n--------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot mix year-month and day-time fields: interval '-1 month 1 day -1 second'(line 1, pos 8)\n\n== SQL ==\nselect +interval '-1 month 1 day -1 second'\n--------^^^\n"},"queryContext":[]} -- !query @@ -379,7 +379,7 @@ select +interval -1 month 1 day -1 second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot mix year-month and day-time fields: interval -1 month 1 day -1 second(line 1, pos 8)\n\n== SQL ==\nselect +interval -1 month 1 day -1 second\n--------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot mix year-month and day-time fields: interval -1 month 1 day -1 second(line 1, pos 8)\n\n== SQL ==\nselect +interval -1 month 1 day -1 second\n--------^^^\n"},"queryContext":[]} -- !query @@ -612,7 +612,7 @@ select make_interval(0, 0, 0, 0, 0, 0, 1234567890123456789) struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":["Decimal(expanded, 1234567890123456789, 20, 0)","18","6","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":58,"fragment":"make_interval(0, 0, 0, 0, 0, 0, 1234567890123456789"}]} +{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":{"value":"Decimal(expanded, 1234567890123456789, 20, 0)","precision":"18","scale":"6","config":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":58,"fragment":"make_interval(0, 0, 0, 0, 0, 0, 1234567890123456789"}]} -- !query @@ -661,7 +661,7 @@ select make_dt_interval(2147483647) struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"long overflow"},"queryContext":[]} -- !query @@ -702,7 +702,7 @@ select make_ym_interval(178956970, 8) struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["integer overflow"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"integer overflow"},"queryContext":[]} -- !query @@ -719,7 +719,7 @@ select make_ym_interval(-178956970, -9) struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["integer overflow"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"integer overflow"},"queryContext":[]} -- !query @@ -792,7 +792,7 @@ select interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 millisec struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot mix year-month and day-time fields: interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 millisecond 9 microsecond(line 1, pos 7)\n\n== SQL ==\nselect interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 millisecond 9 microsecond\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot mix year-month and day-time fields: interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 millisecond 9 microsecond(line 1, pos 7)\n\n== SQL ==\nselect interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 
8 millisecond 9 microsecond\n-------^^^\n"},"queryContext":[]} -- !query @@ -825,7 +825,7 @@ select interval '30' year '25' month '-100' day '40' hour '80' minute '299.88998 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot mix year-month and day-time fields: interval '30' year '25' month '-100' day '40' hour '80' minute '299.889987299' second(line 1, pos 7)\n\n== SQL ==\nselect interval '30' year '25' month '-100' day '40' hour '80' minute '299.889987299' second\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot mix year-month and day-time fields: interval '30' year '25' month '-100' day '40' hour '80' minute '299.889987299' second(line 1, pos 7)\n\n== SQL ==\nselect interval '30' year '25' month '-100' day '40' hour '80' minute '299.889987299' second\n-------^^^\n"},"queryContext":[]} -- !query @@ -954,7 +954,7 @@ select interval '20 15:40:32.99899999' day to hour struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval day to hour: 20 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '20 15:40:32.99899999' day to hour\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nInterval string does not match day-time format of `[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval day to hour: 20 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '20 15:40:32.99899999' day to hour\n----------------^^^\n"},"queryContext":[]} -- !query @@ -963,7 +963,7 @@ select interval '20 15:40:32.99899999' day to minute struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]d h:m`, `INTERVAL [+|-]'[+|-]d h:m' DAY TO MINUTE` when cast to interval day to minute: 20 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '20 15:40:32.99899999' day to minute\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nInterval string does not match day-time format of `[+|-]d h:m`, `INTERVAL [+|-]'[+|-]d h:m' DAY TO MINUTE` when cast to interval day to minute: 20 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '20 15:40:32.99899999' day to minute\n----------------^^^\n"},"queryContext":[]} -- !query @@ -972,7 +972,7 @@ select interval '15:40:32.99899999' hour to minute struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]h:m`, `INTERVAL [+|-]'[+|-]h:m' HOUR TO MINUTE` when cast to interval hour to minute: 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '15:40:32.99899999' hour to 
minute\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nInterval string does not match day-time format of `[+|-]h:m`, `INTERVAL [+|-]'[+|-]h:m' HOUR TO MINUTE` when cast to interval hour to minute: 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '15:40:32.99899999' hour to minute\n----------------^^^\n"},"queryContext":[]} -- !query @@ -981,7 +981,7 @@ select interval '15:40.99899999' hour to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 15:40.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '15:40.99899999' hour to second\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nInterval string does not match day-time format of `[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 15:40.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '15:40.99899999' hour to second\n----------------^^^\n"},"queryContext":[]} -- !query @@ -990,7 +990,7 @@ select interval '15:40' hour to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 15:40, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '15:40' hour to second\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nInterval string does not match day-time format of `[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 15:40, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '15:40' hour to second\n----------------^^^\n"},"queryContext":[]} -- !query @@ -999,7 +999,7 @@ select interval '20 40:32.99899999' minute to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]m:s.n`, `INTERVAL [+|-]'[+|-]m:s.n' MINUTE TO SECOND` when cast to interval minute to second: 20 40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '20 40:32.99899999' minute to second\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nInterval string does not match day-time format of `[+|-]m:s.n`, `INTERVAL [+|-]'[+|-]m:s.n' MINUTE TO SECOND` when cast to interval minute to second: 20 40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '20 40:32.99899999' minute to 
second\n----------------^^^\n"},"queryContext":[]} -- !query @@ -1008,7 +1008,7 @@ select interval 10 nanoseconds struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nError parsing ' 10 nanoseconds' to interval, invalid unit 'nanoseconds'(line 1, pos 16)\n\n== SQL ==\nselect interval 10 nanoseconds\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nError parsing ' 10 nanoseconds' to interval, invalid unit 'nanoseconds'(line 1, pos 16)\n\n== SQL ==\nselect interval 10 nanoseconds\n----------------^^^\n"},"queryContext":[]} -- !query @@ -1097,7 +1097,7 @@ select interval struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nat least one time unit should be given for interval literal(line 1, pos 7)\n\n== SQL ==\nselect interval\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nat least one time unit should be given for interval literal(line 1, pos 7)\n\n== SQL ==\nselect interval\n-------^^^\n"},"queryContext":[]} -- !query @@ -1106,7 +1106,7 @@ select interval 1 fake_unit struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nError parsing ' 1 fake_unit' to interval, invalid unit 'fake_unit'(line 1, pos 16)\n\n== SQL ==\nselect interval 1 fake_unit\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nError parsing ' 1 fake_unit' to interval, invalid unit 'fake_unit'(line 1, pos 16)\n\n== SQL ==\nselect interval 1 fake_unit\n----------------^^^\n"},"queryContext":[]} -- !query @@ -1115,7 +1115,7 @@ select interval 1 year to month struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nThe value of from-to unit must be a string(line 1, pos 16)\n\n== SQL ==\nselect interval 1 year to month\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nThe value of from-to unit must be a string(line 1, pos 16)\n\n== SQL ==\nselect interval 1 year to month\n----------------^^^\n"},"queryContext":[]} -- !query @@ -1124,7 +1124,7 @@ select interval '1' year to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nIntervals FROM year TO second are not supported.(line 1, pos 16)\n\n== SQL ==\nselect interval '1' year to second\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nIntervals FROM year TO second are not supported.(line 1, pos 16)\n\n== SQL ==\nselect interval '1' year to second\n----------------^^^\n"},"queryContext":[]} -- !query @@ -1133,7 +1133,7 @@ select interval '10-9' year to month '2-1' year to month struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 37)\n\n== SQL ==\nselect interval '10-9' year to month '2-1' year to month\n-------------------------------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 37)\n\n== SQL ==\nselect interval '10-9' year to month '2-1' year to 
month\n-------------------------------------^^^\n"},"queryContext":[]} -- !query @@ -1142,7 +1142,7 @@ select interval '10-9' year to month '12:11:10' hour to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 37)\n\n== SQL ==\nselect interval '10-9' year to month '12:11:10' hour to second\n-------------------------------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 37)\n\n== SQL ==\nselect interval '10-9' year to month '12:11:10' hour to second\n-------------------------------------^^^\n"},"queryContext":[]} -- !query @@ -1151,7 +1151,7 @@ select interval '1 15:11' day to minute '12:11:10' hour to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 40)\n\n== SQL ==\nselect interval '1 15:11' day to minute '12:11:10' hour to second\n----------------------------------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 40)\n\n== SQL ==\nselect interval '1 15:11' day to minute '12:11:10' hour to second\n----------------------------------------^^^\n"},"queryContext":[]} -- !query @@ -1160,7 +1160,7 @@ select interval 1 year '2-1' year to month struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 23)\n\n== SQL ==\nselect interval 1 year '2-1' year to month\n-----------------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 23)\n\n== SQL ==\nselect interval 1 year '2-1' year to month\n-----------------------^^^\n"},"queryContext":[]} -- !query @@ -1169,7 +1169,7 @@ select interval 1 year '12:11:10' hour to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 23)\n\n== SQL ==\nselect interval 1 year '12:11:10' hour to second\n-----------------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 23)\n\n== SQL ==\nselect interval 1 year '12:11:10' hour to second\n-----------------------^^^\n"},"queryContext":[]} -- !query @@ -1178,7 +1178,7 @@ select interval '10-9' year to month '1' year struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 37)\n\n== SQL ==\nselect interval '10-9' year to month '1' year\n-------------------------------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 37)\n\n== SQL ==\nselect interval '10-9' year to month '1' year\n-------------------------------------^^^\n"},"queryContext":[]} -- !query @@ -1187,7 +1187,7 @@ 
select interval '12:11:10' hour to second '1' year struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 42)\n\n== SQL ==\nselect interval '12:11:10' hour to second '1' year\n------------------------------------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 42)\n\n== SQL ==\nselect interval '12:11:10' hour to second '1' year\n------------------------------------------^^^\n"},"queryContext":[]} -- !query @@ -1196,7 +1196,7 @@ select interval (-30) day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7"},"queryContext":[]} -- !query @@ -1205,7 +1205,7 @@ select interval (a + 1) day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7"},"queryContext":[]} -- !query @@ -1214,7 +1214,7 @@ select interval 30 day day day struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'day'",": extra input 'day'"],"queryContext":[]} +{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":{"error":"'day'","hint":": extra input 'day'"},"queryContext":[]} -- !query @@ -1223,7 +1223,7 @@ select interval (-30) days struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7"},"queryContext":[]} -- !query @@ -1232,7 +1232,7 @@ select interval (a + 1) days struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Undefined function: interval. 
This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7"},"queryContext":[]} -- !query @@ -1241,7 +1241,7 @@ select interval 30 days days days struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'days'",": extra input 'days'"],"queryContext":[]} +{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":{"error":"'days'","hint":": extra input 'days'"},"queryContext":[]} -- !query @@ -1258,7 +1258,7 @@ SELECT INTERVAL '178956970-8' YEAR TO MONTH struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nError parsing interval year-month string: integer overflow(line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '178956970-8' YEAR TO MONTH\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nError parsing interval year-month string: integer overflow(line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '178956970-8' YEAR TO MONTH\n----------------^^^\n"},"queryContext":[]} -- !query @@ -1303,7 +1303,7 @@ select struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '2' YEAR + '3-3 year to month')' due to data type mismatch: differing types in '(INTERVAL '2' YEAR + '3-3 year to month')' (interval year and string).; line 2 pos 2"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(INTERVAL '2' YEAR + '3-3 year to month')' due to data type mismatch: differing types in '(INTERVAL '2' YEAR + '3-3 year to month')' (interval year and string).; line 2 pos 2"},"queryContext":[]} -- !query @@ -1328,7 +1328,7 @@ select interval '2' year + '3-3' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '2' YEAR + '3-3')' due to data type mismatch: differing types in '(INTERVAL '2' YEAR + '3-3')' (interval year and string).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(INTERVAL '2' YEAR + '3-3')' due to data type mismatch: differing types in '(INTERVAL '2' YEAR + '3-3')' (interval year and string).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1337,7 +1337,7 @@ select interval '2' year - '4' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '2' YEAR - '4')' due to data type mismatch: differing types in '(INTERVAL '2' YEAR - '4')' (interval year and string).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(INTERVAL '2' YEAR - '4')' due to data type mismatch: differing types in '(INTERVAL '2' YEAR - '4')' (interval year and string).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1346,7 +1346,7 @@ select '4 11:11' - interval '4 22:12' day to minute struct<> -- !query output org.apache.spark.SparkDateTimeException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'4 11:11'","\"STRING\"","\"TIMESTAMP\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":50,"fragment":"'4 11:11' - interval '4 22:12' day to minut"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'4 
11:11'","sourceType":"\"STRING\"","targetType":"\"TIMESTAMP\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":50,"fragment":"'4 11:11' - interval '4 22:12' day to minut"}]} -- !query @@ -1355,7 +1355,7 @@ select '4 12:12:12' + interval '4 22:12' day to minute struct<> -- !query output org.apache.spark.SparkDateTimeException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'4 12:12:12'","\"STRING\"","\"TIMESTAMP\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":53,"fragment":"'4 12:12:12' + interval '4 22:12' day to minut"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'4 12:12:12'","sourceType":"\"STRING\"","targetType":"\"TIMESTAMP\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":53,"fragment":"'4 12:12:12' + interval '4 22:12' day to minut"}]} -- !query @@ -1372,7 +1372,7 @@ select interval '2' year + str from interval_view struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '2' YEAR + interval_view.str)' due to data type mismatch: differing types in '(INTERVAL '2' YEAR + interval_view.str)' (interval year and string).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(INTERVAL '2' YEAR + interval_view.str)' due to data type mismatch: differing types in '(INTERVAL '2' YEAR + interval_view.str)' (interval year and string).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1381,7 +1381,7 @@ select interval '2' year - str from interval_view struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '2' YEAR - interval_view.str)' due to data type mismatch: differing types in '(INTERVAL '2' YEAR - interval_view.str)' (interval year and string).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(INTERVAL '2' YEAR - interval_view.str)' due to data type mismatch: differing types in '(INTERVAL '2' YEAR - interval_view.str)' (interval year and string).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1390,7 +1390,7 @@ select str - interval '4 22:12' day to minute from interval_view struct<> -- !query output org.apache.spark.SparkDateTimeException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'1'","\"STRING\"","\"TIMESTAMP\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":44,"fragment":"str - interval '4 22:12' day to minut"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'1'","sourceType":"\"STRING\"","targetType":"\"TIMESTAMP\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":44,"fragment":"str - interval '4 22:12' day to minut"}]} -- !query @@ -1399,7 +1399,7 @@ select str + interval '4 22:12' day to minute from interval_view struct<> -- !query output org.apache.spark.SparkDateTimeException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'1'","\"STRING\"","\"TIMESTAMP\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":44,"fragment":"str + interval '4 22:12' day to 
minut"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'1'","sourceType":"\"STRING\"","targetType":"\"TIMESTAMP\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":44,"fragment":"str + interval '4 22:12' day to minut"}]} -- !query @@ -1408,7 +1408,7 @@ select interval '2-2' year to month + interval '3' day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'INTERVAL '2-2' YEAR TO MONTH + INTERVAL '3' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2-2' YEAR TO MONTH' is of interval year to month type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'INTERVAL '2-2' YEAR TO MONTH + INTERVAL '3' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2-2' YEAR TO MONTH' is of interval year to month type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -1417,7 +1417,7 @@ select interval '3' day + interval '2-2' year to month struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'INTERVAL '2-2' YEAR TO MONTH + INTERVAL '3' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2-2' YEAR TO MONTH' is of interval year to month type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'INTERVAL '2-2' YEAR TO MONTH + INTERVAL '3' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2-2' YEAR TO MONTH' is of interval year to month type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -1426,7 +1426,7 @@ select interval '2-2' year to month - interval '3' day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'INTERVAL '2-2' YEAR TO MONTH + (- INTERVAL '3' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2-2' YEAR TO MONTH' is of interval year to month type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'INTERVAL '2-2' YEAR TO MONTH + (- INTERVAL '3' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2-2' YEAR TO MONTH' is of interval year to month type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -1435,7 +1435,7 @@ select interval '3' day - interval '2-2' year to month struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '3' DAY - INTERVAL '2-2' YEAR TO MONTH)' due to data type mismatch: differing types in '(INTERVAL '3' DAY - INTERVAL '2-2' YEAR TO MONTH)' (interval day and interval year to month).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(INTERVAL '3' DAY - INTERVAL '2-2' YEAR TO MONTH)' due to data type mismatch: differing types in '(INTERVAL '3' DAY - INTERVAL '2-2' YEAR TO MONTH)' (interval day and interval year to month).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1444,7 +1444,7 @@ select 1 - interval '2' second struct<> -- !query 
output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '1 + (- INTERVAL '02' SECOND)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, '1' is of int type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '1 + (- INTERVAL '02' SECOND)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, '1' is of int type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -1453,7 +1453,7 @@ select 1 + interval '2' month struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(1 + INTERVAL '2' MONTH)' due to data type mismatch: differing types in '(1 + INTERVAL '2' MONTH)' (int and interval month).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(1 + INTERVAL '2' MONTH)' due to data type mismatch: differing types in '(1 + INTERVAL '2' MONTH)' (int and interval month).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1462,7 +1462,7 @@ select interval '2' second + 1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '1 + INTERVAL '02' SECOND' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, '1' is of int type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '1 + INTERVAL '02' SECOND' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, '1' is of int type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -1471,7 +1471,7 @@ select interval '2' month - 1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '2' MONTH - 1)' due to data type mismatch: differing types in '(INTERVAL '2' MONTH - 1)' (interval month and int).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(INTERVAL '2' MONTH - 1)' due to data type mismatch: differing types in '(INTERVAL '2' MONTH - 1)' (interval month and int).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1528,7 +1528,7 @@ select interval '-\t2-2\t' year to month struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nInterval string does not match year-month format of `[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval year to month: -\t2-2\t(line 1, pos 16)\n\n== SQL ==\nselect interval '-\\t2-2\\t' year to month\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nInterval string does not match year-month format of `[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval year to month: -\t2-2\t(line 1, pos 16)\n\n== SQL ==\nselect interval '-\\t2-2\\t' year to month\n----------------^^^\n"},"queryContext":[]} -- !query @@ -1545,7 +1545,7 @@ select interval '\n-\t10\t 12:34:46.789\t' day to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]d h:m:s.n`, `INTERVAL [+|-]'[+|-]d h:m:s.n' DAY TO SECOND` when cast to interval day to second: \n-\t10\t 12:34:46.789\t, set 
spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '\\n-\\t10\\t 12:34:46.789\\t' day to second\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nInterval string does not match day-time format of `[+|-]d h:m:s.n`, `INTERVAL [+|-]'[+|-]d h:m:s.n' DAY TO SECOND` when cast to interval day to second: \n-\t10\t 12:34:46.789\t, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '\\n-\\t10\\t 12:34:46.789\\t' day to second\n----------------^^^\n"},"queryContext":[]} -- !query @@ -1554,7 +1554,7 @@ select interval '中文 interval 1 day' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the INTERVAL value: 中文 interval 1 day(line 1, pos 7)\n\n== SQL ==\nselect interval '中文 interval 1 day'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the INTERVAL value: 中文 interval 1 day(line 1, pos 7)\n\n== SQL ==\nselect interval '中文 interval 1 day'\n-------^^^\n"},"queryContext":[]} -- !query @@ -1563,7 +1563,7 @@ select interval 'interval中文 1 day' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the INTERVAL value: interval中文 1 day(line 1, pos 7)\n\n== SQL ==\nselect interval 'interval中文 1 day'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the INTERVAL value: interval中文 1 day(line 1, pos 7)\n\n== SQL ==\nselect interval 'interval中文 1 day'\n-------^^^\n"},"queryContext":[]} -- !query @@ -1572,7 +1572,7 @@ select interval 'interval 1中文day' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the INTERVAL value: interval 1中文day(line 1, pos 7)\n\n== SQL ==\nselect interval 'interval 1中文day'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the INTERVAL value: interval 1中文day(line 1, pos 7)\n\n== SQL ==\nselect interval 'interval 1中文day'\n-------^^^\n"},"queryContext":[]} -- !query @@ -1581,7 +1581,7 @@ select -(a) from values (interval '-2147483648 months', interval '2147483647 mon struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["integer overflow",""],"queryContext":[]} +{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":{"message":"integer overflow","alternative":""},"queryContext":[]} -- !query @@ -1590,7 +1590,7 @@ select a - b from values (interval '-2147483648 months', interval '2147483647 mo struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["integer overflow"," Use 'try_subtract' to tolerate overflow and return NULL instead."],"queryContext":[]} +{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":{"message":"integer overflow","alternative":" Use 'try_subtract' to tolerate overflow and return NULL instead."},"queryContext":[]} -- !query @@ -1599,7 +1599,7 @@ select b + interval '1 month' from values (interval '-2147483648 months', interv struct<> -- !query output 
org.apache.spark.SparkArithmeticException -{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["integer overflow"," Use 'try_add' to tolerate overflow and return NULL instead."],"queryContext":[]} +{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":{"message":"integer overflow","alternative":" Use 'try_add' to tolerate overflow and return NULL instead."},"queryContext":[]} -- !query @@ -1608,7 +1608,7 @@ select a * 1.1 from values (interval '-2147483648 months', interval '2147483647 struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["Overflow"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Overflow"},"queryContext":[]} -- !query @@ -1617,7 +1617,7 @@ select a / 0.5 from values (interval '-2147483648 months', interval '2147483647 struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["Overflow"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Overflow"},"queryContext":[]} -- !query @@ -1678,7 +1678,7 @@ select interval '+' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the INTERVAL value: +(line 1, pos 7)\n\n== SQL ==\nselect interval '+'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the INTERVAL value: +(line 1, pos 7)\n\n== SQL ==\nselect interval '+'\n-------^^^\n"},"queryContext":[]} -- !query @@ -1687,7 +1687,7 @@ select interval '+.' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the INTERVAL value: +.(line 1, pos 7)\n\n== SQL ==\nselect interval '+.'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the INTERVAL value: +.(line 1, pos 7)\n\n== SQL ==\nselect interval '+.'\n-------^^^\n"},"queryContext":[]} -- !query @@ -1696,7 +1696,7 @@ select interval '1' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the INTERVAL value: 1(line 1, pos 7)\n\n== SQL ==\nselect interval '1'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the INTERVAL value: 1(line 1, pos 7)\n\n== SQL ==\nselect interval '1'\n-------^^^\n"},"queryContext":[]} -- !query @@ -1705,7 +1705,7 @@ select interval '1.2' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the INTERVAL value: 1.2(line 1, pos 7)\n\n== SQL ==\nselect interval '1.2'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the INTERVAL value: 1.2(line 1, pos 7)\n\n== SQL ==\nselect interval '1.2'\n-------^^^\n"},"queryContext":[]} -- !query @@ -1714,7 +1714,7 @@ select interval '- 2' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the INTERVAL value: - 2(line 1, pos 7)\n\n== SQL ==\nselect interval '- 2'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the INTERVAL value: - 2(line 1, pos 7)\n\n== SQL ==\nselect interval '- 2'\n-------^^^\n"},"queryContext":[]} -- !query @@ -1723,7 +1723,7 @@ select interval '1 day -' 
struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the INTERVAL value: 1 day -(line 1, pos 7)\n\n== SQL ==\nselect interval '1 day -'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the INTERVAL value: 1 day -(line 1, pos 7)\n\n== SQL ==\nselect interval '1 day -'\n-------^^^\n"},"queryContext":[]} -- !query @@ -1732,7 +1732,7 @@ select interval '1 day 1' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the INTERVAL value: 1 day 1(line 1, pos 7)\n\n== SQL ==\nselect interval '1 day 1'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the INTERVAL value: 1 day 1(line 1, pos 7)\n\n== SQL ==\nselect interval '1 day 1'\n-------^^^\n"},"queryContext":[]} -- !query @@ -1741,7 +1741,7 @@ select interval '1 day 2' day struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCan only use numbers in the interval value part for multiple unit value pairs interval form, but got invalid value: 1 day 2(line 1, pos 16)\n\n== SQL ==\nselect interval '1 day 2' day\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCan only use numbers in the interval value part for multiple unit value pairs interval form, but got invalid value: 1 day 2(line 1, pos 16)\n\n== SQL ==\nselect interval '1 day 2' day\n----------------^^^\n"},"queryContext":[]} -- !query @@ -1750,7 +1750,7 @@ select interval 'interval 1' day struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCan only use numbers in the interval value part for multiple unit value pairs interval form, but got invalid value: interval 1(line 1, pos 16)\n\n== SQL ==\nselect interval 'interval 1' day\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCan only use numbers in the interval value part for multiple unit value pairs interval form, but got invalid value: interval 1(line 1, pos 16)\n\n== SQL ==\nselect interval 'interval 1' day\n----------------^^^\n"},"queryContext":[]} -- !query @@ -1783,7 +1783,7 @@ SELECT (INTERVAL '-178956970-8' YEAR TO MONTH) / -1 struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["Interval value overflows after being divided by -1"," Use 'try_divide' to tolerate overflow and return NULL instead."],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":50,"fragment":"(INTERVAL '-178956970-8' YEAR TO MONTH) / -"}]} +{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":{"message":"Interval value overflows after being divided by -1","alternative":" Use 'try_divide' to tolerate overflow and return NULL instead."},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":50,"fragment":"(INTERVAL '-178956970-8' YEAR TO MONTH) / -"}]} -- !query @@ -1792,7 +1792,7 @@ SELECT (INTERVAL '-178956970-8' YEAR TO MONTH) / -1L struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["Interval value overflows after being divided by -1"," Use 'try_divide' to 
tolerate overflow and return NULL instead."],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":51,"fragment":"(INTERVAL '-178956970-8' YEAR TO MONTH) / -1"}]} +{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":{"message":"Interval value overflows after being divided by -1","alternative":" Use 'try_divide' to tolerate overflow and return NULL instead."},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":51,"fragment":"(INTERVAL '-178956970-8' YEAR TO MONTH) / -1"}]} -- !query @@ -1801,7 +1801,7 @@ SELECT (INTERVAL '-178956970-8' YEAR TO MONTH) / -1.0 struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["Overflow"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Overflow"},"queryContext":[]} -- !query @@ -1810,7 +1810,7 @@ SELECT (INTERVAL '-178956970-8' YEAR TO MONTH) / -1.0D struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["not in range"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"not in range"},"queryContext":[]} -- !query @@ -1835,7 +1835,7 @@ SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1 struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["Interval value overflows after being divided by -1"," Use 'try_divide' to tolerate overflow and return NULL instead."],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":64,"fragment":"(INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -"}]} +{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":{"message":"Interval value overflows after being divided by -1","alternative":" Use 'try_divide' to tolerate overflow and return NULL instead."},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":64,"fragment":"(INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -"}]} -- !query @@ -1844,7 +1844,7 @@ SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1L struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["Interval value overflows after being divided by -1"," Use 'try_divide' to tolerate overflow and return NULL instead."],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":65,"fragment":"(INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1"}]} +{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":{"message":"Interval value overflows after being divided by -1","alternative":" Use 'try_divide' to tolerate overflow and return NULL instead."},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":65,"fragment":"(INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1"}]} -- !query @@ -1853,7 +1853,7 @@ SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1.0 struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["Overflow"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Overflow"},"queryContext":[]} -- !query @@ -1862,7 +1862,7 @@ SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1.0D struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["not in range"],"queryContext":[]} 
+{"errorClass":"legacy","messageParameters":{"message":"not in range"},"queryContext":[]} -- !query @@ -1967,7 +1967,7 @@ SELECT INTERVAL '106751992 04' DAY TO HOUR struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nrequirement failed: day 106751992 outside range [0, 106751991](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '106751992 04' DAY TO HOUR\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nrequirement failed: day 106751992 outside range [0, 106751991](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '106751992 04' DAY TO HOUR\n----------------^^^\n"},"queryContext":[]} -- !query @@ -1976,7 +1976,7 @@ SELECT INTERVAL '-106751992 04' DAY TO HOUR struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nrequirement failed: day 106751992 outside range [0, 106751991](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '-106751992 04' DAY TO HOUR\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nrequirement failed: day 106751992 outside range [0, 106751991](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '-106751992 04' DAY TO HOUR\n----------------^^^\n"},"queryContext":[]} -- !query @@ -1985,7 +1985,7 @@ SELECT INTERVAL '2562047789:00' HOUR TO MINUTE struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nrequirement failed: hour 2562047789 outside range [0, 2562047788](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '2562047789:00' HOUR TO MINUTE\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nrequirement failed: hour 2562047789 outside range [0, 2562047788](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '2562047789:00' HOUR TO MINUTE\n----------------^^^\n"},"queryContext":[]} -- !query @@ -1994,7 +1994,7 @@ SELECT INTERVAL '-2562047789:00' HOUR TO MINUTE struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nrequirement failed: hour 2562047789 outside range [0, 2562047788](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '-2562047789:00' HOUR TO MINUTE\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nrequirement failed: hour 2562047789 outside range [0, 2562047788](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '-2562047789:00' HOUR TO MINUTE\n----------------^^^\n"},"queryContext":[]} -- !query @@ -2003,7 +2003,7 @@ SELECT INTERVAL '153722867281:54.775808' MINUTE TO SECOND struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nrequirement failed: minute 153722867281 outside range [0, 153722867280](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '153722867281:54.775808' MINUTE TO SECOND\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nrequirement failed: minute 153722867281 outside range [0, 153722867280](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '153722867281:54.775808' MINUTE TO SECOND\n----------------^^^\n"},"queryContext":[]} -- !query @@ -2012,7 +2012,7 @@ SELECT INTERVAL '-153722867281:54.775808' MINUTE TO SECOND struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nrequirement failed: minute 153722867281 outside 
range [0, 153722867280](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '-153722867281:54.775808' MINUTE TO SECOND\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nrequirement failed: minute 153722867281 outside range [0, 153722867280](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '-153722867281:54.775808' MINUTE TO SECOND\n----------------^^^\n"},"queryContext":[]} -- !query @@ -2149,7 +2149,7 @@ SELECT INTERVAL 1 MONTH > INTERVAL 20 DAYS struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '1' MONTH > INTERVAL '20' DAY)' due to data type mismatch: differing types in '(INTERVAL '1' MONTH > INTERVAL '20' DAY)' (interval month and interval day).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(INTERVAL '1' MONTH > INTERVAL '20' DAY)' due to data type mismatch: differing types in '(INTERVAL '1' MONTH > INTERVAL '20' DAY)' (interval month and interval day).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2158,7 +2158,7 @@ SELECT INTERVAL '1' DAY < '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '1' DAY < '1')' due to data type mismatch: differing types in '(INTERVAL '1' DAY < '1')' (interval day and string).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(INTERVAL '1' DAY < '1')' due to data type mismatch: differing types in '(INTERVAL '1' DAY < '1')' (interval day and string).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2167,7 +2167,7 @@ SELECT INTERVAL '1' DAY = '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '1' DAY = '1')' due to data type mismatch: differing types in '(INTERVAL '1' DAY = '1')' (interval day and string).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(INTERVAL '1' DAY = '1')' due to data type mismatch: differing types in '(INTERVAL '1' DAY = '1')' (interval day and string).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2176,7 +2176,7 @@ SELECT INTERVAL '1' DAY > '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '1' DAY > '1')' due to data type mismatch: differing types in '(INTERVAL '1' DAY > '1')' (interval day and string).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(INTERVAL '1' DAY > '1')' due to data type mismatch: differing types in '(INTERVAL '1' DAY > '1')' (interval day and string).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2185,7 +2185,7 @@ SELECT '1' < INTERVAL '1' DAY struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '('1' < INTERVAL '1' DAY)' due to data type mismatch: differing types in '('1' < INTERVAL '1' DAY)' (string and interval day).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '('1' < INTERVAL '1' DAY)' due to data type mismatch: differing types in '('1' < INTERVAL '1' DAY)' (string and interval day).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2194,7 +2194,7 @@ SELECT '1' = INTERVAL '1' DAY struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":"legacy","messageParameters":["cannot resolve '('1' = INTERVAL '1' DAY)' due to data type mismatch: differing types in '('1' = INTERVAL '1' DAY)' (string and interval day).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '('1' = INTERVAL '1' DAY)' due to data type mismatch: differing types in '('1' = INTERVAL '1' DAY)' (string and interval day).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2203,7 +2203,7 @@ SELECT '1' > INTERVAL '1' DAY struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '('1' > INTERVAL '1' DAY)' due to data type mismatch: differing types in '('1' > INTERVAL '1' DAY)' (string and interval day).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '('1' > INTERVAL '1' DAY)' due to data type mismatch: differing types in '('1' > INTERVAL '1' DAY)' (string and interval day).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2212,7 +2212,7 @@ SELECT INTERVAL '1' YEAR < '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '1' YEAR < '1')' due to data type mismatch: differing types in '(INTERVAL '1' YEAR < '1')' (interval year and string).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(INTERVAL '1' YEAR < '1')' due to data type mismatch: differing types in '(INTERVAL '1' YEAR < '1')' (interval year and string).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2221,7 +2221,7 @@ SELECT INTERVAL '1' YEAR = '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '1' YEAR = '1')' due to data type mismatch: differing types in '(INTERVAL '1' YEAR = '1')' (interval year and string).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(INTERVAL '1' YEAR = '1')' due to data type mismatch: differing types in '(INTERVAL '1' YEAR = '1')' (interval year and string).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2230,7 +2230,7 @@ SELECT INTERVAL '1' YEAR > '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '1' YEAR > '1')' due to data type mismatch: differing types in '(INTERVAL '1' YEAR > '1')' (interval year and string).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(INTERVAL '1' YEAR > '1')' due to data type mismatch: differing types in '(INTERVAL '1' YEAR > '1')' (interval year and string).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2239,7 +2239,7 @@ SELECT '1' < INTERVAL '1' YEAR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '('1' < INTERVAL '1' YEAR)' due to data type mismatch: differing types in '('1' < INTERVAL '1' YEAR)' (string and interval year).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '('1' < INTERVAL '1' YEAR)' due to data type mismatch: differing types in '('1' < INTERVAL '1' YEAR)' (string and interval year).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2248,7 +2248,7 @@ SELECT '1' = INTERVAL '1' YEAR struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":"legacy","messageParameters":["cannot resolve '('1' = INTERVAL '1' YEAR)' due to data type mismatch: differing types in '('1' = INTERVAL '1' YEAR)' (string and interval year).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '('1' = INTERVAL '1' YEAR)' due to data type mismatch: differing types in '('1' = INTERVAL '1' YEAR)' (string and interval year).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2257,7 +2257,7 @@ SELECT '1' > INTERVAL '1' YEAR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '('1' > INTERVAL '1' YEAR)' due to data type mismatch: differing types in '('1' > INTERVAL '1' YEAR)' (string and interval year).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '('1' > INTERVAL '1' YEAR)' due to data type mismatch: differing types in '('1' > INTERVAL '1' YEAR)' (string and interval year).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2282,7 +2282,7 @@ SELECT array(INTERVAL 1 MONTH, INTERVAL 20 DAYS) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'array(INTERVAL '1' MONTH, INTERVAL '20' DAY)' due to data type mismatch: input to function array should all be the same type, but it's [interval month, interval day]; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'array(INTERVAL '1' MONTH, INTERVAL '20' DAY)' due to data type mismatch: input to function array should all be the same type, but it's [interval month, interval day]; line 1 pos 7"},"queryContext":[]} -- !query @@ -2307,7 +2307,7 @@ SELECT coalesce(INTERVAL 1 MONTH, INTERVAL 20 DAYS) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'coalesce(INTERVAL '1' MONTH, INTERVAL '20' DAY)' due to data type mismatch: input to function coalesce should all be the same type, but it's [interval month, interval day]; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'coalesce(INTERVAL '1' MONTH, INTERVAL '20' DAY)' due to data type mismatch: input to function coalesce should all be the same type, but it's [interval month, interval day]; line 1 pos 7"},"queryContext":[]} -- !query @@ -2364,7 +2364,7 @@ SELECT div(INTERVAL '1' MONTH, INTERVAL '-1' DAY) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '1' MONTH div INTERVAL '-1' DAY)' due to data type mismatch: differing types in '(INTERVAL '1' MONTH div INTERVAL '-1' DAY)' (interval month and interval day).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(INTERVAL '1' MONTH div INTERVAL '-1' DAY)' due to data type mismatch: differing types in '(INTERVAL '1' MONTH div INTERVAL '-1' DAY)' (interval month and interval day).; line 1 pos 7"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/literals.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/literals.sql.out index 3085fdaeac7b1..3562286a2734d 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/literals.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/literals.sql.out @@ -37,7 +37,7 @@ select 128Y struct<> -- !query output 
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":"legacy","messageParameters":["\nNumeric literal 128 does not fit in range [-128, 127] for type tinyint(line 1, pos 7)\n\n== SQL ==\nselect 128Y\n-------^^^\n"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"\nNumeric literal 128 does not fit in range [-128, 127] for type tinyint(line 1, pos 7)\n\n== SQL ==\nselect 128Y\n-------^^^\n"},"queryContext":[]}


 -- !query
@@ -62,7 +62,7 @@ select 32768S
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":"legacy","messageParameters":["\nNumeric literal 32768 does not fit in range [-32768, 32767] for type smallint(line 1, pos 7)\n\n== SQL ==\nselect 32768S\n-------^^^\n"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"\nNumeric literal 32768 does not fit in range [-32768, 32767] for type smallint(line 1, pos 7)\n\n== SQL ==\nselect 32768S\n-------^^^\n"},"queryContext":[]}


 -- !query
@@ -87,7 +87,7 @@ select 9223372036854775808L
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":"legacy","messageParameters":["\nNumeric literal 9223372036854775808 does not fit in range [-9223372036854775808, 9223372036854775807] for type bigint(line 1, pos 7)\n\n== SQL ==\nselect 9223372036854775808L\n-------^^^\n"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"\nNumeric literal 9223372036854775808 does not fit in range [-9223372036854775808, 9223372036854775807] for type bigint(line 1, pos 7)\n\n== SQL ==\nselect 9223372036854775808L\n-------^^^\n"},"queryContext":[]}


 -- !query
@@ -128,7 +128,7 @@ select 1234567890123456789012345678901234567890
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":"legacy","messageParameters":["\ndecimal can only support precision up to 38\n== SQL ==\nselect 1234567890123456789012345678901234567890"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"\ndecimal can only support precision up to 38\n== SQL ==\nselect 1234567890123456789012345678901234567890"},"queryContext":[]}


 -- !query
@@ -137,7 +137,7 @@ select 1234567890123456789012345678901234567890.0
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":"legacy","messageParameters":["\ndecimal can only support precision up to 38\n== SQL ==\nselect 1234567890123456789012345678901234567890.0"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"\ndecimal can only support precision up to 38\n== SQL ==\nselect 1234567890123456789012345678901234567890.0"},"queryContext":[]}


 -- !query
@@ -162,7 +162,7 @@ select -3.4028235E39f
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":"legacy","messageParameters":["\nNumeric literal -3.4028235E39 does not fit in range [-3.4028234663852886E+38, 3.4028234663852886E+38] for type float(line 1, pos 7)\n\n== SQL ==\nselect -3.4028235E39f\n-------^^^\n"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"\nNumeric literal -3.4028235E39 does not fit in range [-3.4028234663852886E+38, 3.4028234663852886E+38] for type float(line 1, pos 7)\n\n== SQL ==\nselect -3.4028235E39f\n-------^^^\n"},"queryContext":[]}


 -- !query
@@ -187,7 +187,7 @@ select .e3
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'.'",""],"queryContext":[]}
+{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":{"error":"'.'","hint":""},"queryContext":[]}


 -- !query
@@ -196,7 +196,7 @@ select 1E309, -1E309
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":"legacy","messageParameters":["\nNumeric literal 1E309 does not fit in range [-1.7976931348623157E+308, 1.7976931348623157E+308] for type double(line 1, pos 7)\n\n== SQL ==\nselect 1E309, -1E309\n-------^^^\n"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"\nNumeric literal 1E309 does not fit in range [-1.7976931348623157E+308, 1.7976931348623157E+308] for type double(line 1, pos 7)\n\n== SQL ==\nselect 1E309, -1E309\n-------^^^\n"},"queryContext":[]}


 -- !query
@@ -295,7 +295,7 @@ select date 'mar 11 2016'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: mar 11 2016(line 1, pos 7)\n\n== SQL ==\nselect date 'mar 11 2016'\n-------^^^\n"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the DATE value: mar 11 2016(line 1, pos 7)\n\n== SQL ==\nselect date 'mar 11 2016'\n-------^^^\n"},"queryContext":[]}


 -- !query
@@ -312,7 +312,7 @@ select timestamp '2016-33-11 20:54:00.000'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":"legacy","messageParameters":["\nCannot parse the TIMESTAMP value: 2016-33-11 20:54:00.000(line 1, pos 7)\n\n== SQL ==\nselect timestamp '2016-33-11 20:54:00.000'\n-------^^^\n"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the TIMESTAMP value: 2016-33-11 20:54:00.000(line 1, pos 7)\n\n== SQL ==\nselect timestamp '2016-33-11 20:54:00.000'\n-------^^^\n"},"queryContext":[]}


 -- !query
@@ -321,7 +321,7 @@ select GEO '(10,-6)'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":"legacy","messageParameters":["\nLiterals of type 'GEO' are currently not supported.(line 1, pos 7)\n\n== SQL ==\nselect GEO '(10,-6)'\n-------^^^\n"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"\nLiterals of type 'GEO' are currently not supported.(line 1, pos 7)\n\n== SQL ==\nselect GEO '(10,-6)'\n-------^^^\n"},"queryContext":[]}


 -- !query
@@ -338,7 +338,7 @@ select 1.20E-38BD
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":"legacy","messageParameters":["\ndecimal can only support precision up to 38(line 1, pos 7)\n\n== SQL ==\nselect 1.20E-38BD\n-------^^^\n"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"\ndecimal can only support precision up to 38(line 1, pos 7)\n\n== SQL ==\nselect 1.20E-38BD\n-------^^^\n"},"queryContext":[]}


 -- !query
@@ -355,7 +355,7 @@ select X'XuZ'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":"legacy","messageParameters":["\ncontains illegal character for hexBinary: 0XuZ(line 1, pos 7)\n\n== SQL ==\nselect X'XuZ'\n-------^^^\n"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"\ncontains illegal character for hexBinary: 0XuZ(line 1, pos 7)\n\n== SQL ==\nselect X'XuZ'\n-------^^^\n"},"queryContext":[]}


 -- !query
@@ -372,7 +372,7 @@ select +date '1999-01-01'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(+ DATE '1999-01-01')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'DATE '1999-01-01'' is of date type.; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(+ DATE '1999-01-01')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'DATE '1999-01-01'' is of date type.; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -381,7 +381,7 @@ select +timestamp '1999-01-01'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(+ TIMESTAMP '1999-01-01 00:00:00')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'TIMESTAMP '1999-01-01 00:00:00'' is of timestamp type.; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(+ TIMESTAMP '1999-01-01 00:00:00')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'TIMESTAMP '1999-01-01 00:00:00'' is of timestamp type.; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -398,7 +398,7 @@ select +map(1, 2)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(+ map(1, 2))' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'map(1, 2)' is of map type.; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(+ map(1, 2))' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'map(1, 2)' is of map type.; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -407,7 +407,7 @@ select +array(1,2)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(+ array(1, 2))' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'array(1, 2)' is of array type.; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(+ array(1, 2))' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'array(1, 2)' is of array type.; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -416,7 +416,7 @@ select +named_struct('a', 1, 'b', 'spark')
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(+ named_struct('a', 1, 'b', 'spark'))' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'named_struct('a', 1, 'b', 'spark')' is of struct type.; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(+ named_struct('a', 1, 'b', 'spark'))' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'named_struct('a', 1, 'b', 'spark')' is of struct type.; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -425,7 +425,7 @@ select +X'1'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(+ X'01')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'X'01'' is of binary type.; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(+ X'01')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'X'01'' is of binary type.; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -434,7 +434,7 @@ select -date '1999-01-01'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(- DATE '1999-01-01')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'DATE '1999-01-01'' is of date type.; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(- DATE '1999-01-01')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'DATE '1999-01-01'' is of date type.; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -443,7 +443,7 @@ select -timestamp '1999-01-01'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(- TIMESTAMP '1999-01-01 00:00:00')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'TIMESTAMP '1999-01-01 00:00:00'' is of timestamp type.; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(- TIMESTAMP '1999-01-01 00:00:00')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'TIMESTAMP '1999-01-01 00:00:00'' is of timestamp type.; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -452,4 +452,4 @@ select -x'2379ACFe'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(- X'2379ACFE')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'X'2379ACFE'' is of binary type.; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(- X'2379ACFE')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'X'2379ACFE'' is of binary type.; line 1 pos 7"},"queryContext":[]}
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/map.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/map.sql.out
index 41ab62980d8f0..248e1beb0660d 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/map.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/map.sql.out
@@ -5,7 +5,7 @@ select element_at(map(1, 'a', 2, 'b'), 5)
 struct<>
 -- !query output
 org.apache.spark.SparkNoSuchElementException
-{"errorClass":"MAP_KEY_DOES_NOT_EXIST","messageParameters":["5","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":40,"fragment":"element_at(map(1, 'a', 2, 'b'), 5"}]}
+{"errorClass":"MAP_KEY_DOES_NOT_EXIST","messageParameters":{"keyValue":"5","config":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":40,"fragment":"element_at(map(1, 'a', 2, 'b'), 5"}]}


 -- !query
@@ -14,7 +14,7 @@ select map(1, 'a', 2, 'b')[5]
 struct<>
 -- !query output
 org.apache.spark.SparkNoSuchElementException
-{"errorClass":"MAP_KEY_DOES_NOT_EXIST","messageParameters":["5","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":28,"fragment":"map(1, 'a', 2, 'b')[5"}]}
+{"errorClass":"MAP_KEY_DOES_NOT_EXIST","messageParameters":{"keyValue":"5","config":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":28,"fragment":"map(1, 'a', 2, 'b')[5"}]}


 -- !query
@@ -71,7 +71,7 @@ select map_contains_key(map('1', 'a', '2', 'b'), 1)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve 'map_contains_key(map('1', 'a', '2', 'b'), 1)' due to data type mismatch: Input to function map_contains_key should have been map followed by a value with same key type, but it's [map, int].; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'map_contains_key(map('1', 'a', '2', 'b'), 1)' due to data type mismatch: Input to function map_contains_key should have been map followed by a value with same key type, but it's [map, int].; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -80,7 +80,7 @@ select map_contains_key(map(1, 'a', 2, 'b'), '1')
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve 'map_contains_key(map(1, 'a', 2, 'b'), '1')' due to data type mismatch: Input to function map_contains_key should have been map followed by a value with same key type, but it's [map, string].; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'map_contains_key(map(1, 'a', 2, 'b'), '1')' due to data type mismatch: Input to function map_contains_key should have been map followed by a value with same key type, but it's [map, string].; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -105,7 +105,7 @@ select element_at(map(1, 'a', 2, 'b'), 5)
 struct<>
 -- !query output
 org.apache.spark.SparkNoSuchElementException
-{"errorClass":"MAP_KEY_DOES_NOT_EXIST","messageParameters":["5","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":40,"fragment":"element_at(map(1, 'a', 2, 'b'), 5"}]}
+{"errorClass":"MAP_KEY_DOES_NOT_EXIST","messageParameters":{"keyValue":"5","config":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":40,"fragment":"element_at(map(1, 'a', 2, 'b'), 5"}]}


 -- !query
@@ -114,4 +114,4 @@ select element_at(map('a', 1, 'b', 2), 'c')
 struct<>
 -- !query output
 org.apache.spark.SparkNoSuchElementException
-{"errorClass":"MAP_KEY_DOES_NOT_EXIST","messageParameters":["'c'","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":42,"fragment":"element_at(map('a', 1, 'b', 2), 'c'"}]}
+{"errorClass":"MAP_KEY_DOES_NOT_EXIST","messageParameters":{"keyValue":"'c'","config":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":42,"fragment":"element_at(map('a', 1, 'b', 2), 'c'"}]}
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out
index b4f57d5f23123..70ecc886fd417 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out
@@ -5,7 +5,7 @@ select concat_ws()
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["requirement failed: concat_ws requires at least one argument.; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"requirement failed: concat_ws requires at least one argument.; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -14,7 +14,7 @@ select format_string()
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["requirement failed: format_string() should take at least 1 argument; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"requirement failed: format_string() should take at least 1 argument; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -79,7 +79,7 @@ select left("abcd", -2), left("abcd", 0), left("abcd", 'a')
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'a'","\"STRING\"","\"INT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":42,"stopIndex":58,"fragment":"left(\"abcd\", 'a'"}]}
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'a'","sourceType":"\"STRING\"","targetType":"\"INT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":42,"stopIndex":58,"fragment":"left(\"abcd\", 'a'"}]}


 -- !query
@@ -104,7 +104,7 @@ select right("abcd", -2), right("abcd", 0), right("abcd", 'a')
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'a'","\"STRING\"","\"INT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":44,"stopIndex":61,"fragment":"right(\"abcd\", 'a'"}]}
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'a'","sourceType":"\"STRING\"","targetType":"\"INT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":44,"stopIndex":61,"fragment":"right(\"abcd\", 'a'"}]}


 -- !query
@@ -169,7 +169,7 @@ SELECT split_part('11.12.13', '.', 0)
 struct<>
 -- !query output
 org.apache.spark.SparkRuntimeException
-{"errorClass":"ELEMENT_AT_BY_INDEX_ZERO","messageParameters":[],"queryContext":[]}
+{"errorClass":"ELEMENT_AT_BY_INDEX_ZERO","messageParameters":{},"queryContext":[]}


 -- !query
@@ -410,7 +410,7 @@ SELECT lpad('hi', 'invalid_length')
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'invalid_length'","\"STRING\"","\"INT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":34,"fragment":"lpad('hi', 'invalid_length'"}]}
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'invalid_length'","sourceType":"\"STRING\"","targetType":"\"INT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":34,"fragment":"lpad('hi', 'invalid_length'"}]}


 -- !query
@@ -419,7 +419,7 @@ SELECT rpad('hi', 'invalid_length')
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'invalid_length'","\"STRING\"","\"INT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":34,"fragment":"rpad('hi', 'invalid_length'"}]}
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'invalid_length'","sourceType":"\"STRING\"","targetType":"\"INT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":34,"fragment":"rpad('hi', 'invalid_length'"}]}


 -- !query
@@ -652,7 +652,7 @@ select decode()
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Invalid number of arguments for function decode. Expected: 2; Found: 0; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Invalid number of arguments for function decode. Expected: 2; Found: 0; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -661,7 +661,7 @@ select decode(encode('abc', 'utf-8'))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Invalid number of arguments for function decode. Expected: 2; Found: 1; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Invalid number of arguments for function decode. Expected: 2; Found: 1; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -1126,7 +1126,7 @@ select to_binary(null, cast(null as int))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["The 'format' parameter of function 'to_binary' needs to be a string literal.; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"The 'format' parameter of function 'to_binary' needs to be a string literal.; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -1135,7 +1135,7 @@ select to_binary('abc', 1)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["The 'format' parameter of function 'to_binary' needs to be a string literal.; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"The 'format' parameter of function 'to_binary' needs to be a string literal.; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -1144,7 +1144,7 @@ select to_binary('abc', 'invalidFormat')
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Invalid value for the 'format' parameter of function 'to_binary': invalidformat. The value has to be a case-insensitive string literal of 'hex', 'utf-8', or 'base64'."],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Invalid value for the 'format' parameter of function 'to_binary': invalidformat. The value has to be a case-insensitive string literal of 'hex', 'utf-8', or 'base64'."},"queryContext":[]}


 -- !query
@@ -1153,4 +1153,4 @@ select to_binary('a!', 'base64')
 struct<>
 -- !query output
 java.lang.IllegalArgumentException
-{"errorClass":"legacy","messageParameters":["Last unit does not have enough valid bits"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Last unit does not have enough valid bits"},"queryContext":[]}
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
index b8fc7da629f7e..d4222dc786715 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
@@ -13,7 +13,7 @@ select timestamp '2019-01-01中文'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":"legacy","messageParameters":["\nCannot parse the TIMESTAMP value: 2019-01-01中文(line 1, pos 7)\n\n== SQL ==\nselect timestamp '2019-01-01中文'\n-------^^^\n"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the TIMESTAMP value: 2019-01-01中文(line 1, pos 7)\n\n== SQL ==\nselect timestamp '2019-01-01中文'\n-------^^^\n"},"queryContext":[]}


 -- !query
@@ -22,7 +22,7 @@ select timestamp'4294967297'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":"legacy","messageParameters":["\nCannot parse the TIMESTAMP value: 4294967297(line 1, pos 7)\n\n== SQL ==\nselect timestamp'4294967297'\n-------^^^\n"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the TIMESTAMP value: 4294967297(line 1, pos 7)\n\n== SQL ==\nselect timestamp'4294967297'\n-------^^^\n"},"queryContext":[]}


 -- !query
@@ -31,7 +31,7 @@ select timestamp'2021-01-01T12:30:4294967297.123456'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":"legacy","messageParameters":["\nCannot parse the TIMESTAMP value: 2021-01-01T12:30:4294967297.123456(line 1, pos 7)\n\n== SQL ==\nselect timestamp'2021-01-01T12:30:4294967297.123456'\n-------^^^\n"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the TIMESTAMP value: 2021-01-01T12:30:4294967297.123456(line 1, pos 7)\n\n== SQL ==\nselect timestamp'2021-01-01T12:30:4294967297.123456'\n-------^^^\n"},"queryContext":[]}


 -- !query
@@ -80,7 +80,7 @@ SELECT make_timestamp(2021, 07, 11, 6, 30, 60.007)
 struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
-{"errorClass":"INVALID_FRACTION_OF_SECOND","sqlState":"22023","messageParameters":["\"spark.sql.ansi.enabled\""],"queryContext":[]}
+{"errorClass":"INVALID_FRACTION_OF_SECOND","sqlState":"22023","messageParameters":{"ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]}


 -- !query
@@ -105,7 +105,7 @@ SELECT make_timestamp(1, 1, 1, 1, 1, 61)
 struct<>
 -- !query output
 java.time.DateTimeException
-{"errorClass":"legacy","messageParameters":["Invalid value for SecondOfMinute (valid values 0 - 59): 61. If necessary set spark.sql.ansi.enabled to false to bypass this error."],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Invalid value for SecondOfMinute (valid values 0 - 59): 61. If necessary set spark.sql.ansi.enabled to false to bypass this error."},"queryContext":[]}


 -- !query
@@ -130,7 +130,7 @@ SELECT make_timestamp(1, 1, 1, 1, 1, 99.999999)
 struct<>
 -- !query output
 java.time.DateTimeException
-{"errorClass":"legacy","messageParameters":["Invalid value for SecondOfMinute (valid values 0 - 59): 99. If necessary set spark.sql.ansi.enabled to false to bypass this error."],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Invalid value for SecondOfMinute (valid values 0 - 59): 99. If necessary set spark.sql.ansi.enabled to false to bypass this error."},"queryContext":[]}


 -- !query
@@ -139,7 +139,7 @@ SELECT make_timestamp(1, 1, 1, 1, 1, 999.999999)
 struct<>
 -- !query output
 java.time.DateTimeException
-{"errorClass":"legacy","messageParameters":["Invalid value for SecondOfMinute (valid values 0 - 59): 999. If necessary set spark.sql.ansi.enabled to false to bypass this error."],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Invalid value for SecondOfMinute (valid values 0 - 59): 999. If necessary set spark.sql.ansi.enabled to false to bypass this error."},"queryContext":[]}


 -- !query
@@ -180,7 +180,7 @@ select TIMESTAMP_SECONDS(1230219000123123)
 struct<>
 -- !query output
 java.lang.ArithmeticException
-{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"long overflow"},"queryContext":[]}


 -- !query
@@ -189,7 +189,7 @@ select TIMESTAMP_SECONDS(-1230219000123123)
 struct<>
 -- !query output
 java.lang.ArithmeticException
-{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"long overflow"},"queryContext":[]}


 -- !query
@@ -198,7 +198,7 @@ select TIMESTAMP_MILLIS(92233720368547758)
 struct<>
 -- !query output
 java.lang.ArithmeticException
-{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"long overflow"},"queryContext":[]}


 -- !query
@@ -207,7 +207,7 @@ select TIMESTAMP_MILLIS(-92233720368547758)
 struct<>
 -- !query output
 java.lang.ArithmeticException
-{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"long overflow"},"queryContext":[]}


 -- !query
@@ -216,7 +216,7 @@ select TIMESTAMP_SECONDS(0.1234567)
 struct<>
 -- !query output
 java.lang.ArithmeticException
-{"errorClass":"legacy","messageParameters":["Rounding necessary"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Rounding necessary"},"queryContext":[]}


 -- !query
@@ -322,7 +322,7 @@ select to_timestamp('2019-10-06 10:11:12.', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
 struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
-{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text '2019-10-06 10:11:12.' could not be parsed at index 20","\"spark.sql.ansi.enabled\""],"queryContext":[]}
+{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":{"message":"Text '2019-10-06 10:11:12.' could not be parsed at index 20","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]}


 -- !query
@@ -387,7 +387,7 @@ select to_timestamp('2019-10-06 10:11:12.1234567PST', 'yyyy-MM-dd HH:mm:ss.SSSSS
 struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
-{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text '2019-10-06 10:11:12.1234567PST' could not be parsed, unparsed text found at index 26","\"spark.sql.ansi.enabled\""],"queryContext":[]}
+{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":{"message":"Text '2019-10-06 10:11:12.1234567PST' could not be parsed, unparsed text found at index 26","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]}


 -- !query
@@ -404,7 +404,7 @@ select to_timestamp('223456 2019-10-06 10:11:12.123456PST', 'SSSSSS yyyy-MM-dd H
 struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
-{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text '223456 2019-10-06 10:11:12.123456PST' could not be parsed at index 27","\"spark.sql.ansi.enabled\""],"queryContext":[]}
+{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":{"message":"Text '223456 2019-10-06 10:11:12.123456PST' could not be parsed at index 27","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]}


 -- !query
@@ -469,7 +469,7 @@ select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyyyy-MM-dd'S'HH:mm")
 struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
-{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text '12.1232019-10-06S10:11' could not be parsed at index 7","\"spark.sql.ansi.enabled\""],"queryContext":[]}
+{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":{"message":"Text '12.1232019-10-06S10:11' could not be parsed at index 7","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]}


 -- !query
@@ -478,7 +478,7 @@ select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyy-MM-dd'S'HH:mm")
 struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
-{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text '12.1232019-10-06S10:11' could not be parsed at index 9","\"spark.sql.ansi.enabled\""],"queryContext":[]}
+{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":{"message":"Text '12.1232019-10-06S10:11' could not be parsed at index 9","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]}


 -- !query
@@ -551,7 +551,7 @@ select to_timestamp("02-29", "MM-dd")
 struct<>
 -- !query output
 org.apache.spark.SparkDateTimeException
-{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Invalid date 'February 29' as '1970' is not a leap year","\"spark.sql.ansi.enabled\""],"queryContext":[]}
+{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":{"message":"Invalid date 'February 29' as '1970' is not a leap year","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]}


 -- !query
@@ -656,7 +656,7 @@ select timestamp'2011-11-11 11:11:11' + '1'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' + CAST('1' AS TIMESTAMP))' due to data type mismatch: '(TIMESTAMP '2011-11-11 11:11:11' + CAST('1' AS TIMESTAMP))' requires (numeric or interval day to second or interval year to month or interval) type, not timestamp; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' + CAST('1' AS TIMESTAMP))' due to data type mismatch: '(TIMESTAMP '2011-11-11 11:11:11' + CAST('1' AS TIMESTAMP))' requires (numeric or interval day to second or interval year to month or interval) type, not timestamp; line 1 pos 7"},"queryContext":[]} -- !query @@ -665,7 +665,7 @@ select '1' + timestamp'2011-11-11 11:11:11' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS TIMESTAMP) + TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: '(CAST('1' AS TIMESTAMP) + TIMESTAMP '2011-11-11 11:11:11')' requires (numeric or interval day to second or interval year to month or interval) type, not timestamp; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS TIMESTAMP) + TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: '(CAST('1' AS TIMESTAMP) + TIMESTAMP '2011-11-11 11:11:11')' requires (numeric or interval day to second or interval year to month or interval) type, not timestamp; line 1 pos 7"},"queryContext":[]} -- !query @@ -674,7 +674,7 @@ select timestamp'2011-11-11 11:11:11' + null struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' + NULL)' due to data type mismatch: differing types in '(TIMESTAMP '2011-11-11 11:11:11' + NULL)' (timestamp and void).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' + NULL)' due to data type mismatch: differing types in '(TIMESTAMP '2011-11-11 11:11:11' + NULL)' (timestamp and void).; line 1 pos 7"},"queryContext":[]} -- !query @@ -683,7 +683,7 @@ select null + timestamp'2011-11-11 11:11:11' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(NULL + TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(NULL + TIMESTAMP '2011-11-11 11:11:11')' (void and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(NULL + TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(NULL + TIMESTAMP '2011-11-11 11:11:11')' (void and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -715,7 +715,7 @@ select to_timestamp('2019-10-06 A', 'yyyy-MM-dd GGGGG') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'yyyy-MM-dd GGGGG'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'yyyy-MM-dd GGGGG'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -724,7 +724,7 @@ select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEEE') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'dd MM yyyy EEEEEE'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} 
+{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'dd MM yyyy EEEEEE'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -733,7 +733,7 @@ select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'dd MM yyyy EEEEE'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'dd MM yyyy EEEEE'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -742,7 +742,7 @@ select unix_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'dd MM yyyy EEEEE'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'dd MM yyyy EEEEE'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -751,7 +751,7 @@ select from_json('{"t":"26/October/2015"}', 't Timestamp', map('timestampFormat' struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'dd/MMMMM/yyyy'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'dd/MMMMM/yyyy'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -760,7 +760,7 @@ select from_csv('26/October/2015', 't Timestamp', map('timestampFormat', 'dd/MMM struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'dd/MMMMM/yyyy'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'dd/MMMMM/yyyy'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/try_arithmetic.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/try_arithmetic.sql.out index 758351f01b2e4..5c299396ace64 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/try_arithmetic.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/try_arithmetic.sql.out @@ -141,7 +141,7 @@ SELECT try_add(interval 2 year, interval 2 second) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'INTERVAL '2' YEAR + INTERVAL '02' SECOND' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2' YEAR' is of interval year type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'INTERVAL '2' YEAR + INTERVAL '02' SECOND' due to data type mismatch: argument 1 requires (timestamp or timestamp 
without time zone) type, however, 'INTERVAL '2' YEAR' is of interval year type.; line 1 pos 7"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/try_datetime_functions.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/try_datetime_functions.sql.out index 2a7e37edccaf8..c9de17947a8aa 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/try_datetime_functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/try_datetime_functions.sql.out @@ -45,4 +45,4 @@ select try_to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEEE') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'dd MM yyyy EEEEEE'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'dd MM yyyy EEEEEE'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/try_element_at.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/try_element_at.sql.out index 21b1f73b85f6c..a2e3fe1eaa783 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/try_element_at.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/try_element_at.sql.out @@ -5,7 +5,7 @@ SELECT try_element_at(array(1, 2, 3), 0) struct<> -- !query output org.apache.spark.SparkRuntimeException -{"errorClass":"ELEMENT_AT_BY_INDEX_ZERO","messageParameters":[],"queryContext":[]} +{"errorClass":"ELEMENT_AT_BY_INDEX_ZERO","messageParameters":{},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/array.sql.out b/sql/core/src/test/resources/sql-tests/results/array.sql.out index 776ebb8598bf5..8e85940f2d15a 100644 --- a/sql/core/src/test/resources/sql-tests/results/array.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/array.sql.out @@ -128,7 +128,7 @@ select sort_array(array('b', 'd'), '1') struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'sort_array(array('b', 'd'), '1')' due to data type mismatch: Sort order in second argument requires a boolean literal.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'sort_array(array('b', 'd'), '1')' due to data type mismatch: Sort order in second argument requires a boolean literal.; line 1 pos 7"},"queryContext":[]} -- !query @@ -137,7 +137,7 @@ select sort_array(array('b', 'd'), cast(NULL as boolean)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'sort_array(array('b', 'd'), CAST(NULL AS BOOLEAN))' due to data type mismatch: Sort order in second argument requires a boolean literal.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'sort_array(array('b', 'd'), CAST(NULL AS BOOLEAN))' due to data type mismatch: Sort order in second argument requires a boolean literal.; line 1 pos 7"},"queryContext":[]} -- !query @@ -181,7 +181,7 @@ select element_at(array(1, 2, 3), 0) struct<> -- !query output org.apache.spark.SparkRuntimeException -{"errorClass":"ELEMENT_AT_BY_INDEX_ZERO","messageParameters":[],"queryContext":[]} 
+{"errorClass":"ELEMENT_AT_BY_INDEX_ZERO","messageParameters":{},"queryContext":[]} -- !query @@ -294,4 +294,4 @@ select array_size(map('a', 1, 'b', 2)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'array_size(map('a', 1, 'b', 2))' due to data type mismatch: argument 1 requires array type, however, 'map('a', 1, 'b', 2)' is of map type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'array_size(map('a', 1, 'b', 2))' due to data type mismatch: argument 1 requires array type, however, 'map('a', 1, 'b', 2)' is of map type.; line 1 pos 7"},"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/bitwise.sql.out b/sql/core/src/test/resources/sql-tests/results/bitwise.sql.out index 4eb71a3903891..6546750ccd3b5 100644 --- a/sql/core/src/test/resources/sql-tests/results/bitwise.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/bitwise.sql.out @@ -149,7 +149,7 @@ select bit_count("bit count") struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'bit_count('bit count')' due to data type mismatch: argument 1 requires (integral or boolean) type, however, ''bit count'' is of string type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'bit_count('bit count')' due to data type mismatch: argument 1 requires (integral or boolean) type, however, ''bit count'' is of string type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -158,7 +158,7 @@ select bit_count('a') struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'bit_count('a')' due to data type mismatch: argument 1 requires (integral or boolean) type, however, ''a'' is of string type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'bit_count('a')' due to data type mismatch: argument 1 requires (integral or boolean) type, however, ''a'' is of string type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -260,7 +260,7 @@ select getbit(11L, -1) struct<> -- !query output java.lang.IllegalArgumentException -{"errorClass":"legacy","messageParameters":["Invalid bit position: -1 is less than zero"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Invalid bit position: -1 is less than zero"},"queryContext":[]} -- !query @@ -269,4 +269,4 @@ select getbit(11L, 64) struct<> -- !query output java.lang.IllegalArgumentException -{"errorClass":"legacy","messageParameters":["Invalid bit position: 64 exceeds the bit upper limit"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Invalid bit position: 64 exceeds the bit upper limit"},"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/cast.sql.out b/sql/core/src/test/resources/sql-tests/results/cast.sql.out index 8027c8e4b4de5..8e94b3ac2c839 100644 --- a/sql/core/src/test/resources/sql-tests/results/cast.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/cast.sql.out @@ -350,7 +350,7 @@ SELECT CAST(interval 3 month 1 hour AS string) struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot mix year-month and day-time fields: interval 3 month 1 hour(line 1, pos 12)\n\n== SQL ==\nSELECT CAST(interval 3 month 1 hour AS 
string)\n------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot mix year-month and day-time fields: interval 3 month 1 hour(line 1, pos 12)\n\n== SQL ==\nSELECT CAST(interval 3 month 1 hour AS string)\n------------^^^\n"},"queryContext":[]} -- !query @@ -626,7 +626,7 @@ select cast(interval '23:59:59' hour to second as smallint) struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":["INTERVAL '23:59:59' HOUR TO SECOND","\"INTERVAL HOUR TO SECOND\"","\"SMALLINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":{"value":"INTERVAL '23:59:59' HOUR TO SECOND","sourceType":"\"INTERVAL HOUR TO SECOND\"","targetType":"\"SMALLINT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query @@ -651,7 +651,7 @@ select cast(interval '-1000' month as tinyint) struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":["INTERVAL '-1000' MONTH","\"INTERVAL MONTH\"","\"TINYINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":{"value":"INTERVAL '-1000' MONTH","sourceType":"\"INTERVAL MONTH\"","targetType":"\"TINYINT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query @@ -660,7 +660,7 @@ select cast(interval '1000000' second as smallint) struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":["INTERVAL '1000000' SECOND","\"INTERVAL SECOND\"","\"SMALLINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":{"value":"INTERVAL '1000000' SECOND","sourceType":"\"INTERVAL SECOND\"","targetType":"\"SMALLINT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query @@ -725,7 +725,7 @@ select cast(2147483647 as interval year) struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":["2147483647","\"INT\"","\"INTERVAL YEAR\"","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":{"value":"2147483647","sourceType":"\"INT\"","targetType":"\"INTERVAL YEAR\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query @@ -734,7 +734,7 @@ select cast(-9223372036854775808L as interval day) struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":["-9223372036854775808L","\"BIGINT\"","\"INTERVAL DAY\"","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":{"value":"-9223372036854775808L","sourceType":"\"BIGINT\"","targetType":"\"INTERVAL DAY\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query @@ -799,4 +799,4 @@ select cast(interval '10.123' second as decimal(1, 0)) struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":["Decimal(compact, 10, 18, 6)","1","0","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CANNOT_CHANGE_DECIMAL_PRECISION","sqlState":"22005","messageParameters":{"value":"Decimal(compact, 10, 18, 
6)","precision":"1","scale":"0","config":"\"spark.sql.ansi.enabled\""},"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/ceil-floor-with-scale-param.sql.out b/sql/core/src/test/resources/sql-tests/results/ceil-floor-with-scale-param.sql.out index d073a93a4eb15..22b5a57e64c75 100644 --- a/sql/core/src/test/resources/sql-tests/results/ceil-floor-with-scale-param.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ceil-floor-with-scale-param.sql.out @@ -93,7 +93,7 @@ SELECT CEIL(2.5, null) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["The 'scale' parameter of function 'ceil' needs to be a int literal.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"The 'scale' parameter of function 'ceil' needs to be a int literal.; line 1 pos 7"},"queryContext":[]} -- !query @@ -102,7 +102,7 @@ SELECT CEIL(2.5, 'a') struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["The 'scale' parameter of function 'ceil' needs to be a int literal.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"The 'scale' parameter of function 'ceil' needs to be a int literal.; line 1 pos 7"},"queryContext":[]} -- !query @@ -111,7 +111,7 @@ SELECT CEIL(2.5, 0, 0) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Invalid number of arguments for function ceil. Expected: 2; Found: 3; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Invalid number of arguments for function ceil. Expected: 2; Found: 3; line 1 pos 7"},"queryContext":[]} -- !query @@ -208,7 +208,7 @@ SELECT FLOOR(2.5, null) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["The 'scale' parameter of function 'floor' needs to be a int literal.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"The 'scale' parameter of function 'floor' needs to be a int literal.; line 1 pos 7"},"queryContext":[]} -- !query @@ -217,7 +217,7 @@ SELECT FLOOR(2.5, 'a') struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["The 'scale' parameter of function 'floor' needs to be a int literal.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"The 'scale' parameter of function 'floor' needs to be a int literal.; line 1 pos 7"},"queryContext":[]} -- !query @@ -226,4 +226,4 @@ SELECT FLOOR(2.5, 0, 0) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Invalid number of arguments for function floor. Expected: 2; Found: 3; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Invalid number of arguments for function floor. 
Expected: 2; Found: 3; line 1 pos 7"},"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/change-column.sql.out b/sql/core/src/test/resources/sql-tests/results/change-column.sql.out index e9c050bc5704b..2436be6a64d2d 100644 --- a/sql/core/src/test/resources/sql-tests/results/change-column.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/change-column.sql.out @@ -23,7 +23,7 @@ ALTER TABLE test_change CHANGE a struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nOperation not allowed: ALTER TABLE table CHANGE COLUMN requires a TYPE, a SET/DROP, a COMMENT, or a FIRST/AFTER(line 1, pos 0)\n\n== SQL ==\nALTER TABLE test_change CHANGE a\n^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nOperation not allowed: ALTER TABLE table CHANGE COLUMN requires a TYPE, a SET/DROP, a COMMENT, or a FIRST/AFTER(line 1, pos 0)\n\n== SQL ==\nALTER TABLE test_change CHANGE a\n^^^\n"},"queryContext":[]} -- !query @@ -42,7 +42,7 @@ ALTER TABLE test_change RENAME COLUMN a TO a1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["RENAME COLUMN is only supported with v2 tables."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"RENAME COLUMN is only supported with v2 tables."},"queryContext":[]} -- !query @@ -61,7 +61,7 @@ ALTER TABLE test_change CHANGE a TYPE STRING struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["ALTER TABLE CHANGE COLUMN is not supported for changing column 'a' with type 'IntegerType' to 'a' with type 'StringType'"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"ALTER TABLE CHANGE COLUMN is not supported for changing column 'a' with type 'IntegerType' to 'a' with type 'StringType'"},"queryContext":[]} -- !query @@ -80,7 +80,7 @@ ALTER TABLE test_change CHANGE a AFTER b struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["ALTER COLUMN ... FIRST | ALTER is only supported with v2 tables."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"ALTER COLUMN ... FIRST | ALTER is only supported with v2 tables."},"queryContext":[]} -- !query @@ -89,7 +89,7 @@ ALTER TABLE test_change CHANGE b FIRST struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["ALTER COLUMN ... FIRST | ALTER is only supported with v2 tables."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"ALTER COLUMN ... 
FIRST | ALTER is only supported with v2 tables."},"queryContext":[]} -- !query @@ -168,7 +168,7 @@ ALTER TABLE test_change CHANGE invalid_col TYPE INT struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Missing field invalid_col in table spark_catalog.default.test_change with schema:\nroot\n |-- a: integer (nullable = true)\n |-- b: string (nullable = true)\n |-- c: integer (nullable = true)\n; line 1 pos 0"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Missing field invalid_col in table spark_catalog.default.test_change with schema:\nroot\n |-- a: integer (nullable = true)\n |-- b: string (nullable = true)\n |-- c: integer (nullable = true)\n; line 1 pos 0"},"queryContext":[]} -- !query @@ -213,7 +213,7 @@ ALTER TABLE temp_view CHANGE a TYPE INT struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["temp_view is a temp view. 'ALTER TABLE ... CHANGE COLUMN' expects a table.; line 1 pos 12"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"temp_view is a temp view. 'ALTER TABLE ... CHANGE COLUMN' expects a table.; line 1 pos 12"},"queryContext":[]} -- !query @@ -230,7 +230,7 @@ ALTER TABLE global_temp.global_temp_view CHANGE a TYPE INT struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["global_temp.global_temp_view is a temp view. 'ALTER TABLE ... CHANGE COLUMN' expects a table.; line 1 pos 12"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"global_temp.global_temp_view is a temp view. 'ALTER TABLE ... CHANGE COLUMN' expects a table.; line 1 pos 12"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/charvarchar.sql.out b/sql/core/src/test/resources/sql-tests/results/charvarchar.sql.out index 58cb227982e0f..776a1cdca3d63 100644 --- a/sql/core/src/test/resources/sql-tests/results/charvarchar.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/charvarchar.sql.out @@ -259,7 +259,7 @@ alter table char_tbl1 change column c type char(6) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["ALTER TABLE CHANGE COLUMN is not supported for changing column 'c' with type 'CharType(5)' to 'c' with type 'CharType(6)'"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"ALTER TABLE CHANGE COLUMN is not supported for changing column 'c' with type 'CharType(5)' to 'c' with type 'CharType(6)'"},"queryContext":[]} -- !query @@ -575,7 +575,7 @@ alter table char_part partition (v2='ke') rename to partition (v2='nt') struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Partition spec is invalid. The spec (v2) must match the partition spec (v2, c2) defined in table '`spark_catalog`.`default`.`char_part`'"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Partition spec is invalid. 
The spec (v2) must match the partition spec (v2, c2) defined in table '`spark_catalog`.`default`.`char_part`'"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/columnresolution-negative.sql.out b/sql/core/src/test/resources/sql-tests/results/columnresolution-negative.sql.out index d8d8e366ca878..9f93dbe076b87 100644 --- a/sql/core/src/test/resources/sql-tests/results/columnresolution-negative.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/columnresolution-negative.sql.out @@ -69,7 +69,7 @@ SELECT i1 FROM t1, mydb1.t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Reference 'i1' is ambiguous, could be: spark_catalog.mydb1.t1.i1, spark_catalog.mydb1.t1.i1.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Reference 'i1' is ambiguous, could be: spark_catalog.mydb1.t1.i1, spark_catalog.mydb1.t1.i1.; line 1 pos 7"},"queryContext":[]} -- !query @@ -78,7 +78,7 @@ SELECT t1.i1 FROM t1, mydb1.t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Reference 't1.i1' is ambiguous, could be: spark_catalog.mydb1.t1.i1, spark_catalog.mydb1.t1.i1.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Reference 't1.i1' is ambiguous, could be: spark_catalog.mydb1.t1.i1, spark_catalog.mydb1.t1.i1.; line 1 pos 7"},"queryContext":[]} -- !query @@ -87,7 +87,7 @@ SELECT mydb1.t1.i1 FROM t1, mydb1.t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Reference 'mydb1.t1.i1' is ambiguous, could be: spark_catalog.mydb1.t1.i1, spark_catalog.mydb1.t1.i1.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Reference 'mydb1.t1.i1' is ambiguous, could be: spark_catalog.mydb1.t1.i1, spark_catalog.mydb1.t1.i1.; line 1 pos 7"},"queryContext":[]} -- !query @@ -96,7 +96,7 @@ SELECT i1 FROM t1, mydb2.t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Reference 'i1' is ambiguous, could be: spark_catalog.mydb1.t1.i1, spark_catalog.mydb2.t1.i1.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Reference 'i1' is ambiguous, could be: spark_catalog.mydb1.t1.i1, spark_catalog.mydb2.t1.i1.; line 1 pos 7"},"queryContext":[]} -- !query @@ -105,7 +105,7 @@ SELECT t1.i1 FROM t1, mydb2.t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Reference 't1.i1' is ambiguous, could be: spark_catalog.mydb1.t1.i1, spark_catalog.mydb2.t1.i1.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Reference 't1.i1' is ambiguous, could be: spark_catalog.mydb1.t1.i1, spark_catalog.mydb2.t1.i1.; line 1 pos 7"},"queryContext":[]} -- !query @@ -122,7 +122,7 @@ SELECT i1 FROM t1, mydb1.t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Reference 'i1' is ambiguous, could be: spark_catalog.mydb2.t1.i1, spark_catalog.mydb1.t1.i1.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Reference 'i1' is ambiguous, could be: spark_catalog.mydb2.t1.i1, spark_catalog.mydb1.t1.i1.; line 1 pos 7"},"queryContext":[]} -- !query @@ -131,7 +131,7 @@ SELECT t1.i1 FROM t1, mydb1.t1 struct<> -- !query output 
org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Reference 't1.i1' is ambiguous, could be: spark_catalog.mydb2.t1.i1, spark_catalog.mydb1.t1.i1.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Reference 't1.i1' is ambiguous, could be: spark_catalog.mydb2.t1.i1, spark_catalog.mydb1.t1.i1.; line 1 pos 7"},"queryContext":[]} -- !query @@ -140,7 +140,7 @@ SELECT i1 FROM t1, mydb2.t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Reference 'i1' is ambiguous, could be: spark_catalog.mydb2.t1.i1, spark_catalog.mydb2.t1.i1.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Reference 'i1' is ambiguous, could be: spark_catalog.mydb2.t1.i1, spark_catalog.mydb2.t1.i1.; line 1 pos 7"},"queryContext":[]} -- !query @@ -149,7 +149,7 @@ SELECT t1.i1 FROM t1, mydb2.t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Reference 't1.i1' is ambiguous, could be: spark_catalog.mydb2.t1.i1, spark_catalog.mydb2.t1.i1.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Reference 't1.i1' is ambiguous, could be: spark_catalog.mydb2.t1.i1, spark_catalog.mydb2.t1.i1.; line 1 pos 7"},"queryContext":[]} -- !query @@ -158,7 +158,7 @@ SELECT db1.t1.i1 FROM t1, mydb2.t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`db1`.`t1`.`i1`","`spark_catalog`.`mydb2`.`t1`.`i1`, `spark_catalog`.`mydb2`.`t1`.`i1`"],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`db1`.`t1`.`i1`","objectList":"`spark_catalog`.`mydb2`.`t1`.`i1`, `spark_catalog`.`mydb2`.`t1`.`i1`"},"queryContext":[]} -- !query @@ -183,7 +183,7 @@ SELECT mydb1.t1 FROM t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`mydb1`.`t1`","`spark_catalog`.`mydb1`.`t1`.`i1`"],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`mydb1`.`t1`","objectList":"`spark_catalog`.`mydb1`.`t1`.`i1`"},"queryContext":[]} -- !query @@ -192,7 +192,7 @@ SELECT t1.x.y.* FROM t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 't1.x.y.*' given input columns 'i1'; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 't1.x.y.*' given input columns 'i1'; line 1 pos 7"},"queryContext":[]} -- !query @@ -201,7 +201,7 @@ SELECT t1 FROM mydb1.t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`t1`","`spark_catalog`.`mydb1`.`t1`.`i1`"],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`t1`","objectList":"`spark_catalog`.`mydb1`.`t1`.`i1`"},"queryContext":[]} -- !query @@ -218,7 +218,7 @@ SELECT mydb1.t1.i1 FROM t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`mydb1`.`t1`.`i1`","`spark_catalog`.`mydb2`.`t1`.`i1`"],"queryContext":[]} 
+{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`mydb1`.`t1`.`i1`","objectList":"`spark_catalog`.`mydb2`.`t1`.`i1`"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/comments.sql.out b/sql/core/src/test/resources/sql-tests/results/comments.sql.out index 685cc95d3d70f..92490b89fc3c3 100644 --- a/sql/core/src/test/resources/sql-tests/results/comments.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/comments.sql.out @@ -132,7 +132,7 @@ select 1 as a struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nUnclosed bracketed comment(line 3, pos 0)\n\n== SQL ==\n/*abc*/\nselect 1 as a\n/*\n^^^\n\n2 as b\n/*abc*/\n, 3 as c\n\n/**/\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nUnclosed bracketed comment(line 3, pos 0)\n\n== SQL ==\n/*abc*/\nselect 1 as a\n/*\n^^^\n\n2 as b\n/*abc*/\n, 3 as c\n\n/**/\n"},"queryContext":[]} -- !query @@ -150,4 +150,4 @@ select 4 as d struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nUnclosed bracketed comment(line 3, pos 0)\n\n== SQL ==\n/*abc*/\nselect 1 as a\n/*\n^^^\n\n2 as b\n/*abc*/\n, 3 as c\n\n/**/\nselect 4 as d\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nUnclosed bracketed comment(line 3, pos 0)\n\n== SQL ==\n/*abc*/\nselect 1 as a\n/*\n^^^\n\n2 as b\n/*abc*/\n, 3 as c\n\n/**/\nselect 4 as d\n"},"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/count.sql.out b/sql/core/src/test/resources/sql-tests/results/count.sql.out index dbed5336733a6..513b8a89cbf73 100644 --- a/sql/core/src/test/resources/sql-tests/results/count.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/count.sql.out @@ -146,7 +146,7 @@ SELECT count() FROM testData struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'count()' due to data type mismatch: count requires at least one argument. If you have to call the function count without arguments, set the legacy configuration `spark.sql.legacy.allowParameterlessCount` as true; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'count()' due to data type mismatch: count requires at least one argument. If you have to call the function count without arguments, set the legacy configuration `spark.sql.legacy.allowParameterlessCount` as true; line 1 pos 7"},"queryContext":[]} -- !query @@ -179,4 +179,4 @@ SELECT count(testData.*) FROM testData struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["count(testData.*) is not allowed. Please use count(*) or expand the columns manually, e.g. count(col1, col2)"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"count(testData.*) is not allowed. Please use count(*) or expand the columns manually, e.g. 
count(col1, col2)"},"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/csv-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/csv-functions.sql.out index 8f28bbc12a4d6..5903e32dc4b92 100644 --- a/sql/core/src/test/resources/sql-tests/results/csv-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/csv-functions.sql.out @@ -21,7 +21,7 @@ select from_csv('1', 1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["The expression '1' is not a valid schema string.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"The expression '1' is not a valid schema string.; line 1 pos 7"},"queryContext":[]} -- !query @@ -30,7 +30,7 @@ select from_csv('1', 'a InvalidType') struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Cannot parse the data type: \n[PARSE_SYNTAX_ERROR] Syntax error at or near 'InvalidType': extra input 'InvalidType'(line 1, pos 2)\n\n== SQL ==\na InvalidType\n--^^^\n\nFailed fallback parsing: \nDataType invalidtype is not supported.(line 1, pos 2)\n\n== SQL ==\na InvalidType\n--^^^\n; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Cannot parse the data type: \n[PARSE_SYNTAX_ERROR] Syntax error at or near 'InvalidType': extra input 'InvalidType'(line 1, pos 2)\n\n== SQL ==\na InvalidType\n--^^^\n\nFailed fallback parsing: \nDataType invalidtype is not supported.(line 1, pos 2)\n\n== SQL ==\na InvalidType\n--^^^\n; line 1 pos 7"},"queryContext":[]} -- !query @@ -39,7 +39,7 @@ select from_csv('1', 'a INT', named_struct('mode', 'PERMISSIVE')) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Must use a map() function for options; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Must use a map() function for options; line 1 pos 7"},"queryContext":[]} -- !query @@ -48,7 +48,7 @@ select from_csv('1', 'a INT', map('mode', 1)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["A type of keys and values in map() must be string, but got map; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"A type of keys and values in map() must be string, but got map; line 1 pos 7"},"queryContext":[]} -- !query @@ -57,7 +57,7 @@ select from_csv() struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Invalid number of arguments for function from_csv. Expected: one of 2 and 3; Found: 0; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Invalid number of arguments for function from_csv. 
Expected: one of 2 and 3; Found: 0; line 1 pos 7"},"queryContext":[]} -- !query @@ -82,7 +82,7 @@ select schema_of_csv(null) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'schema_of_csv(NULL)' due to data type mismatch: The input csv should be a foldable string expression and not null; however, got NULL.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'schema_of_csv(NULL)' due to data type mismatch: The input csv should be a foldable string expression and not null; however, got NULL.; line 1 pos 7"},"queryContext":[]} -- !query @@ -99,7 +99,7 @@ SELECT schema_of_csv(csvField) FROM csvTable struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'schema_of_csv(csvtable.csvField)' due to data type mismatch: The input csv should be a foldable string expression and not null; however, got csvtable.csvField.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'schema_of_csv(csvtable.csvField)' due to data type mismatch: The input csv should be a foldable string expression and not null; however, got csvtable.csvField.; line 1 pos 7"},"queryContext":[]} -- !query @@ -132,7 +132,7 @@ select to_csv(named_struct('a', 1, 'b', 2), named_struct('mode', 'PERMISSIVE')) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Must use a map() function for options; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Must use a map() function for options; line 1 pos 7"},"queryContext":[]} -- !query @@ -141,4 +141,4 @@ select to_csv(named_struct('a', 1, 'b', 2), map('mode', 1)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["A type of keys and values in map() must be string, but got map; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"A type of keys and values in map() must be string, but got map; line 1 pos 7"},"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/cte-legacy.sql.out b/sql/core/src/test/resources/sql-tests/results/cte-legacy.sql.out index 95d33c6ffdddb..f756108f2ccde 100644 --- a/sql/core/src/test/resources/sql-tests/results/cte-legacy.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/cte-legacy.sql.out @@ -232,4 +232,4 @@ SELECT * FROM t2 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Table or view not found: t1; line 5 pos 20"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Table or view not found: t1; line 5 pos 20"},"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/cte-nested.sql.out b/sql/core/src/test/resources/sql-tests/results/cte-nested.sql.out index d669e5729c45c..7f9f4df50355a 100644 --- a/sql/core/src/test/resources/sql-tests/results/cte-nested.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/cte-nested.sql.out @@ -45,7 +45,7 @@ SELECT * FROM t2 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Name t is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. 
See more details in SPARK-28228."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Name t is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228."},"queryContext":[]} -- !query @@ -82,7 +82,7 @@ SELECT * FROM t2 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Name t is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Name t is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228."},"queryContext":[]} -- !query @@ -136,7 +136,7 @@ SELECT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Name t is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Name t is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228."},"queryContext":[]} -- !query @@ -151,7 +151,7 @@ SELECT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Name t is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Name t is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228."},"queryContext":[]} -- !query @@ -167,7 +167,7 @@ SELECT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Name t is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Name t is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228."},"queryContext":[]} -- !query @@ -181,7 +181,7 @@ WHERE c IN ( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Name t is ambiguous in nested CTE. 
Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Name t is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228."},"queryContext":[]} -- !query @@ -210,7 +210,7 @@ SELECT * FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Name aBc is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Name aBc is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228."},"queryContext":[]} -- !query @@ -223,7 +223,7 @@ SELECT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Name aBc is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Name aBc is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. 
See more details in SPARK-28228."},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/cte.sql.out b/sql/core/src/test/resources/sql-tests/results/cte.sql.out index 05d485fbaa5db..e964cd47adc20 100644 --- a/sql/core/src/test/resources/sql-tests/results/cte.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/cte.sql.out @@ -21,7 +21,7 @@ WITH s AS (SELECT 1 FROM s) SELECT * FROM s struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Table or view not found: s; line 1 pos 25"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Table or view not found: s; line 1 pos 25"},"queryContext":[]} -- !query @@ -31,7 +31,7 @@ SELECT * FROM r struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Table or view not found: r; line 1 pos 33"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Table or view not found: r; line 1 pos 33"},"queryContext":[]} -- !query @@ -50,7 +50,7 @@ WITH s1 AS (SELECT 1 FROM s2), s2 AS (SELECT 1 FROM s1) SELECT * FROM s1, s2 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Table or view not found: s2; line 1 pos 26"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Table or view not found: s2; line 1 pos 26"},"queryContext":[]} -- !query @@ -129,7 +129,7 @@ SELECT * FROM t struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["')'",""],"queryContext":[]} +{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":{"error":"')'","hint":""},"queryContext":[]} -- !query @@ -141,7 +141,7 @@ SELECT * FROM t struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCTE definition can't have duplicate names: 't'.(line 1, pos 0)\n\n== SQL ==\nWITH\n^^^\n t(x) AS (SELECT 1),\n t(x) AS (SELECT 2)\nSELECT * FROM t\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCTE definition can't have duplicate names: 't'.(line 1, pos 0)\n\n== SQL ==\nWITH\n^^^\n t(x) AS (SELECT 1),\n t(x) AS (SELECT 2)\nSELECT * FROM t\n"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/date.sql.out b/sql/core/src/test/resources/sql-tests/results/date.sql.out index 9900d9735039f..5ecd9e0e1e67c 100644 --- a/sql/core/src/test/resources/sql-tests/results/date.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/date.sql.out @@ -21,7 +21,7 @@ select date '2020-01-01中文' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 2020-01-01中文(line 1, pos 7)\n\n== SQL ==\nselect date '2020-01-01中文'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the DATE value: 2020-01-01中文(line 1, pos 7)\n\n== SQL ==\nselect date '2020-01-01中文'\n-------^^^\n"},"queryContext":[]} -- !query @@ -54,7 +54,7 @@ select date'015' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 015(line 1, pos 7)\n\n== SQL ==\nselect date'015'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the DATE 
value: 015(line 1, pos 7)\n\n== SQL ==\nselect date'015'\n-------^^^\n"},"queryContext":[]} -- !query @@ -63,7 +63,7 @@ select date'2021-4294967297-11' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 2021-4294967297-11(line 1, pos 7)\n\n== SQL ==\nselect date'2021-4294967297-11'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the DATE value: 2021-4294967297-11(line 1, pos 7)\n\n== SQL ==\nselect date'2021-4294967297-11'\n-------^^^\n"},"queryContext":[]} -- !query @@ -266,7 +266,7 @@ select date_add('2011-11-11', 1L) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1L)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1L' is of bigint type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1L)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1L' is of bigint type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -275,7 +275,7 @@ select date_add('2011-11-11', 1.0) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1.0BD)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1.0BD)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -284,7 +284,7 @@ select date_add('2011-11-11', 1E1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -301,7 +301,7 @@ select date_add('2011-11-11', '1.2') struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"SECOND_FUNCTION_ARGUMENT_NOT_INTEGER","sqlState":"22023","messageParameters":["date_add"],"queryContext":[]} +{"errorClass":"SECOND_FUNCTION_ARGUMENT_NOT_INTEGER","sqlState":"22023","messageParameters":{"functionName":"date_add"},"queryContext":[]} -- !query @@ -374,7 +374,7 @@ select date_sub('2011-11-11', 1L) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 1L)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1L' is of bigint type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 1L)' due to data type mismatch: argument 2 requires (int or 
smallint or tinyint) type, however, '1L' is of bigint type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -383,7 +383,7 @@ select date_sub('2011-11-11', 1.0) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 1.0BD)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 1.0BD)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -392,7 +392,7 @@ select date_sub('2011-11-11', 1E1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -409,7 +409,7 @@ select date_sub(date'2011-11-11', '1.2') struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"SECOND_FUNCTION_ARGUMENT_NOT_INTEGER","sqlState":"22023","messageParameters":["date_sub"],"queryContext":[]} +{"errorClass":"SECOND_FUNCTION_ARGUMENT_NOT_INTEGER","sqlState":"22023","messageParameters":{"functionName":"date_sub"},"queryContext":[]} -- !query @@ -450,7 +450,7 @@ select date_add('2011-11-11', int_str) from date_view struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('2011-11-11' AS DATE), date_view.int_str)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'date_view.int_str' is of string type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_add(CAST('2011-11-11' AS DATE), date_view.int_str)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'date_view.int_str' is of string type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -459,7 +459,7 @@ select date_sub('2011-11-11', int_str) from date_view struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), date_view.int_str)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'date_view.int_str' is of string type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), date_view.int_str)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'date_view.int_str' is of string type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -484,7 +484,7 @@ select date '2011-11-11' + 1E1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(DATE '2011-11-11', 10.0D)' due to data type mismatch: argument 2 requires 
(int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_add(DATE '2011-11-11', 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -525,7 +525,7 @@ select date '2001-10-01' - '2001-09-28' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_sub(DATE '2001-10-01', CAST('2001-09-28' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('2001-09-28' AS DOUBLE)' is of double type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_sub(DATE '2001-10-01', CAST('2001-09-28' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('2001-09-28' AS DOUBLE)' is of double type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -566,7 +566,7 @@ select date '2001-09-28' - date_str from date_view struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_sub(DATE '2001-09-28', CAST(date_view.date_str AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(date_view.date_str AS DOUBLE)' is of double type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_sub(DATE '2001-09-28', CAST(date_view.date_str AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(date_view.date_str AS DOUBLE)' is of double type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -575,7 +575,7 @@ select date'2011-11-11' + '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(DATE '2011-11-11', CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_add(DATE '2011-11-11', CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -584,7 +584,7 @@ select '1' + date'2011-11-11' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(DATE '2011-11-11', CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_add(DATE '2011-11-11', CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -622,7 +622,7 @@ select to_date('26/October/2015', 'dd/MMMMM/yyyy') struct<> -- !query output org.apache.spark.SparkUpgradeException 
-{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'dd/MMMMM/yyyy'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'dd/MMMMM/yyyy'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -631,7 +631,7 @@ select from_json('{"d":"26/October/2015"}', 'd Date', map('dateFormat', 'dd/MMMM struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'dd/MMMMM/yyyy'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'dd/MMMMM/yyyy'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -640,7 +640,7 @@ select from_csv('26/October/2015', 'd Date', map('dateFormat', 'dd/MMMMM/yyyy')) struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'dd/MMMMM/yyyy'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'dd/MMMMM/yyyy'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/datetime-formatting-invalid.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime-formatting-invalid.sql.out index e9925ee4a9c69..1ae9ce5f5c417 100644 --- a/sql/core/src/test/resources/sql-tests/results/datetime-formatting-invalid.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/datetime-formatting-invalid.sql.out @@ -5,7 +5,7 @@ select date_format('2018-11-17 13:33:33.333', 'GGGGG') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'GGGGG'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'GGGGG'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -14,7 +14,7 @@ select date_format('2018-11-17 13:33:33.333', 'yyyyyyy') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'yyyyyyy'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'yyyyyyy'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -23,7 +23,7 @@ select date_format('2018-11-17 13:33:33.333', 'qqqqq') struct<> -- !query output java.lang.IllegalArgumentException -{"errorClass":"legacy","messageParameters":["Too many pattern letters: q"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Too many pattern letters: q"},"queryContext":[]} -- !query @@ -32,7 +32,7 @@ select date_format('2018-11-17 13:33:33.333', 'QQQQQ') struct<> -- !query output 
java.lang.IllegalArgumentException -{"errorClass":"legacy","messageParameters":["Too many pattern letters: Q"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Too many pattern letters: Q"},"queryContext":[]} -- !query @@ -41,7 +41,7 @@ select date_format('2018-11-17 13:33:33.333', 'MMMMM') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'MMMMM'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'MMMMM'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -50,7 +50,7 @@ select date_format('2018-11-17 13:33:33.333', 'LLLLL') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'LLLLL'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'LLLLL'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -59,7 +59,7 @@ select date_format('2018-11-17 13:33:33.333', 'EEEEE') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'EEEEE'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'EEEEE'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -68,7 +68,7 @@ select date_format('2018-11-17 13:33:33.333', 'FF') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'FF'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'FF'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -77,7 +77,7 @@ select date_format('2018-11-17 13:33:33.333', 'ddd') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'ddd'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'ddd'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -86,7 +86,7 @@ select date_format('2018-11-17 13:33:33.333', 'DDDD') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'DDDD'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'DDDD'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -95,7 +95,7 @@ select 
date_format('2018-11-17 13:33:33.333', 'HHH') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'HHH'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'HHH'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -104,7 +104,7 @@ select date_format('2018-11-17 13:33:33.333', 'hhh') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'hhh'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'hhh'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -113,7 +113,7 @@ select date_format('2018-11-17 13:33:33.333', 'kkk') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'kkk'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'kkk'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -122,7 +122,7 @@ select date_format('2018-11-17 13:33:33.333', 'KKK') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'KKK'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'KKK'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -131,7 +131,7 @@ select date_format('2018-11-17 13:33:33.333', 'mmm') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'mmm'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'mmm'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -140,7 +140,7 @@ select date_format('2018-11-17 13:33:33.333', 'sss') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'sss'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'sss'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -149,7 +149,7 @@ select date_format('2018-11-17 13:33:33.333', 'SSSSSSSSSS') struct<> -- !query output org.apache.spark.SparkUpgradeException 
-{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'SSSSSSSSSS'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'SSSSSSSSSS'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -158,7 +158,7 @@ select date_format('2018-11-17 13:33:33.333', 'aa') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'aa'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'aa'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -167,7 +167,7 @@ select date_format('2018-11-17 13:33:33.333', 'V') struct<> -- !query output java.lang.IllegalArgumentException -{"errorClass":"legacy","messageParameters":["Pattern letter count must be 2: V"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Pattern letter count must be 2: V"},"queryContext":[]} -- !query @@ -176,7 +176,7 @@ select date_format('2018-11-17 13:33:33.333', 'zzzzz') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'zzzzz'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'zzzzz'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -185,7 +185,7 @@ select date_format('2018-11-17 13:33:33.333', 'XXXXXX') struct<> -- !query output java.lang.IllegalArgumentException -{"errorClass":"legacy","messageParameters":["Too many pattern letters: X"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Too many pattern letters: X"},"queryContext":[]} -- !query @@ -194,7 +194,7 @@ select date_format('2018-11-17 13:33:33.333', 'ZZZZZZ') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'ZZZZZZ'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'ZZZZZZ'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -203,7 +203,7 @@ select date_format('2018-11-17 13:33:33.333', 'OO') struct<> -- !query output java.lang.IllegalArgumentException -{"errorClass":"legacy","messageParameters":["Pattern letter count must be 1 or 4: O"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Pattern letter count must be 1 or 4: O"},"queryContext":[]} -- !query @@ -212,7 +212,7 @@ select date_format('2018-11-17 13:33:33.333', 'xxxxxx') struct<> -- !query output java.lang.IllegalArgumentException -{"errorClass":"legacy","messageParameters":["Too many pattern letters: x"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Too many pattern letters: x"},"queryContext":[]} -- !query @@ -221,7 +221,7 @@ select date_format('2018-11-17 13:33:33.333', 'A') 
struct<> -- !query output java.lang.IllegalArgumentException -{"errorClass":"legacy","messageParameters":["Illegal pattern character: A"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Illegal pattern character: A"},"queryContext":[]} -- !query @@ -230,7 +230,7 @@ select date_format('2018-11-17 13:33:33.333', 'n') struct<> -- !query output java.lang.IllegalArgumentException -{"errorClass":"legacy","messageParameters":["Illegal pattern character: n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Illegal pattern character: n"},"queryContext":[]} -- !query @@ -239,7 +239,7 @@ select date_format('2018-11-17 13:33:33.333', 'N') struct<> -- !query output java.lang.IllegalArgumentException -{"errorClass":"legacy","messageParameters":["Illegal pattern character: N"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Illegal pattern character: N"},"queryContext":[]} -- !query @@ -248,7 +248,7 @@ select date_format('2018-11-17 13:33:33.333', 'p') struct<> -- !query output java.lang.IllegalArgumentException -{"errorClass":"legacy","messageParameters":["Illegal pattern character: p"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Illegal pattern character: p"},"queryContext":[]} -- !query @@ -257,7 +257,7 @@ select date_format('2018-11-17 13:33:33.333', 'Y') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'Y'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'Y'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -266,7 +266,7 @@ select date_format('2018-11-17 13:33:33.333', 'w') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'w'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'w'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -275,7 +275,7 @@ select date_format('2018-11-17 13:33:33.333', 'W') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'W'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'W'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -284,7 +284,7 @@ select date_format('2018-11-17 13:33:33.333', 'u') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'u'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'u'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -293,7 +293,7 @@ select date_format('2018-11-17 13:33:33.333', 'e') struct<> -- !query 
output java.lang.IllegalArgumentException -{"errorClass":"legacy","messageParameters":["All week-based patterns are unsupported since Spark 3.0, detected: e, Please use the SQL function EXTRACT instead"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"All week-based patterns are unsupported since Spark 3.0, detected: e, Please use the SQL function EXTRACT instead"},"queryContext":[]} -- !query @@ -302,7 +302,7 @@ select date_format('2018-11-17 13:33:33.333', 'c') struct<> -- !query output java.lang.IllegalArgumentException -{"errorClass":"legacy","messageParameters":["All week-based patterns are unsupported since Spark 3.0, detected: c, Please use the SQL function EXTRACT instead"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"All week-based patterns are unsupported since Spark 3.0, detected: c, Please use the SQL function EXTRACT instead"},"queryContext":[]} -- !query @@ -311,7 +311,7 @@ select date_format('2018-11-17 13:33:33.333', 'B') struct<> -- !query output java.lang.IllegalArgumentException -{"errorClass":"legacy","messageParameters":["Illegal pattern character: B"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Illegal pattern character: B"},"queryContext":[]} -- !query @@ -320,7 +320,7 @@ select date_format('2018-11-17 13:33:33.333', 'C') struct<> -- !query output java.lang.IllegalArgumentException -{"errorClass":"legacy","messageParameters":["Unknown pattern letter: C"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Unknown pattern letter: C"},"queryContext":[]} -- !query @@ -329,4 +329,4 @@ select date_format('2018-11-17 13:33:33.333', 'I') struct<> -- !query output java.lang.IllegalArgumentException -{"errorClass":"legacy","messageParameters":["Unknown pattern letter: I"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Unknown pattern letter: I"},"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/datetime-formatting-legacy.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime-formatting-legacy.sql.out index 1cb30fb3edc15..757215db24334 100644 --- a/sql/core/src/test/resources/sql-tests/results/datetime-formatting-legacy.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/datetime-formatting-legacy.sql.out @@ -48,7 +48,7 @@ select col, date_format(col, 'q qq') from v struct<> -- !query output java.lang.IllegalArgumentException -{"errorClass":"legacy","messageParameters":["Illegal pattern character 'q'"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Illegal pattern character 'q'"},"queryContext":[]} -- !query @@ -57,7 +57,7 @@ select col, date_format(col, 'Q QQ QQQ QQQQ') from v struct<> -- !query output java.lang.IllegalArgumentException -{"errorClass":"legacy","messageParameters":["Illegal pattern character 'Q'"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Illegal pattern character 'Q'"},"queryContext":[]} -- !query @@ -270,7 +270,7 @@ select col, date_format(col, 'VV') from v struct<> -- !query output java.lang.IllegalArgumentException -{"errorClass":"legacy","messageParameters":["Illegal pattern character 'V'"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Illegal pattern character 'V'"},"queryContext":[]} -- !query @@ -307,7 +307,7 @@ select col, date_format(col, 'XXXX XXXXX') from v struct<> -- !query output java.lang.IllegalArgumentException -{"errorClass":"legacy","messageParameters":["invalid ISO 8601 
format: length=4"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"invalid ISO 8601 format: length=4"},"queryContext":[]} -- !query @@ -330,7 +330,7 @@ select col, date_format(col, 'O OOOO') from v struct<> -- !query output java.lang.IllegalArgumentException -{"errorClass":"legacy","messageParameters":["Illegal pattern character 'O'"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Illegal pattern character 'O'"},"queryContext":[]} -- !query @@ -339,7 +339,7 @@ select col, date_format(col, 'x xx xxx xxxx xxxx xxxxx') from v struct<> -- !query output java.lang.IllegalArgumentException -{"errorClass":"legacy","messageParameters":["Illegal pattern character 'x'"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Illegal pattern character 'x'"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out index 075bd01e73d8e..576cc7577aea2 100644 --- a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out @@ -21,7 +21,7 @@ select date '2020-01-01中文' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 2020-01-01中文(line 1, pos 7)\n\n== SQL ==\nselect date '2020-01-01中文'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the DATE value: 2020-01-01中文(line 1, pos 7)\n\n== SQL ==\nselect date '2020-01-01中文'\n-------^^^\n"},"queryContext":[]} -- !query @@ -54,7 +54,7 @@ select date'015' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 015(line 1, pos 7)\n\n== SQL ==\nselect date'015'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the DATE value: 015(line 1, pos 7)\n\n== SQL ==\nselect date'015'\n-------^^^\n"},"queryContext":[]} -- !query @@ -63,7 +63,7 @@ select date'2021-4294967297-11' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 2021-4294967297-11(line 1, pos 7)\n\n== SQL ==\nselect date'2021-4294967297-11'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the DATE value: 2021-4294967297-11(line 1, pos 7)\n\n== SQL ==\nselect date'2021-4294967297-11'\n-------^^^\n"},"queryContext":[]} -- !query @@ -266,7 +266,7 @@ select date_add('2011-11-11', 1L) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1L)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1L' is of bigint type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1L)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1L' is of bigint type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -275,7 +275,7 @@ select date_add('2011-11-11', 1.0) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 
'date_add(CAST('2011-11-11' AS DATE), 1.0BD)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1.0BD)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -284,7 +284,7 @@ select date_add('2011-11-11', 1E1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -301,7 +301,7 @@ select date_add('2011-11-11', '1.2') struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"SECOND_FUNCTION_ARGUMENT_NOT_INTEGER","sqlState":"22023","messageParameters":["date_add"],"queryContext":[]} +{"errorClass":"SECOND_FUNCTION_ARGUMENT_NOT_INTEGER","sqlState":"22023","messageParameters":{"functionName":"date_add"},"queryContext":[]} -- !query @@ -374,7 +374,7 @@ select date_sub('2011-11-11', 1L) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 1L)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1L' is of bigint type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 1L)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1L' is of bigint type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -383,7 +383,7 @@ select date_sub('2011-11-11', 1.0) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 1.0BD)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 1.0BD)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -392,7 +392,7 @@ select date_sub('2011-11-11', 1E1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -409,7 +409,7 @@ select 
date_sub(date'2011-11-11', '1.2') struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"SECOND_FUNCTION_ARGUMENT_NOT_INTEGER","sqlState":"22023","messageParameters":["date_sub"],"queryContext":[]} +{"errorClass":"SECOND_FUNCTION_ARGUMENT_NOT_INTEGER","sqlState":"22023","messageParameters":{"functionName":"date_sub"},"queryContext":[]} -- !query @@ -450,7 +450,7 @@ select date_add('2011-11-11', int_str) from date_view struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('2011-11-11' AS DATE), date_view.int_str)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'date_view.int_str' is of string type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_add(CAST('2011-11-11' AS DATE), date_view.int_str)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'date_view.int_str' is of string type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -459,7 +459,7 @@ select date_sub('2011-11-11', int_str) from date_view struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), date_view.int_str)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'date_view.int_str' is of string type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), date_view.int_str)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'date_view.int_str' is of string type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -484,7 +484,7 @@ select date '2011-11-11' + 1E1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(DATE '2011-11-11', 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_add(DATE '2011-11-11', 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -525,7 +525,7 @@ select date '2001-10-01' - '2001-09-28' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_sub(DATE '2001-10-01', CAST('2001-09-28' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('2001-09-28' AS DOUBLE)' is of double type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_sub(DATE '2001-10-01', CAST('2001-09-28' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('2001-09-28' AS DOUBLE)' is of double type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -566,7 +566,7 @@ select date '2001-09-28' - date_str from date_view struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_sub(DATE '2001-09-28', CAST(date_view.date_str AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, 
however, 'CAST(date_view.date_str AS DOUBLE)' is of double type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_sub(DATE '2001-09-28', CAST(date_view.date_str AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(date_view.date_str AS DOUBLE)' is of double type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -575,7 +575,7 @@ select date'2011-11-11' + '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(DATE '2011-11-11', CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_add(DATE '2011-11-11', CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -584,7 +584,7 @@ select '1' + date'2011-11-11' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(DATE '2011-11-11', CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_add(DATE '2011-11-11', CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -814,7 +814,7 @@ select timestamp '2019-01-01中文' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the TIMESTAMP value: 2019-01-01中文(line 1, pos 7)\n\n== SQL ==\nselect timestamp '2019-01-01中文'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the TIMESTAMP value: 2019-01-01中文(line 1, pos 7)\n\n== SQL ==\nselect timestamp '2019-01-01中文'\n-------^^^\n"},"queryContext":[]} -- !query @@ -823,7 +823,7 @@ select timestamp'4294967297' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the TIMESTAMP value: 4294967297(line 1, pos 7)\n\n== SQL ==\nselect timestamp'4294967297'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the TIMESTAMP value: 4294967297(line 1, pos 7)\n\n== SQL ==\nselect timestamp'4294967297'\n-------^^^\n"},"queryContext":[]} -- !query @@ -832,7 +832,7 @@ select timestamp'2021-01-01T12:30:4294967297.123456' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the TIMESTAMP value: 2021-01-01T12:30:4294967297.123456(line 1, pos 7)\n\n== SQL ==\nselect timestamp'2021-01-01T12:30:4294967297.123456'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the TIMESTAMP value: 2021-01-01T12:30:4294967297.123456(line 1, pos 7)\n\n== SQL ==\nselect timestamp'2021-01-01T12:30:4294967297.123456'\n-------^^^\n"},"queryContext":[]} -- !query @@ -977,7 +977,7 @@ select 
TIMESTAMP_SECONDS(1230219000123123) struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"long overflow"},"queryContext":[]} -- !query @@ -986,7 +986,7 @@ select TIMESTAMP_SECONDS(-1230219000123123) struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"long overflow"},"queryContext":[]} -- !query @@ -995,7 +995,7 @@ select TIMESTAMP_MILLIS(92233720368547758) struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"long overflow"},"queryContext":[]} -- !query @@ -1004,7 +1004,7 @@ select TIMESTAMP_MILLIS(-92233720368547758) struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"long overflow"},"queryContext":[]} -- !query @@ -1013,7 +1013,7 @@ select TIMESTAMP_SECONDS(0.1234567) struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["Rounding necessary"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Rounding necessary"},"queryContext":[]} -- !query @@ -1391,7 +1391,7 @@ select timestamp'2011-11-11 11:11:11' - '2011-11-11 11:11:10' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' - '2011-11-11 11:11:10')' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, ''2011-11-11 11:11:10'' is of string type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' - '2011-11-11 11:11:10')' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, ''2011-11-11 11:11:10'' is of string type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -1400,7 +1400,7 @@ select '2011-11-11 11:11:11' - timestamp'2011-11-11 11:11:10' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '('2011-11-11 11:11:11' - TIMESTAMP '2011-11-11 11:11:10')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, ''2011-11-11 11:11:11'' is of string type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '('2011-11-11 11:11:11' - TIMESTAMP '2011-11-11 11:11:10')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, ''2011-11-11 11:11:11'' is of string type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -1433,7 +1433,7 @@ select str - timestamp'2011-11-11 11:11:11' from ts_view struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(ts_view.str - TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'ts_view.str' is of string type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(ts_view.str 
- TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'ts_view.str' is of string type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -1442,7 +1442,7 @@ select timestamp'2011-11-11 11:11:11' - str from ts_view struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' - ts_view.str)' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'ts_view.str' is of string type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' - ts_view.str)' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'ts_view.str' is of string type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -1451,7 +1451,7 @@ select timestamp'2011-11-11 11:11:11' + '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' + CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(TIMESTAMP '2011-11-11 11:11:11' + CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' + CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(TIMESTAMP '2011-11-11 11:11:11' + CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1460,7 +1460,7 @@ select '1' + timestamp'2011-11-11 11:11:11' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) + TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) + TIMESTAMP '2011-11-11 11:11:11')' (double and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS DOUBLE) + TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) + TIMESTAMP '2011-11-11 11:11:11')' (double and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1469,7 +1469,7 @@ select timestamp'2011-11-11 11:11:11' + null struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' + NULL)' due to data type mismatch: differing types in '(TIMESTAMP '2011-11-11 11:11:11' + NULL)' (timestamp and void).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' + NULL)' due to data type mismatch: differing types in '(TIMESTAMP '2011-11-11 11:11:11' + NULL)' (timestamp and void).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1478,7 +1478,7 @@ select null + timestamp'2011-11-11 11:11:11' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(NULL + TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(NULL + TIMESTAMP '2011-11-11 11:11:11')' (void and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(NULL + TIMESTAMP '2011-11-11 11:11:11')' due to data type 
mismatch: differing types in '(NULL + TIMESTAMP '2011-11-11 11:11:11')' (void and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out index 2e5dc96bf17dc..e15fe877ea1c8 100644 --- a/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out @@ -5,7 +5,7 @@ select to_timestamp('294248', 'y') struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"long overflow"},"queryContext":[]} -- !query @@ -14,7 +14,7 @@ select to_timestamp('1', 'yy') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":["'1'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":{"datetime":"'1'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -31,7 +31,7 @@ select to_timestamp('123', 'yy') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":["'123'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":{"datetime":"'123'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -40,7 +40,7 @@ select to_timestamp('1', 'yyy') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":["'1'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":{"datetime":"'1'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -49,7 +49,7 @@ select to_timestamp('1234567', 'yyyyyyy') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'yyyyyyy'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'yyyyyyy'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -66,7 +66,7 @@ select to_timestamp('9', 'DD') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":["'9'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":{"datetime":"'9'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -75,7 +75,7 @@ select to_timestamp('9', 'DDD') struct<> -- !query output org.apache.spark.SparkUpgradeException 
-{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":["'9'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":{"datetime":"'9'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -84,7 +84,7 @@ select to_timestamp('99', 'DDD') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":["'99'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":{"datetime":"'99'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -149,7 +149,7 @@ select from_csv('2018-366', 'date Date', map('dateFormat', 'yyyy-DDD')) struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":["'2018-366'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":{"datetime":"'2018-366'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/describe-query.sql.out b/sql/core/src/test/resources/sql-tests/results/describe-query.sql.out index 9b46264fd57c5..4243b55b05c46 100644 --- a/sql/core/src/test/resources/sql-tests/results/describe-query.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/describe-query.sql.out @@ -108,7 +108,7 @@ DESCRIBE INSERT INTO desc_temp1 values (1, 'val1') struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'desc_temp1'",""],"queryContext":[]} +{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":{"error":"'desc_temp1'","hint":""},"queryContext":[]} -- !query @@ -117,7 +117,7 @@ DESCRIBE INSERT INTO desc_temp1 SELECT * FROM desc_temp2 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'desc_temp1'",""],"queryContext":[]} +{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":{"error":"'desc_temp1'","hint":""},"queryContext":[]} -- !query @@ -129,7 +129,7 @@ DESCRIBE struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'insert'",""],"queryContext":[]} +{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":{"error":"'insert'","hint":""},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/describe.sql.out b/sql/core/src/test/resources/sql-tests/results/describe.sql.out index 74a96b11c38fd..0763a8e7644df 100644 --- a/sql/core/src/test/resources/sql-tests/results/describe.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/describe.sql.out @@ -362,7 +362,7 @@ DESC t PARTITION (c='Us', d=2) struct<> -- !query output org.apache.spark.sql.catalyst.analysis.NoSuchPartitionException -{"errorClass":"legacy","messageParameters":["Partition not found in table 't' 
database 'default':\nc -> Us\nd -> 2"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Partition not found in table 't' database 'default':\nc -> Us\nd -> 2"},"queryContext":[]} -- !query @@ -371,7 +371,7 @@ DESC t PARTITION (c='Us') struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Partition spec is invalid. The spec (c) must match the partition spec (c, d) defined in table '`spark_catalog`.`default`.`t`'"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Partition spec is invalid. The spec (c) must match the partition spec (c, d) defined in table '`spark_catalog`.`default`.`t`'"},"queryContext":[]} -- !query @@ -380,7 +380,7 @@ DESC t PARTITION (c='Us', d) struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"INVALID_SQL_SYNTAX","sqlState":"42000","messageParameters":["PARTITION specification is incomplete: `d`"],"queryContext":[]} +{"errorClass":"INVALID_SQL_SYNTAX","sqlState":"42000","messageParameters":{"inputString":"PARTITION specification is incomplete: `d`"},"queryContext":[]} -- !query @@ -456,7 +456,7 @@ DESC temp_v PARTITION (c='Us', d=1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"FORBIDDEN_OPERATION","messageParameters":["DESC PARTITION","TEMPORARY VIEW","`temp_v`"],"queryContext":[]} +{"errorClass":"FORBIDDEN_OPERATION","messageParameters":{"statement":"DESC PARTITION","objectType":"TEMPORARY VIEW","objectName":"`temp_v`"},"queryContext":[]} -- !query @@ -535,7 +535,7 @@ DESC v PARTITION (c='Us', d=1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"FORBIDDEN_OPERATION","messageParameters":["DESC PARTITION","VIEW","`v`"],"queryContext":[]} +{"errorClass":"FORBIDDEN_OPERATION","messageParameters":{"statement":"DESC PARTITION","objectType":"VIEW","objectName":"`v`"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/except-all.sql.out b/sql/core/src/test/resources/sql-tests/results/except-all.sql.out index 508fc05853698..c67cbf1897128 100644 --- a/sql/core/src/test/resources/sql-tests/results/except-all.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/except-all.sql.out @@ -138,7 +138,7 @@ SELECT array(1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["ExceptAll can only be performed on tables with the compatible column types. The first column of the second table is array type which is not compatible with int at same column of first table"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"ExceptAll can only be performed on tables with the compatible column types. 
The first column of the second table is array type which is not compatible with int at same column of first table"},"queryContext":[]} -- !query @@ -210,7 +210,7 @@ SELECT k, v FROM tab4 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["ExceptAll can only be performed on tables with the same number of columns, but the first table has 1 columns and the second table has 2 columns"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"ExceptAll can only be performed on tables with the same number of columns, but the first table has 1 columns and the second table has 2 columns"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/extract.sql.out b/sql/core/src/test/resources/sql-tests/results/extract.sql.out index a1be7378f9d3d..274fe486dc595 100644 --- a/sql/core/src/test/resources/sql-tests/results/extract.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/extract.sql.out @@ -317,7 +317,7 @@ select extract(not_supported from c) from t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Literals of type 'not_supported' are currently not supported for the string type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Literals of type 'not_supported' are currently not supported for the string type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -326,7 +326,7 @@ select extract(not_supported from i) from t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Literals of type 'not_supported' are currently not supported for the interval year to month type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Literals of type 'not_supported' are currently not supported for the interval year to month type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -335,7 +335,7 @@ select extract(not_supported from j) from t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Literals of type 'not_supported' are currently not supported for the interval day to second type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Literals of type 'not_supported' are currently not supported for the interval day to second type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -648,7 +648,7 @@ select date_part('not_supported', c) from t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Literals of type 'not_supported' are currently not supported for the string type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Literals of type 'not_supported' are currently not supported for the string type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -657,7 +657,7 @@ select date_part(c, c) from t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["The 'field' parameter of function 'date_part' needs to be a string literal.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"The 'field' parameter of function 'date_part' needs to be a string literal.; line 1 pos 7"},"queryContext":[]} -- !query @@ -674,7 +674,7 @@ select date_part(i, i) from t struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":"legacy","messageParameters":["The 'field' parameter of function 'date_part' needs to be a string literal.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"The 'field' parameter of function 'date_part' needs to be a string literal.; line 1 pos 7"},"queryContext":[]} -- !query @@ -883,7 +883,7 @@ select extract(DAY from interval '2-1' YEAR TO MONTH) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Literals of type 'DAY' are currently not supported for the interval year to month type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Literals of type 'DAY' are currently not supported for the interval year to month type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -892,7 +892,7 @@ select date_part('DAY', interval '2-1' YEAR TO MONTH) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Literals of type 'DAY' are currently not supported for the interval year to month type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Literals of type 'DAY' are currently not supported for the interval year to month type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -901,7 +901,7 @@ select date_part('not_supported', interval '2-1' YEAR TO MONTH) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Literals of type 'not_supported' are currently not supported for the interval year to month type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Literals of type 'not_supported' are currently not supported for the interval year to month type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -1014,7 +1014,7 @@ select extract(MONTH from interval '123 12:34:56.789123123' DAY TO SECOND) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Literals of type 'MONTH' are currently not supported for the interval day to second type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Literals of type 'MONTH' are currently not supported for the interval day to second type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -1023,4 +1023,4 @@ select date_part('not_supported', interval '123 12:34:56.789123123' DAY TO SECON struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Literals of type 'not_supported' are currently not supported for the interval day to second type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Literals of type 'not_supported' are currently not supported for the interval day to second type.; line 1 pos 7"},"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/group-analytics.sql.out b/sql/core/src/test/resources/sql-tests/results/group-analytics.sql.out index 301f36796d8c8..f53b7d0263b38 100644 --- a/sql/core/src/test/resources/sql-tests/results/group-analytics.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/group-analytics.sql.out @@ -131,7 +131,7 @@ SELECT course, year, SUM(earnings) FROM courseSales GROUP BY ROLLUP(course, year struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nEmpty set in ROLLUP grouping sets is not supported.(line 1, pos 
61)\n\n== SQL ==\nSELECT course, year, SUM(earnings) FROM courseSales GROUP BY ROLLUP(course, year, (course, year), ()) ORDER BY course, year\n-------------------------------------------------------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nEmpty set in ROLLUP grouping sets is not supported.(line 1, pos 61)\n\n== SQL ==\nSELECT course, year, SUM(earnings) FROM courseSales GROUP BY ROLLUP(course, year, (course, year), ()) ORDER BY course, year\n-------------------------------------------------------------^^^\n"},"queryContext":[]} -- !query @@ -188,7 +188,7 @@ SELECT course, year, SUM(earnings) FROM courseSales GROUP BY CUBE(course, year, struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nEmpty set in CUBE grouping sets is not supported.(line 1, pos 61)\n\n== SQL ==\nSELECT course, year, SUM(earnings) FROM courseSales GROUP BY CUBE(course, year, (course, year), ()) ORDER BY course, year\n-------------------------------------------------------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nEmpty set in CUBE grouping sets is not supported.(line 1, pos 61)\n\n== SQL ==\nSELECT course, year, SUM(earnings) FROM courseSales GROUP BY CUBE(course, year, (course, year), ()) ORDER BY course, year\n-------------------------------------------------------------^^^\n"},"queryContext":[]} -- !query @@ -441,7 +441,7 @@ SELECT course, year, GROUPING(course) FROM courseSales GROUP BY course, year struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["grouping() can only be used with GroupingSets/Cube/Rollup"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"grouping() can only be used with GroupingSets/Cube/Rollup"},"queryContext":[]} -- !query @@ -450,7 +450,7 @@ SELECT course, year, GROUPING_ID(course, year) FROM courseSales GROUP BY course, struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["grouping_id() can only be used with GroupingSets/Cube/Rollup"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"grouping_id() can only be used with GroupingSets/Cube/Rollup"},"queryContext":[]} -- !query @@ -486,7 +486,7 @@ SELECT course, year FROM courseSales GROUP BY course, year HAVING GROUPING(cours struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNSUPPORTED_GROUPING_EXPRESSION","messageParameters":[],"queryContext":[]} +{"errorClass":"UNSUPPORTED_GROUPING_EXPRESSION","messageParameters":{},"queryContext":[]} -- !query @@ -495,7 +495,7 @@ SELECT course, year FROM courseSales GROUP BY course, year HAVING GROUPING_ID(co struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNSUPPORTED_GROUPING_EXPRESSION","messageParameters":[],"queryContext":[]} +{"errorClass":"UNSUPPORTED_GROUPING_EXPRESSION","messageParameters":{},"queryContext":[]} -- !query @@ -550,7 +550,7 @@ SELECT course, year FROM courseSales GROUP BY course, year ORDER BY GROUPING(cou struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNSUPPORTED_GROUPING_EXPRESSION","messageParameters":[],"queryContext":[]} +{"errorClass":"UNSUPPORTED_GROUPING_EXPRESSION","messageParameters":{},"queryContext":[]} -- !query @@ -559,7 +559,7 @@ SELECT course, year FROM courseSales GROUP BY course, year ORDER BY GROUPING_ID( struct<> -- !query output 
org.apache.spark.sql.AnalysisException -{"errorClass":"UNSUPPORTED_GROUPING_EXPRESSION","messageParameters":[],"queryContext":[]} +{"errorClass":"UNSUPPORTED_GROUPING_EXPRESSION","messageParameters":{},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/group-by-filter.sql.out b/sql/core/src/test/resources/sql-tests/results/group-by-filter.sql.out index 9378c0b9bf9ee..ad45a9c90d5a2 100644 --- a/sql/core/src/test/resources/sql-tests/results/group-by-filter.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/group-by-filter.sql.out @@ -48,7 +48,7 @@ SELECT a, COUNT(b) FILTER (WHERE a >= 2) FROM testData struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["grouping expressions sequence is empty, and 'testdata.a' is not an aggregate function. Wrap '(count(testdata.b) FILTER (WHERE (testdata.a >= 2)) AS `count(b) FILTER (WHERE (a >= 2))`)' in windowing function(s) or wrap 'testdata.a' in first() (or first_value) if you don't care which value you get."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"grouping expressions sequence is empty, and 'testdata.a' is not an aggregate function. Wrap '(count(testdata.b) FILTER (WHERE (testdata.a >= 2)) AS `count(b) FILTER (WHERE (a >= 2))`)' in windowing function(s) or wrap 'testdata.a' in first() (or first_value) if you don't care which value you get."},"queryContext":[]} -- !query @@ -228,7 +228,7 @@ SELECT a, COUNT(b) FILTER (WHERE a != 2) FROM testData GROUP BY b struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["expression 'testdata.a' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"expression 'testdata.a' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get."},"queryContext":[]} -- !query @@ -708,7 +708,7 @@ SELECT a + 2, COUNT(b) FILTER (WHERE b IN (1, 2)) FROM testData GROUP BY a + 1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["expression 'testdata.a' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"expression 'testdata.a' is neither present in the group by, nor is it an aggregate function. 
Add to group by or wrap in first() (or first_value) if you don't care which value you get."},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/group-by-ordinal.sql.out b/sql/core/src/test/resources/sql-tests/results/group-by-ordinal.sql.out index 5f2120b7138a2..6a7aec08dcc66 100644 --- a/sql/core/src/test/resources/sql-tests/results/group-by-ordinal.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/group-by-ordinal.sql.out @@ -92,7 +92,7 @@ select a, b from data group by -1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["GROUP BY position -1 is not in select list (valid range is [1, 2]); line 1 pos 31"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"GROUP BY position -1 is not in select list (valid range is [1, 2]); line 1 pos 31"},"queryContext":[]} -- !query @@ -101,7 +101,7 @@ select a, b from data group by 0 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["GROUP BY position 0 is not in select list (valid range is [1, 2]); line 1 pos 31"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"GROUP BY position 0 is not in select list (valid range is [1, 2]); line 1 pos 31"},"queryContext":[]} -- !query @@ -110,7 +110,7 @@ select a, b from data group by 3 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["GROUP BY position 3 is not in select list (valid range is [1, 2]); line 1 pos 31"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"GROUP BY position 3 is not in select list (valid range is [1, 2]); line 1 pos 31"},"queryContext":[]} -- !query @@ -119,7 +119,7 @@ select a, b, sum(b) from data group by 3 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["GROUP BY 3 refers to an expression that is or contains an aggregate function. Aggregate functions are not allowed in GROUP BY, but got sum(data.b) AS `sum(b)`; line 1 pos 39"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"GROUP BY 3 refers to an expression that is or contains an aggregate function. Aggregate functions are not allowed in GROUP BY, but got sum(data.b) AS `sum(b)`; line 1 pos 39"},"queryContext":[]} -- !query @@ -128,7 +128,7 @@ select a, b, sum(b) + 2 from data group by 3 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["GROUP BY 3 refers to an expression that is or contains an aggregate function. Aggregate functions are not allowed in GROUP BY, but got (sum(data.b) + CAST(2 AS BIGINT)) AS `(sum(b) + 2)`; line 1 pos 43"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"GROUP BY 3 refers to an expression that is or contains an aggregate function. 
Aggregate functions are not allowed in GROUP BY, but got (sum(data.b) + CAST(2 AS BIGINT)) AS `(sum(b) + 2)`; line 1 pos 43"},"queryContext":[]} -- !query @@ -152,7 +152,7 @@ select * from data group by a, b, 1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Star (*) is not allowed in select list when GROUP BY ordinal position is used"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Star (*) is not allowed in select list when GROUP BY ordinal position is used"},"queryContext":[]} -- !query @@ -349,7 +349,7 @@ select a, b, count(1) from data group by a, -1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["GROUP BY position -1 is not in select list (valid range is [1, 3]); line 1 pos 44"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"GROUP BY position -1 is not in select list (valid range is [1, 3]); line 1 pos 44"},"queryContext":[]} -- !query @@ -358,7 +358,7 @@ select a, b, count(1) from data group by a, 3 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["GROUP BY 3 refers to an expression that is or contains an aggregate function. Aggregate functions are not allowed in GROUP BY, but got count(1) AS `count(1)`; line 1 pos 44"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"GROUP BY 3 refers to an expression that is or contains an aggregate function. Aggregate functions are not allowed in GROUP BY, but got count(1) AS `count(1)`; line 1 pos 44"},"queryContext":[]} -- !query @@ -367,7 +367,7 @@ select a, b, count(1) from data group by cube(-1, 2) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["GROUP BY position -1 is not in select list (valid range is [1, 3]); line 1 pos 46"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"GROUP BY position -1 is not in select list (valid range is [1, 3]); line 1 pos 46"},"queryContext":[]} -- !query @@ -376,7 +376,7 @@ select a, b, count(1) from data group by cube(1, 3) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["GROUP BY 3 refers to an expression that is or contains an aggregate function. Aggregate functions are not allowed in GROUP BY, but got count(1) AS `count(1)`; line 1 pos 49"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"GROUP BY 3 refers to an expression that is or contains an aggregate function. Aggregate functions are not allowed in GROUP BY, but got count(1) AS `count(1)`; line 1 pos 49"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/group-by.sql.out b/sql/core/src/test/resources/sql-tests/results/group-by.sql.out index 015534e43a5dd..41de1de2031a7 100644 --- a/sql/core/src/test/resources/sql-tests/results/group-by.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/group-by.sql.out @@ -15,7 +15,7 @@ SELECT a, COUNT(b) FROM testData struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["grouping expressions sequence is empty, and 'testdata.a' is not an aggregate function. 
Wrap '(count(testdata.b) AS `count(b)`)' in windowing function(s) or wrap 'testdata.a' in first() (or first_value) if you don't care which value you get."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"grouping expressions sequence is empty, and 'testdata.a' is not an aggregate function. Wrap '(count(testdata.b) AS `count(b)`)' in windowing function(s) or wrap 'testdata.a' in first() (or first_value) if you don't care which value you get."},"queryContext":[]} -- !query @@ -43,7 +43,7 @@ SELECT a, COUNT(b) FROM testData GROUP BY b struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["expression 'testdata.a' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"expression 'testdata.a' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get."},"queryContext":[]} -- !query @@ -107,7 +107,7 @@ SELECT a + 2, COUNT(b) FROM testData GROUP BY a + 1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["expression 'testdata.a' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"expression 'testdata.a' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get."},"queryContext":[]} -- !query @@ -164,7 +164,7 @@ SELECT a AS k, COUNT(non_existing) FROM testData GROUP BY k struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`non_existing`","`testdata`.`a`, `testdata`.`b`"],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`non_existing`","objectList":"`testdata`.`a`, `testdata`.`b`"},"queryContext":[]} -- !query @@ -173,7 +173,7 @@ SELECT COUNT(b) AS k FROM testData GROUP BY k struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["aggregate functions are not allowed in GROUP BY, but found count(testdata.b)"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"aggregate functions are not allowed in GROUP BY, but found count(testdata.b)"},"queryContext":[]} -- !query @@ -191,7 +191,7 @@ SELECT k AS a, COUNT(v) FROM testDataHasSameNameWithAlias GROUP BY a struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["expression 'testdatahassamenamewithalias.k' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"expression 'testdatahassamenamewithalias.k' is neither present in the group by, nor is it an aggregate function. 
Add to group by or wrap in first() (or first_value) if you don't care which value you get."},"queryContext":[]} -- !query @@ -208,7 +208,7 @@ SELECT a AS k, COUNT(b) FROM testData GROUP BY k struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`k`","`testdata`.`a`, `testdata`.`b`"],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`k`","objectList":"`testdata`.`a`, `testdata`.`b`"},"queryContext":[]} -- !query @@ -280,7 +280,7 @@ SELECT id FROM range(10) HAVING id > 0 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["grouping expressions sequence is empty, and 'id' is not an aggregate function. Wrap '()' in windowing function(s) or wrap 'id' in first() (or first_value) if you don't care which value you get."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"grouping expressions sequence is empty, and 'id' is not an aggregate function. Wrap '()' in windowing function(s) or wrap 'id' in first() (or first_value) if you don't care which value you get."},"queryContext":[]} -- !query @@ -314,7 +314,7 @@ SELECT 1 FROM range(10) HAVING MAX(id) > 0 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["\nAggregate/Window/Generate expressions are not valid in where clause of the query.\nExpression in where clause: [(max(id) > CAST(0 AS BIGINT))]\nInvalid expressions: [max(id)]"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nAggregate/Window/Generate expressions are not valid in where clause of the query.\nExpression in where clause: [(max(id) > CAST(0 AS BIGINT))]\nInvalid expressions: [max(id)]"},"queryContext":[]} -- !query @@ -444,7 +444,7 @@ SELECT every(1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'every(1)' due to data type mismatch: argument 1 requires boolean type, however, '1' is of int type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'every(1)' due to data type mismatch: argument 1 requires boolean type, however, '1' is of int type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -453,7 +453,7 @@ SELECT some(1S) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'some(1S)' due to data type mismatch: argument 1 requires boolean type, however, '1S' is of smallint type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'some(1S)' due to data type mismatch: argument 1 requires boolean type, however, '1S' is of smallint type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -462,7 +462,7 @@ SELECT any(1L) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'any(1L)' due to data type mismatch: argument 1 requires boolean type, however, '1L' is of bigint type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'any(1L)' due to data type mismatch: argument 1 requires boolean type, however, '1L' is of bigint type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -471,7 +471,7 @@ SELECT every("true") struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":"legacy","messageParameters":["cannot resolve 'every('true')' due to data type mismatch: argument 1 requires boolean type, however, ''true'' is of string type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'every('true')' due to data type mismatch: argument 1 requires boolean type, however, ''true'' is of string type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -480,7 +480,7 @@ SELECT bool_and(1.0) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'bool_and(1.0BD)' due to data type mismatch: argument 1 requires boolean type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'bool_and(1.0BD)' due to data type mismatch: argument 1 requires boolean type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -489,7 +489,7 @@ SELECT bool_or(1.0D) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'bool_or(1.0D)' due to data type mismatch: argument 1 requires boolean type, however, '1.0D' is of double type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'bool_or(1.0D)' due to data type mismatch: argument 1 requires boolean type, however, '1.0D' is of double type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -609,7 +609,7 @@ SELECT count(*) FROM test_agg WHERE count(*) > 1L struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["\nAggregate/Window/Generate expressions are not valid in where clause of the query.\nExpression in where clause: [(count(1) > 1L)]\nInvalid expressions: [count(1)]"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nAggregate/Window/Generate expressions are not valid in where clause of the query.\nExpression in where clause: [(count(1) > 1L)]\nInvalid expressions: [count(1)]"},"queryContext":[]} -- !query @@ -618,7 +618,7 @@ SELECT count(*) FROM test_agg WHERE count(*) + 1L > 1L struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["\nAggregate/Window/Generate expressions are not valid in where clause of the query.\nExpression in where clause: [((count(1) + 1L) > 1L)]\nInvalid expressions: [count(1)]"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nAggregate/Window/Generate expressions are not valid in where clause of the query.\nExpression in where clause: [((count(1) + 1L) > 1L)]\nInvalid expressions: [count(1)]"},"queryContext":[]} -- !query @@ -627,7 +627,7 @@ SELECT count(*) FROM test_agg WHERE k = 1 or k = 2 or count(*) + 1L > 1L or max( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["\nAggregate/Window/Generate expressions are not valid in where clause of the query.\nExpression in where clause: [(((test_agg.k = 1) OR (test_agg.k = 2)) OR (((count(1) + 1L) > 1L) OR (max(test_agg.k) > 1)))]\nInvalid expressions: [count(1), max(test_agg.k)]"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nAggregate/Window/Generate expressions are not valid in where clause of the query.\nExpression in where clause: [(((test_agg.k = 1) OR (test_agg.k = 2)) OR (((count(1) + 1L) > 1L) OR (max(test_agg.k) > 
1)))]\nInvalid expressions: [count(1), max(test_agg.k)]"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/grouping_set.sql.out b/sql/core/src/test/resources/sql-tests/results/grouping_set.sql.out index 371e449068b2f..1c7ac914a342d 100644 --- a/sql/core/src/test/resources/sql-tests/results/grouping_set.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/grouping_set.sql.out @@ -134,7 +134,7 @@ SELECT a, b, c, count(d) FROM grouping GROUP BY WITH ROLLUP struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'ROLLUP'",": extra input 'ROLLUP'"],"queryContext":[]} +{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":{"error":"'ROLLUP'","hint":": extra input 'ROLLUP'"},"queryContext":[]} -- !query @@ -143,7 +143,7 @@ SELECT a, b, c, count(d) FROM grouping GROUP BY WITH CUBE struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'CUBE'",": extra input 'CUBE'"],"queryContext":[]} +{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":{"error":"'CUBE'","hint":": extra input 'CUBE'"},"queryContext":[]} -- !query @@ -152,7 +152,7 @@ SELECT c1 FROM (values (1,2), (3,2)) t(c1, c2) GROUP BY GROUPING SETS (()) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["expression 't.c1' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"expression 't.c1' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get."},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/having.sql.out b/sql/core/src/test/resources/sql-tests/results/having.sql.out index 224912ea1a63d..b74abe25bf3ec 100644 --- a/sql/core/src/test/resources/sql-tests/results/having.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/having.sql.out @@ -35,7 +35,7 @@ SELECT count(k) FROM hav GROUP BY v HAVING v = array(1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(hav.v = array(1))' due to data type mismatch: differing types in '(hav.v = array(1))' (int and array).; line 1 pos 43"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(hav.v = array(1))' due to data type mismatch: differing types in '(hav.v = array(1))' (int and array).; line 1 pos 43"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/higher-order-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/higher-order-functions.sql.out index ba2603a50e443..368bea9c3a07a 100644 --- a/sql/core/src/test/resources/sql-tests/results/higher-order-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/higher-order-functions.sql.out @@ -17,7 +17,7 @@ select upper(x -> x) as v struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["A lambda function should only be used in a higher order function. 
However, its class is org.apache.spark.sql.catalyst.expressions.Upper, which is not a higher order function.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"A lambda function should only be used in a higher order function. However, its class is org.apache.spark.sql.catalyst.expressions.Upper, which is not a higher order function.; line 1 pos 7"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/ilike-all.sql.out b/sql/core/src/test/resources/sql-tests/results/ilike-all.sql.out index f47ff4a61e725..d615b90bf021f 100644 --- a/sql/core/src/test/resources/sql-tests/results/ilike-all.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ilike-all.sql.out @@ -129,4 +129,4 @@ SELECT company FROM ilike_any_table WHERE company ILIKE ALL () struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nExpected something between '(' and ')'.(line 1, pos 50)\n\n== SQL ==\nSELECT company FROM ilike_any_table WHERE company ILIKE ALL ()\n--------------------------------------------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nExpected something between '(' and ')'.(line 1, pos 50)\n\n== SQL ==\nSELECT company FROM ilike_any_table WHERE company ILIKE ALL ()\n--------------------------------------------------^^^\n"},"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/ilike-any.sql.out b/sql/core/src/test/resources/sql-tests/results/ilike-any.sql.out index 033fe7579c80b..e0d60acb3f85d 100644 --- a/sql/core/src/test/resources/sql-tests/results/ilike-any.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ilike-any.sql.out @@ -135,4 +135,4 @@ SELECT company FROM ilike_any_table WHERE company ILIKE ANY () struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nExpected something between '(' and ')'.(line 1, pos 50)\n\n== SQL ==\nSELECT company FROM ilike_any_table WHERE company ILIKE ANY ()\n--------------------------------------------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nExpected something between '(' and ')'.(line 1, pos 50)\n\n== SQL ==\nSELECT company FROM ilike_any_table WHERE company ILIKE ANY ()\n--------------------------------------------------^^^\n"},"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/inline-table.sql.out b/sql/core/src/test/resources/sql-tests/results/inline-table.sql.out index ed6cba10daab1..4d9349aec5dde 100644 --- a/sql/core/src/test/resources/sql-tests/results/inline-table.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/inline-table.sql.out @@ -110,7 +110,7 @@ select * from values ("one", rand(5)), ("two", 3.0D) as data(a, b) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot evaluate expression rand(5) in inline table definition; line 1 pos 29"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot evaluate expression rand(5) in inline table definition; line 1 pos 29"},"queryContext":[]} -- !query @@ -119,7 +119,7 @@ select * from values ("one", 2.0), ("two") as data(a, b) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["expected 2 columns but found 1 columns in row 1; line 1 pos 14"],"queryContext":[]} 
+{"errorClass":"legacy","messageParameters":{"message":"expected 2 columns but found 1 columns in row 1; line 1 pos 14"},"queryContext":[]} -- !query @@ -128,7 +128,7 @@ select * from values ("one", array(0, 1)), ("two", struct(1, 2)) as data(a, b) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["incompatible types found in column b for inline table; line 1 pos 14"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"incompatible types found in column b for inline table; line 1 pos 14"},"queryContext":[]} -- !query @@ -137,7 +137,7 @@ select * from values ("one"), ("two") as data(a, b) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["expected 2 columns but found 1 columns in row 0; line 1 pos 14"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"expected 2 columns but found 1 columns in row 0; line 1 pos 14"},"queryContext":[]} -- !query @@ -146,7 +146,7 @@ select * from values ("one", random_not_exist_func(1)), ("two", 2) as data(a, b) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Undefined function: random_not_exist_func. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.random_not_exist_func.; line 1 pos 29"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Undefined function: random_not_exist_func. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.random_not_exist_func.; line 1 pos 29"},"queryContext":[]} -- !query @@ -155,7 +155,7 @@ select * from values ("one", count(1)), ("two", 2) as data(a, b) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot evaluate expression count(1) in inline table definition; line 1 pos 29"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot evaluate expression count(1) in inline table definition; line 1 pos 29"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/intersect-all.sql.out b/sql/core/src/test/resources/sql-tests/results/intersect-all.sql.out index 9d3ea78033691..710d6cc6c7f6d 100644 --- a/sql/core/src/test/resources/sql-tests/results/intersect-all.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/intersect-all.sql.out @@ -95,7 +95,7 @@ SELECT array(1), 2 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["IntersectAll can only be performed on tables with the compatible column types. The first column of the second table is array type which is not compatible with int at same column of first table"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"IntersectAll can only be performed on tables with the compatible column types. 
The first column of the second table is array type which is not compatible with int at same column of first table"},"queryContext":[]} -- !query @@ -106,7 +106,7 @@ SELECT k, v FROM tab2 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["IntersectAll can only be performed on tables with the same number of columns, but the first table has 1 columns and the second table has 2 columns"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"IntersectAll can only be performed on tables with the same number of columns, but the first table has 1 columns and the second table has 2 columns"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/interval.sql.out index 9441e9708b5f4..60a7bd2dd48c7 100644 --- a/sql/core/src/test/resources/sql-tests/results/interval.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/interval.sql.out @@ -13,7 +13,7 @@ select interval 4 month 2 weeks 3 microseconds * 1.5 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot mix year-month and day-time fields: interval 4 month 2 weeks 3 microseconds(line 1, pos 7)\n\n== SQL ==\nselect interval 4 month 2 weeks 3 microseconds * 1.5\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot mix year-month and day-time fields: interval 4 month 2 weeks 3 microseconds(line 1, pos 7)\n\n== SQL ==\nselect interval 4 month 2 weeks 3 microseconds * 1.5\n-------^^^\n"},"queryContext":[]} -- !query @@ -46,7 +46,7 @@ select interval 2147483647 month * 2 struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["integer overflow"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"integer overflow"},"queryContext":[]} -- !query @@ -55,7 +55,7 @@ select interval 2147483647 month / 0.5 struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["Overflow"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Overflow"},"queryContext":[]} -- !query @@ -64,7 +64,7 @@ select interval 2147483647 day * 2 struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"long overflow"},"queryContext":[]} -- !query @@ -73,7 +73,7 @@ select interval 2147483647 day / 0.5 struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"long overflow"},"queryContext":[]} -- !query @@ -178,7 +178,7 @@ select '2' / interval 2 second struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2' AS DOUBLE) / INTERVAL '02' SECOND)' due to data type mismatch: differing types in '(CAST('2' AS DOUBLE) / INTERVAL '02' SECOND)' (double and interval second).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2' AS DOUBLE) / INTERVAL '02' SECOND)' due to data type mismatch: differing types in '(CAST('2' AS DOUBLE) / INTERVAL '02' SECOND)' (double and interval second).; line 1 pos 7"},"queryContext":[]} -- !query @@ -187,7 +187,7 @@ select '2' / 
interval 2 year struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2' AS DOUBLE) / INTERVAL '2' YEAR)' due to data type mismatch: differing types in '(CAST('2' AS DOUBLE) / INTERVAL '2' YEAR)' (double and interval year).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2' AS DOUBLE) / INTERVAL '2' YEAR)' due to data type mismatch: differing types in '(CAST('2' AS DOUBLE) / INTERVAL '2' YEAR)' (double and interval year).; line 1 pos 7"},"queryContext":[]} -- !query @@ -196,7 +196,7 @@ select interval '2 seconds' / 0 struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"INTERVAL_DIVIDED_BY_ZERO","sqlState":"22012","messageParameters":[],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":30,"fragment":"interval '2 seconds' / "}]} +{"errorClass":"INTERVAL_DIVIDED_BY_ZERO","sqlState":"22012","messageParameters":{},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":30,"fragment":"interval '2 seconds' / "}]} -- !query @@ -229,7 +229,7 @@ select interval '2' year / 0 struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"INTERVAL_DIVIDED_BY_ZERO","sqlState":"22012","messageParameters":[],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":27,"fragment":"interval '2' year / "}]} +{"errorClass":"INTERVAL_DIVIDED_BY_ZERO","sqlState":"22012","messageParameters":{},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":27,"fragment":"interval '2' year / "}]} -- !query @@ -262,7 +262,7 @@ select 2 / interval '2' year struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(2 / INTERVAL '2' YEAR)' due to data type mismatch: differing types in '(2 / INTERVAL '2' YEAR)' (int and interval year).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(2 / INTERVAL '2' YEAR)' due to data type mismatch: differing types in '(2 / INTERVAL '2' YEAR)' (int and interval year).; line 1 pos 7"},"queryContext":[]} -- !query @@ -271,7 +271,7 @@ select 2 / interval '2' hour struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(2 / INTERVAL '02' HOUR)' due to data type mismatch: differing types in '(2 / INTERVAL '02' HOUR)' (int and interval hour).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(2 / INTERVAL '02' HOUR)' due to data type mismatch: differing types in '(2 / INTERVAL '02' HOUR)' (int and interval hour).; line 1 pos 7"},"queryContext":[]} -- !query @@ -280,7 +280,7 @@ select null / interval '2' year struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(NULL / INTERVAL '2' YEAR)' due to data type mismatch: differing types in '(NULL / INTERVAL '2' YEAR)' (void and interval year).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(NULL / INTERVAL '2' YEAR)' due to data type mismatch: differing types in '(NULL / INTERVAL '2' YEAR)' (void and interval year).; line 1 pos 7"},"queryContext":[]} -- !query @@ -289,7 +289,7 @@ select null / interval '2' hour struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":"legacy","messageParameters":["cannot resolve '(NULL / INTERVAL '02' HOUR)' due to data type mismatch: differing types in '(NULL / INTERVAL '02' HOUR)' (void and interval hour).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(NULL / INTERVAL '02' HOUR)' due to data type mismatch: differing types in '(NULL / INTERVAL '02' HOUR)' (void and interval hour).; line 1 pos 7"},"queryContext":[]} -- !query @@ -298,7 +298,7 @@ select -interval '-1 month 1 day -1 second' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot mix year-month and day-time fields: interval '-1 month 1 day -1 second'(line 1, pos 8)\n\n== SQL ==\nselect -interval '-1 month 1 day -1 second'\n--------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot mix year-month and day-time fields: interval '-1 month 1 day -1 second'(line 1, pos 8)\n\n== SQL ==\nselect -interval '-1 month 1 day -1 second'\n--------^^^\n"},"queryContext":[]} -- !query @@ -323,7 +323,7 @@ select -interval -1 month 1 day -1 second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot mix year-month and day-time fields: interval -1 month 1 day -1 second(line 1, pos 8)\n\n== SQL ==\nselect -interval -1 month 1 day -1 second\n--------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot mix year-month and day-time fields: interval -1 month 1 day -1 second(line 1, pos 8)\n\n== SQL ==\nselect -interval -1 month 1 day -1 second\n--------^^^\n"},"queryContext":[]} -- !query @@ -348,7 +348,7 @@ select +interval '-1 month 1 day -1 second' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot mix year-month and day-time fields: interval '-1 month 1 day -1 second'(line 1, pos 8)\n\n== SQL ==\nselect +interval '-1 month 1 day -1 second'\n--------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot mix year-month and day-time fields: interval '-1 month 1 day -1 second'(line 1, pos 8)\n\n== SQL ==\nselect +interval '-1 month 1 day -1 second'\n--------^^^\n"},"queryContext":[]} -- !query @@ -373,7 +373,7 @@ select +interval -1 month 1 day -1 second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot mix year-month and day-time fields: interval -1 month 1 day -1 second(line 1, pos 8)\n\n== SQL ==\nselect +interval -1 month 1 day -1 second\n--------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot mix year-month and day-time fields: interval -1 month 1 day -1 second(line 1, pos 8)\n\n== SQL ==\nselect +interval -1 month 1 day -1 second\n--------^^^\n"},"queryContext":[]} -- !query @@ -654,7 +654,7 @@ select make_dt_interval(2147483647) struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"long overflow"},"queryContext":[]} -- !query @@ -695,7 +695,7 @@ select make_ym_interval(178956970, 8) struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["integer overflow"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"integer 
overflow"},"queryContext":[]} -- !query @@ -712,7 +712,7 @@ select make_ym_interval(-178956970, -9) struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["integer overflow"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"integer overflow"},"queryContext":[]} -- !query @@ -785,7 +785,7 @@ select interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 millisec struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot mix year-month and day-time fields: interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 millisecond 9 microsecond(line 1, pos 7)\n\n== SQL ==\nselect interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 millisecond 9 microsecond\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot mix year-month and day-time fields: interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 millisecond 9 microsecond(line 1, pos 7)\n\n== SQL ==\nselect interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 millisecond 9 microsecond\n-------^^^\n"},"queryContext":[]} -- !query @@ -818,7 +818,7 @@ select interval '30' year '25' month '-100' day '40' hour '80' minute '299.88998 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot mix year-month and day-time fields: interval '30' year '25' month '-100' day '40' hour '80' minute '299.889987299' second(line 1, pos 7)\n\n== SQL ==\nselect interval '30' year '25' month '-100' day '40' hour '80' minute '299.889987299' second\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot mix year-month and day-time fields: interval '30' year '25' month '-100' day '40' hour '80' minute '299.889987299' second(line 1, pos 7)\n\n== SQL ==\nselect interval '30' year '25' month '-100' day '40' hour '80' minute '299.889987299' second\n-------^^^\n"},"queryContext":[]} -- !query @@ -947,7 +947,7 @@ select interval '20 15:40:32.99899999' day to hour struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval day to hour: 20 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '20 15:40:32.99899999' day to hour\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nInterval string does not match day-time format of `[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval day to hour: 20 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '20 15:40:32.99899999' day to hour\n----------------^^^\n"},"queryContext":[]} -- !query @@ -956,7 +956,7 @@ select interval '20 15:40:32.99899999' day to minute struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]d h:m`, `INTERVAL [+|-]'[+|-]d h:m' DAY TO MINUTE` when cast to interval day to minute: 20 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore 
the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '20 15:40:32.99899999' day to minute\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nInterval string does not match day-time format of `[+|-]d h:m`, `INTERVAL [+|-]'[+|-]d h:m' DAY TO MINUTE` when cast to interval day to minute: 20 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '20 15:40:32.99899999' day to minute\n----------------^^^\n"},"queryContext":[]} -- !query @@ -965,7 +965,7 @@ select interval '15:40:32.99899999' hour to minute struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]h:m`, `INTERVAL [+|-]'[+|-]h:m' HOUR TO MINUTE` when cast to interval hour to minute: 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '15:40:32.99899999' hour to minute\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nInterval string does not match day-time format of `[+|-]h:m`, `INTERVAL [+|-]'[+|-]h:m' HOUR TO MINUTE` when cast to interval hour to minute: 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '15:40:32.99899999' hour to minute\n----------------^^^\n"},"queryContext":[]} -- !query @@ -974,7 +974,7 @@ select interval '15:40.99899999' hour to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 15:40.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '15:40.99899999' hour to second\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nInterval string does not match day-time format of `[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 15:40.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '15:40.99899999' hour to second\n----------------^^^\n"},"queryContext":[]} -- !query @@ -983,7 +983,7 @@ select interval '15:40' hour to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 15:40, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '15:40' hour to second\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nInterval string does not match day-time format of `[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 15:40, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 
16)\n\n== SQL ==\nselect interval '15:40' hour to second\n----------------^^^\n"},"queryContext":[]} -- !query @@ -992,7 +992,7 @@ select interval '20 40:32.99899999' minute to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]m:s.n`, `INTERVAL [+|-]'[+|-]m:s.n' MINUTE TO SECOND` when cast to interval minute to second: 20 40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '20 40:32.99899999' minute to second\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nInterval string does not match day-time format of `[+|-]m:s.n`, `INTERVAL [+|-]'[+|-]m:s.n' MINUTE TO SECOND` when cast to interval minute to second: 20 40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '20 40:32.99899999' minute to second\n----------------^^^\n"},"queryContext":[]} -- !query @@ -1001,7 +1001,7 @@ select interval 10 nanoseconds struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nError parsing ' 10 nanoseconds' to interval, invalid unit 'nanoseconds'(line 1, pos 16)\n\n== SQL ==\nselect interval 10 nanoseconds\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nError parsing ' 10 nanoseconds' to interval, invalid unit 'nanoseconds'(line 1, pos 16)\n\n== SQL ==\nselect interval 10 nanoseconds\n----------------^^^\n"},"queryContext":[]} -- !query @@ -1090,7 +1090,7 @@ select interval struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nat least one time unit should be given for interval literal(line 1, pos 7)\n\n== SQL ==\nselect interval\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nat least one time unit should be given for interval literal(line 1, pos 7)\n\n== SQL ==\nselect interval\n-------^^^\n"},"queryContext":[]} -- !query @@ -1099,7 +1099,7 @@ select interval 1 fake_unit struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nError parsing ' 1 fake_unit' to interval, invalid unit 'fake_unit'(line 1, pos 16)\n\n== SQL ==\nselect interval 1 fake_unit\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nError parsing ' 1 fake_unit' to interval, invalid unit 'fake_unit'(line 1, pos 16)\n\n== SQL ==\nselect interval 1 fake_unit\n----------------^^^\n"},"queryContext":[]} -- !query @@ -1108,7 +1108,7 @@ select interval 1 year to month struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nThe value of from-to unit must be a string(line 1, pos 16)\n\n== SQL ==\nselect interval 1 year to month\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nThe value of from-to unit must be a string(line 1, pos 16)\n\n== SQL ==\nselect interval 1 year to month\n----------------^^^\n"},"queryContext":[]} -- !query @@ -1117,7 +1117,7 @@ select interval '1' year to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException 
-{"errorClass":"legacy","messageParameters":["\nIntervals FROM year TO second are not supported.(line 1, pos 16)\n\n== SQL ==\nselect interval '1' year to second\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nIntervals FROM year TO second are not supported.(line 1, pos 16)\n\n== SQL ==\nselect interval '1' year to second\n----------------^^^\n"},"queryContext":[]} -- !query @@ -1126,7 +1126,7 @@ select interval '10-9' year to month '2-1' year to month struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 37)\n\n== SQL ==\nselect interval '10-9' year to month '2-1' year to month\n-------------------------------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 37)\n\n== SQL ==\nselect interval '10-9' year to month '2-1' year to month\n-------------------------------------^^^\n"},"queryContext":[]} -- !query @@ -1135,7 +1135,7 @@ select interval '10-9' year to month '12:11:10' hour to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 37)\n\n== SQL ==\nselect interval '10-9' year to month '12:11:10' hour to second\n-------------------------------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 37)\n\n== SQL ==\nselect interval '10-9' year to month '12:11:10' hour to second\n-------------------------------------^^^\n"},"queryContext":[]} -- !query @@ -1144,7 +1144,7 @@ select interval '1 15:11' day to minute '12:11:10' hour to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 40)\n\n== SQL ==\nselect interval '1 15:11' day to minute '12:11:10' hour to second\n----------------------------------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 40)\n\n== SQL ==\nselect interval '1 15:11' day to minute '12:11:10' hour to second\n----------------------------------------^^^\n"},"queryContext":[]} -- !query @@ -1153,7 +1153,7 @@ select interval 1 year '2-1' year to month struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 23)\n\n== SQL ==\nselect interval 1 year '2-1' year to month\n-----------------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 23)\n\n== SQL ==\nselect interval 1 year '2-1' year to month\n-----------------------^^^\n"},"queryContext":[]} -- !query @@ -1162,7 +1162,7 @@ select interval 1 year '12:11:10' hour to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 23)\n\n== SQL 
==\nselect interval 1 year '12:11:10' hour to second\n-----------------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 23)\n\n== SQL ==\nselect interval 1 year '12:11:10' hour to second\n-----------------------^^^\n"},"queryContext":[]} -- !query @@ -1171,7 +1171,7 @@ select interval '10-9' year to month '1' year struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 37)\n\n== SQL ==\nselect interval '10-9' year to month '1' year\n-------------------------------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 37)\n\n== SQL ==\nselect interval '10-9' year to month '1' year\n-------------------------------------^^^\n"},"queryContext":[]} -- !query @@ -1180,7 +1180,7 @@ select interval '12:11:10' hour to second '1' year struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 42)\n\n== SQL ==\nselect interval '12:11:10' hour to second '1' year\n------------------------------------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 42)\n\n== SQL ==\nselect interval '12:11:10' hour to second '1' year\n------------------------------------------^^^\n"},"queryContext":[]} -- !query @@ -1189,7 +1189,7 @@ select interval (-30) day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7"},"queryContext":[]} -- !query @@ -1198,7 +1198,7 @@ select interval (a + 1) day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Undefined function: interval. 
This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7"},"queryContext":[]} -- !query @@ -1207,7 +1207,7 @@ select interval 30 day day day struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'day'",": extra input 'day'"],"queryContext":[]} +{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":{"error":"'day'","hint":": extra input 'day'"},"queryContext":[]} -- !query @@ -1216,7 +1216,7 @@ select interval (-30) days struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7"},"queryContext":[]} -- !query @@ -1225,7 +1225,7 @@ select interval (a + 1) days struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7"},"queryContext":[]} -- !query @@ -1234,7 +1234,7 @@ select interval 30 days days days struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'days'",": extra input 'days'"],"queryContext":[]} +{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":{"error":"'days'","hint":": extra input 'days'"},"queryContext":[]} -- !query @@ -1251,7 +1251,7 @@ SELECT INTERVAL '178956970-8' YEAR TO MONTH struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nError parsing interval year-month string: integer overflow(line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '178956970-8' YEAR TO MONTH\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nError parsing interval year-month string: integer overflow(line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '178956970-8' YEAR TO MONTH\n----------------^^^\n"},"queryContext":[]} -- !query @@ -1296,7 +1296,7 @@ select struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '2' YEAR + CAST('3-3 year to month' AS DOUBLE))' due to data type mismatch: differing types in '(INTERVAL '2' YEAR + CAST('3-3 year to month' AS DOUBLE))' (interval year and double).; line 2 pos 2"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(INTERVAL '2' YEAR + CAST('3-3 year to month' AS DOUBLE))' due to data type mismatch: differing types in '(INTERVAL '2' YEAR + CAST('3-3 year to month' AS DOUBLE))' (interval year and double).; line 2 pos 2"},"queryContext":[]} 
@@ -1321,7 +1321,7 @@ select interval '2' year + '3-3'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '2' YEAR + CAST('3-3' AS DOUBLE))' due to data type mismatch: differing types in '(INTERVAL '2' YEAR + CAST('3-3' AS DOUBLE))' (interval year and double).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(INTERVAL '2' YEAR + CAST('3-3' AS DOUBLE))' due to data type mismatch: differing types in '(INTERVAL '2' YEAR + CAST('3-3' AS DOUBLE))' (interval year and double).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -1330,7 +1330,7 @@ select interval '2' year - '4'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '2' YEAR - CAST('4' AS DOUBLE))' due to data type mismatch: differing types in '(INTERVAL '2' YEAR - CAST('4' AS DOUBLE))' (interval year and double).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(INTERVAL '2' YEAR - CAST('4' AS DOUBLE))' due to data type mismatch: differing types in '(INTERVAL '2' YEAR - CAST('4' AS DOUBLE))' (interval year and double).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -1363,7 +1363,7 @@ select interval '2' year + str from interval_view
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '2' YEAR + CAST(interval_view.str AS DOUBLE))' due to data type mismatch: differing types in '(INTERVAL '2' YEAR + CAST(interval_view.str AS DOUBLE))' (interval year and double).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(INTERVAL '2' YEAR + CAST(interval_view.str AS DOUBLE))' due to data type mismatch: differing types in '(INTERVAL '2' YEAR + CAST(interval_view.str AS DOUBLE))' (interval year and double).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -1372,7 +1372,7 @@ select interval '2' year - str from interval_view
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '2' YEAR - CAST(interval_view.str AS DOUBLE))' due to data type mismatch: differing types in '(INTERVAL '2' YEAR - CAST(interval_view.str AS DOUBLE))' (interval year and double).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(INTERVAL '2' YEAR - CAST(interval_view.str AS DOUBLE))' due to data type mismatch: differing types in '(INTERVAL '2' YEAR - CAST(interval_view.str AS DOUBLE))' (interval year and double).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -1397,7 +1397,7 @@ select interval '2-2' year to month + interval '3' day
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve 'INTERVAL '2-2' YEAR TO MONTH + INTERVAL '3' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2-2' YEAR TO MONTH' is of interval year to month type.; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'INTERVAL '2-2' YEAR TO MONTH + INTERVAL '3' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2-2' YEAR TO MONTH' is of interval year to month type.; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -1406,7 +1406,7 @@ select interval '3' day + interval '2-2' year to month
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve 'INTERVAL '2-2' YEAR TO MONTH + INTERVAL '3' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2-2' YEAR TO MONTH' is of interval year to month type.; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'INTERVAL '2-2' YEAR TO MONTH + INTERVAL '3' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2-2' YEAR TO MONTH' is of interval year to month type.; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -1415,7 +1415,7 @@ select interval '2-2' year to month - interval '3' day
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve 'INTERVAL '2-2' YEAR TO MONTH + (- INTERVAL '3' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2-2' YEAR TO MONTH' is of interval year to month type.; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'INTERVAL '2-2' YEAR TO MONTH + (- INTERVAL '3' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2-2' YEAR TO MONTH' is of interval year to month type.; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -1424,7 +1424,7 @@ select interval '3' day - interval '2-2' year to month
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '3' DAY - INTERVAL '2-2' YEAR TO MONTH)' due to data type mismatch: differing types in '(INTERVAL '3' DAY - INTERVAL '2-2' YEAR TO MONTH)' (interval day and interval year to month).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(INTERVAL '3' DAY - INTERVAL '2-2' YEAR TO MONTH)' due to data type mismatch: differing types in '(INTERVAL '3' DAY - INTERVAL '2-2' YEAR TO MONTH)' (interval day and interval year to month).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -1433,7 +1433,7 @@ select 1 - interval '2' second
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '1 + (- INTERVAL '02' SECOND)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, '1' is of int type.; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '1 + (- INTERVAL '02' SECOND)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, '1' is of int type.; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -1442,7 +1442,7 @@ select 1 + interval '2' month
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(1 + INTERVAL '2' MONTH)' due to data type mismatch: differing types in '(1 + INTERVAL '2' MONTH)' (int and interval month).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(1 + INTERVAL '2' MONTH)' due to data type mismatch: differing types in '(1 + INTERVAL '2' MONTH)' (int and interval month).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -1451,7 +1451,7 @@ select interval '2' second + 1
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '1 + INTERVAL '02' SECOND' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, '1' is of int type.; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '1 + INTERVAL '02' SECOND' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, '1' is of int type.; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -1460,7 +1460,7 @@ select interval '2' month - 1
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '2' MONTH - 1)' due to data type mismatch: differing types in '(INTERVAL '2' MONTH - 1)' (interval month and int).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(INTERVAL '2' MONTH - 1)' due to data type mismatch: differing types in '(INTERVAL '2' MONTH - 1)' (interval month and int).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -1517,7 +1517,7 @@ select interval '-\t2-2\t' year to month
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":"legacy","messageParameters":["\nInterval string does not match year-month format of `[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval year to month: -\t2-2\t(line 1, pos 16)\n\n== SQL ==\nselect interval '-\\t2-2\\t' year to month\n----------------^^^\n"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"\nInterval string does not match year-month format of `[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval year to month: -\t2-2\t(line 1, pos 16)\n\n== SQL ==\nselect interval '-\\t2-2\\t' year to month\n----------------^^^\n"},"queryContext":[]}


 -- !query
@@ -1534,7 +1534,7 @@ select interval '\n-\t10\t 12:34:46.789\t' day to second
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]d h:m:s.n`, `INTERVAL [+|-]'[+|-]d h:m:s.n' DAY TO SECOND` when cast to interval day to second: \n-\t10\t 12:34:46.789\t, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '\\n-\\t10\\t 12:34:46.789\\t' day to second\n----------------^^^\n"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"\nInterval string does not match day-time format of `[+|-]d h:m:s.n`, `INTERVAL [+|-]'[+|-]d h:m:s.n' DAY TO SECOND` when cast to interval day to second: \n-\t10\t 12:34:46.789\t, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '\\n-\\t10\\t 12:34:46.789\\t' day to second\n----------------^^^\n"},"queryContext":[]}


 -- !query
@@ -1543,7 +1543,7 @@ select interval '中文 interval 1 day'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":"legacy","messageParameters":["\nCannot parse the INTERVAL value: 中文 interval 1 day(line 1, pos 7)\n\n== SQL ==\nselect interval '中文 interval 1 day'\n-------^^^\n"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the INTERVAL value: 中文 interval 1 day(line 1, pos 7)\n\n== SQL ==\nselect interval '中文 interval 1 day'\n-------^^^\n"},"queryContext":[]}


 -- !query
@@ -1552,7 +1552,7 @@ select interval 'interval中文 1 day'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":"legacy","messageParameters":["\nCannot parse the INTERVAL value: interval中文 1 day(line 1, pos 7)\n\n== SQL ==\nselect interval 'interval中文 1 day'\n-------^^^\n"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the INTERVAL value: interval中文 1 day(line 1, pos 7)\n\n== SQL ==\nselect interval 'interval中文 1 day'\n-------^^^\n"},"queryContext":[]}


 -- !query
@@ -1561,7 +1561,7 @@ select interval 'interval 1中文day'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":"legacy","messageParameters":["\nCannot parse the INTERVAL value: interval 1中文day(line 1, pos 7)\n\n== SQL ==\nselect interval 'interval 1中文day'\n-------^^^\n"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the INTERVAL value: interval 1中文day(line 1, pos 7)\n\n== SQL ==\nselect interval 'interval 1中文day'\n-------^^^\n"},"queryContext":[]}


 -- !query
@@ -1570,7 +1570,7 @@ select -(a) from values (interval '-2147483648 months', interval '2147483647 mon
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["integer overflow",""],"queryContext":[]}
+{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":{"message":"integer overflow","alternative":""},"queryContext":[]}


 -- !query
@@ -1579,7 +1579,7 @@ select a - b from values (interval '-2147483648 months', interval '2147483647 mo
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["integer overflow"," Use 'try_subtract' to tolerate overflow and return NULL instead."],"queryContext":[]}
+{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":{"message":"integer overflow","alternative":" Use 'try_subtract' to tolerate overflow and return NULL instead."},"queryContext":[]}


 -- !query
@@ -1588,7 +1588,7 @@ select b + interval '1 month' from values (interval '-2147483648 months', interv
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["integer overflow"," Use 'try_add' to tolerate overflow and return NULL instead."],"queryContext":[]}
+{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":{"message":"integer overflow","alternative":" Use 'try_add' to tolerate overflow and return NULL instead."},"queryContext":[]}


 -- !query
@@ -1597,7 +1597,7 @@ select a * 1.1 from values (interval '-2147483648 months', interval '2147483647
 struct<>
 -- !query output
 java.lang.ArithmeticException
-{"errorClass":"legacy","messageParameters":["Overflow"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Overflow"},"queryContext":[]}


 -- !query
@@ -1606,7 +1606,7 @@ select a / 0.5 from values (interval '-2147483648 months', interval '2147483647
 struct<>
 -- !query output
 java.lang.ArithmeticException
-{"errorClass":"legacy","messageParameters":["Overflow"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Overflow"},"queryContext":[]}


 -- !query
@@ -1667,7 +1667,7 @@ select interval '+'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":"legacy","messageParameters":["\nCannot parse the INTERVAL value: +(line 1, pos 7)\n\n== SQL ==\nselect interval '+'\n-------^^^\n"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the INTERVAL value: +(line 1, pos 7)\n\n== SQL ==\nselect interval '+'\n-------^^^\n"},"queryContext":[]}


 -- !query
@@ -1676,7 +1676,7 @@ select interval '+.'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":"legacy","messageParameters":["\nCannot parse the INTERVAL value: +.(line 1, pos 7)\n\n== SQL ==\nselect interval '+.'\n-------^^^\n"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the INTERVAL value: +.(line 1, pos 7)\n\n== SQL ==\nselect interval '+.'\n-------^^^\n"},"queryContext":[]}


 -- !query
@@ -1685,7 +1685,7 @@ select interval '1'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":"legacy","messageParameters":["\nCannot parse the INTERVAL value: 1(line 1, pos 7)\n\n== SQL ==\nselect interval '1'\n-------^^^\n"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the INTERVAL value: 1(line 1, pos 7)\n\n== SQL ==\nselect interval '1'\n-------^^^\n"},"queryContext":[]}


 -- !query
@@ -1694,7 +1694,7 @@ select interval '1.2'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":"legacy","messageParameters":["\nCannot parse the INTERVAL value: 1.2(line 1, pos 7)\n\n== SQL ==\nselect interval '1.2'\n-------^^^\n"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the INTERVAL value: 1.2(line 1, pos 7)\n\n== SQL ==\nselect interval '1.2'\n-------^^^\n"},"queryContext":[]}


 -- !query
@@ -1703,7 +1703,7 @@ select interval '- 2'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":"legacy","messageParameters":["\nCannot parse the INTERVAL value: - 2(line 1, pos 7)\n\n== SQL ==\nselect interval '- 2'\n-------^^^\n"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the INTERVAL value: - 2(line 1, pos 7)\n\n== SQL ==\nselect interval '- 2'\n-------^^^\n"},"queryContext":[]}


 -- !query
@@ -1712,7 +1712,7 @@ select interval '1 day -'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":"legacy","messageParameters":["\nCannot parse the INTERVAL value: 1 day -(line 1, pos 7)\n\n== SQL ==\nselect interval '1 day -'\n-------^^^\n"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the INTERVAL value: 1 day -(line 1, pos 7)\n\n== SQL ==\nselect interval '1 day -'\n-------^^^\n"},"queryContext":[]}


 -- !query
@@ -1721,7 +1721,7 @@ select interval '1 day 1'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":"legacy","messageParameters":["\nCannot parse the INTERVAL value: 1 day 1(line 1, pos 7)\n\n== SQL ==\nselect interval '1 day 1'\n-------^^^\n"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the INTERVAL value: 1 day 1(line 1, pos 7)\n\n== SQL ==\nselect interval '1 day 1'\n-------^^^\n"},"queryContext":[]}


 -- !query
@@ -1730,7 +1730,7 @@ select interval '1 day 2' day
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":"legacy","messageParameters":["\nCan only use numbers in the interval value part for multiple unit value pairs interval form, but got invalid value: 1 day 2(line 1, pos 16)\n\n== SQL ==\nselect interval '1 day 2' day\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCan only use numbers in the interval value part for multiple unit value pairs interval form, but got invalid value: 1 day 2(line 1, pos 16)\n\n== SQL ==\nselect interval '1 day 2' day\n----------------^^^\n"},"queryContext":[]} -- !query @@ -1739,7 +1739,7 @@ select interval 'interval 1' day struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCan only use numbers in the interval value part for multiple unit value pairs interval form, but got invalid value: interval 1(line 1, pos 16)\n\n== SQL ==\nselect interval 'interval 1' day\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCan only use numbers in the interval value part for multiple unit value pairs interval form, but got invalid value: interval 1(line 1, pos 16)\n\n== SQL ==\nselect interval 'interval 1' day\n----------------^^^\n"},"queryContext":[]} -- !query @@ -1772,7 +1772,7 @@ SELECT (INTERVAL '-178956970-8' YEAR TO MONTH) / -1 struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["Interval value overflows after being divided by -1"," Use 'try_divide' to tolerate overflow and return NULL instead."],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":50,"fragment":"(INTERVAL '-178956970-8' YEAR TO MONTH) / -"}]} +{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":{"message":"Interval value overflows after being divided by -1","alternative":" Use 'try_divide' to tolerate overflow and return NULL instead."},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":50,"fragment":"(INTERVAL '-178956970-8' YEAR TO MONTH) / -"}]} -- !query @@ -1781,7 +1781,7 @@ SELECT (INTERVAL '-178956970-8' YEAR TO MONTH) / -1L struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["Interval value overflows after being divided by -1"," Use 'try_divide' to tolerate overflow and return NULL instead."],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":51,"fragment":"(INTERVAL '-178956970-8' YEAR TO MONTH) / -1"}]} +{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":{"message":"Interval value overflows after being divided by -1","alternative":" Use 'try_divide' to tolerate overflow and return NULL instead."},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":51,"fragment":"(INTERVAL '-178956970-8' YEAR TO MONTH) / -1"}]} -- !query @@ -1790,7 +1790,7 @@ SELECT (INTERVAL '-178956970-8' YEAR TO MONTH) / -1.0 struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["Overflow"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Overflow"},"queryContext":[]} -- !query @@ -1799,7 +1799,7 @@ SELECT (INTERVAL '-178956970-8' YEAR TO MONTH) / -1.0D struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["not in range"],"queryContext":[]} 
+{"errorClass":"legacy","messageParameters":{"message":"not in range"},"queryContext":[]} -- !query @@ -1824,7 +1824,7 @@ SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1 struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["Interval value overflows after being divided by -1"," Use 'try_divide' to tolerate overflow and return NULL instead."],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":64,"fragment":"(INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -"}]} +{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":{"message":"Interval value overflows after being divided by -1","alternative":" Use 'try_divide' to tolerate overflow and return NULL instead."},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":64,"fragment":"(INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -"}]} -- !query @@ -1833,7 +1833,7 @@ SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1L struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["Interval value overflows after being divided by -1"," Use 'try_divide' to tolerate overflow and return NULL instead."],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":65,"fragment":"(INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1"}]} +{"errorClass":"INTERVAL_ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":{"message":"Interval value overflows after being divided by -1","alternative":" Use 'try_divide' to tolerate overflow and return NULL instead."},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":65,"fragment":"(INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1"}]} -- !query @@ -1842,7 +1842,7 @@ SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1.0 struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["Overflow"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Overflow"},"queryContext":[]} -- !query @@ -1851,7 +1851,7 @@ SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1.0D struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["not in range"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"not in range"},"queryContext":[]} -- !query @@ -1956,7 +1956,7 @@ SELECT INTERVAL '106751992 04' DAY TO HOUR struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nrequirement failed: day 106751992 outside range [0, 106751991](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '106751992 04' DAY TO HOUR\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nrequirement failed: day 106751992 outside range [0, 106751991](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '106751992 04' DAY TO HOUR\n----------------^^^\n"},"queryContext":[]} -- !query @@ -1965,7 +1965,7 @@ SELECT INTERVAL '-106751992 04' DAY TO HOUR struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nrequirement failed: day 106751992 outside range [0, 106751991](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '-106751992 04' DAY TO 
HOUR\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nrequirement failed: day 106751992 outside range [0, 106751991](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '-106751992 04' DAY TO HOUR\n----------------^^^\n"},"queryContext":[]} -- !query @@ -1974,7 +1974,7 @@ SELECT INTERVAL '2562047789:00' HOUR TO MINUTE struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nrequirement failed: hour 2562047789 outside range [0, 2562047788](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '2562047789:00' HOUR TO MINUTE\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nrequirement failed: hour 2562047789 outside range [0, 2562047788](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '2562047789:00' HOUR TO MINUTE\n----------------^^^\n"},"queryContext":[]} -- !query @@ -1983,7 +1983,7 @@ SELECT INTERVAL '-2562047789:00' HOUR TO MINUTE struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nrequirement failed: hour 2562047789 outside range [0, 2562047788](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '-2562047789:00' HOUR TO MINUTE\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nrequirement failed: hour 2562047789 outside range [0, 2562047788](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '-2562047789:00' HOUR TO MINUTE\n----------------^^^\n"},"queryContext":[]} -- !query @@ -1992,7 +1992,7 @@ SELECT INTERVAL '153722867281:54.775808' MINUTE TO SECOND struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nrequirement failed: minute 153722867281 outside range [0, 153722867280](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '153722867281:54.775808' MINUTE TO SECOND\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nrequirement failed: minute 153722867281 outside range [0, 153722867280](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '153722867281:54.775808' MINUTE TO SECOND\n----------------^^^\n"},"queryContext":[]} -- !query @@ -2001,7 +2001,7 @@ SELECT INTERVAL '-153722867281:54.775808' MINUTE TO SECOND struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nrequirement failed: minute 153722867281 outside range [0, 153722867280](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '-153722867281:54.775808' MINUTE TO SECOND\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nrequirement failed: minute 153722867281 outside range [0, 153722867280](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '-153722867281:54.775808' MINUTE TO SECOND\n----------------^^^\n"},"queryContext":[]} -- !query @@ -2138,7 +2138,7 @@ SELECT INTERVAL 1 MONTH > INTERVAL 20 DAYS struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '1' MONTH > INTERVAL '20' DAY)' due to data type mismatch: differing types in '(INTERVAL '1' MONTH > INTERVAL '20' DAY)' (interval month and interval day).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(INTERVAL '1' MONTH > INTERVAL '20' DAY)' due to data type mismatch: differing types in '(INTERVAL '1' MONTH > INTERVAL '20' DAY)' (interval month 
and interval day).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2147,7 +2147,7 @@ SELECT INTERVAL '1' DAY < '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '1' DAY < '1')' due to data type mismatch: differing types in '(INTERVAL '1' DAY < '1')' (interval day and string).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(INTERVAL '1' DAY < '1')' due to data type mismatch: differing types in '(INTERVAL '1' DAY < '1')' (interval day and string).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2156,7 +2156,7 @@ SELECT INTERVAL '1' DAY = '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '1' DAY = '1')' due to data type mismatch: differing types in '(INTERVAL '1' DAY = '1')' (interval day and string).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(INTERVAL '1' DAY = '1')' due to data type mismatch: differing types in '(INTERVAL '1' DAY = '1')' (interval day and string).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2165,7 +2165,7 @@ SELECT INTERVAL '1' DAY > '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '1' DAY > '1')' due to data type mismatch: differing types in '(INTERVAL '1' DAY > '1')' (interval day and string).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(INTERVAL '1' DAY > '1')' due to data type mismatch: differing types in '(INTERVAL '1' DAY > '1')' (interval day and string).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2174,7 +2174,7 @@ SELECT '1' < INTERVAL '1' DAY struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '('1' < INTERVAL '1' DAY)' due to data type mismatch: differing types in '('1' < INTERVAL '1' DAY)' (string and interval day).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '('1' < INTERVAL '1' DAY)' due to data type mismatch: differing types in '('1' < INTERVAL '1' DAY)' (string and interval day).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2183,7 +2183,7 @@ SELECT '1' = INTERVAL '1' DAY struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '('1' = INTERVAL '1' DAY)' due to data type mismatch: differing types in '('1' = INTERVAL '1' DAY)' (string and interval day).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '('1' = INTERVAL '1' DAY)' due to data type mismatch: differing types in '('1' = INTERVAL '1' DAY)' (string and interval day).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2192,7 +2192,7 @@ SELECT '1' > INTERVAL '1' DAY struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '('1' > INTERVAL '1' DAY)' due to data type mismatch: differing types in '('1' > INTERVAL '1' DAY)' (string and interval day).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '('1' > INTERVAL '1' DAY)' due to data type mismatch: differing types in '('1' > INTERVAL '1' DAY)' (string and interval day).; line 1 pos 7"},"queryContext":[]} -- 
!query @@ -2201,7 +2201,7 @@ SELECT INTERVAL '1' YEAR < '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '1' YEAR < '1')' due to data type mismatch: differing types in '(INTERVAL '1' YEAR < '1')' (interval year and string).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(INTERVAL '1' YEAR < '1')' due to data type mismatch: differing types in '(INTERVAL '1' YEAR < '1')' (interval year and string).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2210,7 +2210,7 @@ SELECT INTERVAL '1' YEAR = '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '1' YEAR = '1')' due to data type mismatch: differing types in '(INTERVAL '1' YEAR = '1')' (interval year and string).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(INTERVAL '1' YEAR = '1')' due to data type mismatch: differing types in '(INTERVAL '1' YEAR = '1')' (interval year and string).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2219,7 +2219,7 @@ SELECT INTERVAL '1' YEAR > '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '1' YEAR > '1')' due to data type mismatch: differing types in '(INTERVAL '1' YEAR > '1')' (interval year and string).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(INTERVAL '1' YEAR > '1')' due to data type mismatch: differing types in '(INTERVAL '1' YEAR > '1')' (interval year and string).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2228,7 +2228,7 @@ SELECT '1' < INTERVAL '1' YEAR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '('1' < INTERVAL '1' YEAR)' due to data type mismatch: differing types in '('1' < INTERVAL '1' YEAR)' (string and interval year).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '('1' < INTERVAL '1' YEAR)' due to data type mismatch: differing types in '('1' < INTERVAL '1' YEAR)' (string and interval year).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2237,7 +2237,7 @@ SELECT '1' = INTERVAL '1' YEAR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '('1' = INTERVAL '1' YEAR)' due to data type mismatch: differing types in '('1' = INTERVAL '1' YEAR)' (string and interval year).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '('1' = INTERVAL '1' YEAR)' due to data type mismatch: differing types in '('1' = INTERVAL '1' YEAR)' (string and interval year).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2246,7 +2246,7 @@ SELECT '1' > INTERVAL '1' YEAR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '('1' > INTERVAL '1' YEAR)' due to data type mismatch: differing types in '('1' > INTERVAL '1' YEAR)' (string and interval year).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '('1' > INTERVAL '1' YEAR)' due to data type mismatch: differing types in '('1' > INTERVAL '1' YEAR)' (string and interval year).; line 1 pos 7"},"queryContext":[]} -- !query @@ 
-2271,7 +2271,7 @@ SELECT array(INTERVAL 1 MONTH, INTERVAL 20 DAYS) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'array(INTERVAL '1' MONTH, INTERVAL '20' DAY)' due to data type mismatch: input to function array should all be the same type, but it's [interval month, interval day]; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'array(INTERVAL '1' MONTH, INTERVAL '20' DAY)' due to data type mismatch: input to function array should all be the same type, but it's [interval month, interval day]; line 1 pos 7"},"queryContext":[]} -- !query @@ -2296,7 +2296,7 @@ SELECT coalesce(INTERVAL 1 MONTH, INTERVAL 20 DAYS) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'coalesce(INTERVAL '1' MONTH, INTERVAL '20' DAY)' due to data type mismatch: input to function coalesce should all be the same type, but it's [interval month, interval day]; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'coalesce(INTERVAL '1' MONTH, INTERVAL '20' DAY)' due to data type mismatch: input to function coalesce should all be the same type, but it's [interval month, interval day]; line 1 pos 7"},"queryContext":[]} -- !query @@ -2353,7 +2353,7 @@ SELECT div(INTERVAL '1' MONTH, INTERVAL '-1' DAY) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(INTERVAL '1' MONTH div INTERVAL '-1' DAY)' due to data type mismatch: differing types in '(INTERVAL '1' MONTH div INTERVAL '-1' DAY)' (interval month and interval day).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(INTERVAL '1' MONTH div INTERVAL '-1' DAY)' due to data type mismatch: differing types in '(INTERVAL '1' MONTH div INTERVAL '-1' DAY)' (interval month and interval day).; line 1 pos 7"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/join-lateral.sql.out b/sql/core/src/test/resources/sql-tests/results/join-lateral.sql.out index cee6acb23aa8d..7678fd26ffd58 100644 --- a/sql/core/src/test/resources/sql-tests/results/join-lateral.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/join-lateral.sql.out @@ -114,7 +114,7 @@ SELECT * FROM t1, LATERAL (SELECT t1.*, t2.* FROM t2, LATERAL (SELECT t1.*, t2.* struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 't1.*' given input columns 'c1, c2'; line 1 pos 70"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 't1.*' given input columns 'c1, c2'; line 1 pos 70"},"queryContext":[]} -- !query @@ -149,7 +149,7 @@ SELECT * FROM t1 NATURAL JOIN LATERAL (SELECT c1 + c2 AS c2) struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"UNSUPPORTED_FEATURE","errorSubClass":"LATERAL_NATURAL_JOIN","sqlState":"0A000","messageParameters":[],"queryContext":[]} +{"errorClass":"UNSUPPORTED_FEATURE","errorSubClass":"LATERAL_NATURAL_JOIN","sqlState":"0A000","messageParameters":{},"queryContext":[]} -- !query @@ -158,7 +158,7 @@ SELECT * FROM t1 JOIN LATERAL (SELECT c1 + c2 AS c2) USING (c2) struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException 
-{"errorClass":"UNSUPPORTED_FEATURE","errorSubClass":"LATERAL_JOIN_USING","sqlState":"0A000","messageParameters":[],"queryContext":[]} +{"errorClass":"UNSUPPORTED_FEATURE","errorSubClass":"LATERAL_JOIN_USING","sqlState":"0A000","messageParameters":{},"queryContext":[]} -- !query @@ -256,7 +256,7 @@ SELECT * FROM t1 JOIN LATERAL (SELECT t1.c1 AS a, t2.c1 AS b) s JOIN t2 ON s.b = struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`t2`.`c1`",""],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`t2`.`c1`","objectList":""},"queryContext":[]} -- !query @@ -281,7 +281,7 @@ SELECT * FROM t1, LATERAL (SELECT c1 + c2 + rand(0) AS c3) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Non-deterministic lateral subqueries are not supported when joining with outer relations that produce more than one row\nSubqueryAlias __auto_generated_subquery_name\n+- Project [(cast((outer(c1#x) + outer(c2#x)) as double) + rand(0)) AS c3#x]\n +- OneRowRelation\n; line 1 pos 9"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Non-deterministic lateral subqueries are not supported when joining with outer relations that produce more than one row\nSubqueryAlias __auto_generated_subquery_name\n+- Project [(cast((outer(c1#x) + outer(c2#x)) as double) + rand(0)) AS c3#x]\n +- OneRowRelation\n; line 1 pos 9"},"queryContext":[]} -- !query @@ -290,7 +290,7 @@ SELECT * FROM t1, LATERAL (SELECT rand(0) FROM t2) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Non-deterministic lateral subqueries are not supported when joining with outer relations that produce more than one row\nSubqueryAlias __auto_generated_subquery_name\n+- Project [rand(0) AS rand(0)#x]\n +- SubqueryAlias spark_catalog.default.t2\n +- View (`spark_catalog`.`default`.`t2`, [c1#x,c2#x])\n +- Project [cast(col1#x as int) AS c1#x, cast(col2#x as int) AS c2#x]\n +- LocalRelation [col1#x, col2#x]\n; line 1 pos 9"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Non-deterministic lateral subqueries are not supported when joining with outer relations that produce more than one row\nSubqueryAlias __auto_generated_subquery_name\n+- Project [rand(0) AS rand(0)#x]\n +- SubqueryAlias spark_catalog.default.t2\n +- View (`spark_catalog`.`default`.`t2`, [c1#x,c2#x])\n +- Project [cast(col1#x as int) AS c1#x, cast(col2#x as int) AS c2#x]\n +- LocalRelation [col1#x, col2#x]\n; line 1 pos 9"},"queryContext":[]} -- !query @@ -299,7 +299,7 @@ SELECT * FROM t1 JOIN LATERAL (SELECT * FROM t2) s ON t1.c1 + rand(0) = s.c1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Lateral join condition cannot be non-deterministic: ((CAST(spark_catalog.default.t1.c1 AS DOUBLE) + rand(0)) = CAST(s.c1 AS DOUBLE)); line 1 pos 17"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Lateral join condition cannot be non-deterministic: ((CAST(spark_catalog.default.t1.c1 AS DOUBLE) + rand(0)) = CAST(s.c1 AS DOUBLE)); line 1 pos 17"},"queryContext":[]} -- !query @@ -363,7 +363,7 @@ SELECT * FROM t1, LATERAL (SELECT * FROM t2, LATERAL (SELECT t1.c1 + t2.c1)) struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`t1`.`c1`",""],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`t1`.`c1`","objectList":""},"queryContext":[]} -- !query @@ -372,7 +372,7 @@ SELECT * FROM t1, LATERAL (SELECT * FROM (SELECT c1), LATERAL (SELECT c2)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`c2`",""],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`c2`","objectList":""},"queryContext":[]} -- !query @@ -399,7 +399,7 @@ SELECT * FROM t1, LATERAL (SELECT c1, (SELECT SUM(c2) FROM t2 WHERE c1 = t1.c1)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`t1`.`c1`","`spark_catalog`.`default`.`t2`.`c1`, `spark_catalog`.`default`.`t2`.`c2`"],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`t1`.`c1`","objectList":"`spark_catalog`.`default`.`t2`.`c1`, `spark_catalog`.`default`.`t2`.`c2`"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out index a2a9ac486c2c8..f4217387bf691 100644 --- a/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out @@ -69,7 +69,7 @@ select to_json(named_struct('a', 1, 'b', 2), named_struct('mode', 'PERMISSIVE')) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Must use a map() function for options; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Must use a map() function for options; line 1 pos 7"},"queryContext":[]} -- !query @@ -78,7 +78,7 @@ select to_json(named_struct('a', 1, 'b', 2), map('mode', 1)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["A type of keys and values in map() must be string, but got map; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"A type of keys and values in map() must be string, but got map; line 1 pos 7"},"queryContext":[]} -- !query @@ -87,7 +87,7 @@ select to_json() struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Invalid number of arguments for function to_json. Expected: one of 1 and 2; Found: 0; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Invalid number of arguments for function to_json. 
Expected: one of 1 and 2; Found: 0; line 1 pos 7"},"queryContext":[]} -- !query @@ -112,7 +112,7 @@ select from_json('{"a":1}', 1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["The expression '1' is not a valid schema string.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"The expression '1' is not a valid schema string.; line 1 pos 7"},"queryContext":[]} -- !query @@ -121,7 +121,7 @@ select from_json('{"a":1}', 'a InvalidType') struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Cannot parse the data type: \n[PARSE_SYNTAX_ERROR] Syntax error at or near 'InvalidType': extra input 'InvalidType'(line 1, pos 2)\n\n== SQL ==\na InvalidType\n--^^^\n\nFailed fallback parsing: \nDataType invalidtype is not supported.(line 1, pos 2)\n\n== SQL ==\na InvalidType\n--^^^\n; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Cannot parse the data type: \n[PARSE_SYNTAX_ERROR] Syntax error at or near 'InvalidType': extra input 'InvalidType'(line 1, pos 2)\n\n== SQL ==\na InvalidType\n--^^^\n\nFailed fallback parsing: \nDataType invalidtype is not supported.(line 1, pos 2)\n\n== SQL ==\na InvalidType\n--^^^\n; line 1 pos 7"},"queryContext":[]} -- !query @@ -130,7 +130,7 @@ select from_json('{"a":1}', 'a INT', named_struct('mode', 'PERMISSIVE')) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Must use a map() function for options; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Must use a map() function for options; line 1 pos 7"},"queryContext":[]} -- !query @@ -139,7 +139,7 @@ select from_json('{"a":1}', 'a INT', map('mode', 1)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["A type of keys and values in map() must be string, but got map; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"A type of keys and values in map() must be string, but got map; line 1 pos 7"},"queryContext":[]} -- !query @@ -148,7 +148,7 @@ select from_json() struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Invalid number of arguments for function from_json. Expected: one of 2 and 3; Found: 0; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Invalid number of arguments for function from_json. 
Expected: one of 2 and 3; Found: 0; line 1 pos 7"},"queryContext":[]} -- !query @@ -323,7 +323,7 @@ select from_json( struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":["'02-29'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":{"datetime":"'02-29'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -335,7 +335,7 @@ select from_json( struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":["'02-29'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"PARSE_DATETIME_BY_NEW_PARSER","messageParameters":{"datetime":"'02-29'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -376,7 +376,7 @@ select schema_of_json(null) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'schema_of_json(NULL)' due to data type mismatch: The input json should be a foldable string expression and not null; however, got NULL.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'schema_of_json(NULL)' due to data type mismatch: The input json should be a foldable string expression and not null; however, got NULL.; line 1 pos 7"},"queryContext":[]} -- !query @@ -393,7 +393,7 @@ SELECT schema_of_json(jsonField) FROM jsonTable struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'schema_of_json(jsontable.jsonField)' due to data type mismatch: The input json should be a foldable string expression and not null; however, got jsontable.jsonField.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'schema_of_json(jsontable.jsonField)' due to data type mismatch: The input json should be a foldable string expression and not null; however, got jsontable.jsonField.; line 1 pos 7"},"queryContext":[]} -- !query @@ -410,7 +410,7 @@ select json_array_length(2) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'json_array_length(2)' due to data type mismatch: argument 1 requires string type, however, '2' is of int type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'json_array_length(2)' due to data type mismatch: argument 1 requires string type, however, '2' is of int type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -419,7 +419,7 @@ select json_array_length() struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Invalid number of arguments for function json_array_length. Expected: 1; Found: 0; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Invalid number of arguments for function json_array_length. 
Expected: 1; Found: 0; line 1 pos 7"},"queryContext":[]} -- !query @@ -492,7 +492,7 @@ select json_object_keys() struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Invalid number of arguments for function json_object_keys. Expected: 1; Found: 0; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Invalid number of arguments for function json_object_keys. Expected: 1; Found: 0; line 1 pos 7"},"queryContext":[]} -- !query @@ -509,7 +509,7 @@ select json_object_keys(200) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'json_object_keys(200)' due to data type mismatch: argument 1 requires string type, however, '200' is of int type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'json_object_keys(200)' due to data type mismatch: argument 1 requires string type, however, '200' is of int type.; line 1 pos 7"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/like-all.sql.out b/sql/core/src/test/resources/sql-tests/results/like-all.sql.out index cfa3cc77be566..d3bca62168063 100644 --- a/sql/core/src/test/resources/sql-tests/results/like-all.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/like-all.sql.out @@ -129,4 +129,4 @@ SELECT company FROM like_all_table WHERE company LIKE ALL () struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nExpected something between '(' and ')'.(line 1, pos 49)\n\n== SQL ==\nSELECT company FROM like_all_table WHERE company LIKE ALL ()\n-------------------------------------------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nExpected something between '(' and ')'.(line 1, pos 49)\n\n== SQL ==\nSELECT company FROM like_all_table WHERE company LIKE ALL ()\n-------------------------------------------------^^^\n"},"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/like-any.sql.out b/sql/core/src/test/resources/sql-tests/results/like-any.sql.out index 01f8702ee759b..7ff0ec6833bcb 100644 --- a/sql/core/src/test/resources/sql-tests/results/like-any.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/like-any.sql.out @@ -135,4 +135,4 @@ SELECT company FROM like_any_table WHERE company LIKE ANY () struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nExpected something between '(' and ')'.(line 1, pos 49)\n\n== SQL ==\nSELECT company FROM like_any_table WHERE company LIKE ANY ()\n-------------------------------------------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nExpected something between '(' and ')'.(line 1, pos 49)\n\n== SQL ==\nSELECT company FROM like_any_table WHERE company LIKE ANY ()\n-------------------------------------------------^^^\n"},"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/limit.sql.out b/sql/core/src/test/resources/sql-tests/results/limit.sql.out index c23d6454290c4..3afa0e5679945 100644 --- a/sql/core/src/test/resources/sql-tests/results/limit.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/limit.sql.out @@ -50,7 +50,7 @@ SELECT * FROM testdata LIMIT -1 struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":"legacy","messageParameters":["The limit expression must be equal to or greater than 0, but got -1"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"The limit expression must be equal to or greater than 0, but got -1"},"queryContext":[]} -- !query @@ -59,7 +59,7 @@ SELECT * FROM testData TABLESAMPLE (-1 ROWS) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["The limit expression must be equal to or greater than 0, but got -1"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"The limit expression must be equal to or greater than 0, but got -1"},"queryContext":[]} -- !query @@ -76,7 +76,7 @@ SELECT * FROM testdata LIMIT CAST(NULL AS INT) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["The evaluated limit expression must not be null, but got CAST(NULL AS INT)"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"The evaluated limit expression must not be null, but got CAST(NULL AS INT)"},"queryContext":[]} -- !query @@ -85,7 +85,7 @@ SELECT * FROM testdata LIMIT key > 3 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["The limit expression must evaluate to a constant value, but got (spark_catalog.default.testdata.key > 3)"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"The limit expression must evaluate to a constant value, but got (spark_catalog.default.testdata.key > 3)"},"queryContext":[]} -- !query @@ -94,7 +94,7 @@ SELECT * FROM testdata LIMIT true struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["The limit expression must be integer type, but got boolean"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"The limit expression must be integer type, but got boolean"},"queryContext":[]} -- !query @@ -103,7 +103,7 @@ SELECT * FROM testdata LIMIT 'a' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["The limit expression must be integer type, but got string"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"The limit expression must be integer type, but got string"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/literals.sql.out b/sql/core/src/test/resources/sql-tests/results/literals.sql.out index 3085fdaeac7b1..3562286a2734d 100644 --- a/sql/core/src/test/resources/sql-tests/results/literals.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/literals.sql.out @@ -37,7 +37,7 @@ select 128Y struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nNumeric literal 128 does not fit in range [-128, 127] for type tinyint(line 1, pos 7)\n\n== SQL ==\nselect 128Y\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nNumeric literal 128 does not fit in range [-128, 127] for type tinyint(line 1, pos 7)\n\n== SQL ==\nselect 128Y\n-------^^^\n"},"queryContext":[]} -- !query @@ -62,7 +62,7 @@ select 32768S struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nNumeric literal 32768 does not fit in range [-32768, 32767] for type smallint(line 1, pos 7)\n\n== SQL ==\nselect 
32768S\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nNumeric literal 32768 does not fit in range [-32768, 32767] for type smallint(line 1, pos 7)\n\n== SQL ==\nselect 32768S\n-------^^^\n"},"queryContext":[]} -- !query @@ -87,7 +87,7 @@ select 9223372036854775808L struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nNumeric literal 9223372036854775808 does not fit in range [-9223372036854775808, 9223372036854775807] for type bigint(line 1, pos 7)\n\n== SQL ==\nselect 9223372036854775808L\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nNumeric literal 9223372036854775808 does not fit in range [-9223372036854775808, 9223372036854775807] for type bigint(line 1, pos 7)\n\n== SQL ==\nselect 9223372036854775808L\n-------^^^\n"},"queryContext":[]} -- !query @@ -128,7 +128,7 @@ select 1234567890123456789012345678901234567890 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\ndecimal can only support precision up to 38\n== SQL ==\nselect 1234567890123456789012345678901234567890"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\ndecimal can only support precision up to 38\n== SQL ==\nselect 1234567890123456789012345678901234567890"},"queryContext":[]} -- !query @@ -137,7 +137,7 @@ select 1234567890123456789012345678901234567890.0 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\ndecimal can only support precision up to 38\n== SQL ==\nselect 1234567890123456789012345678901234567890.0"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\ndecimal can only support precision up to 38\n== SQL ==\nselect 1234567890123456789012345678901234567890.0"},"queryContext":[]} -- !query @@ -162,7 +162,7 @@ select -3.4028235E39f struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nNumeric literal -3.4028235E39 does not fit in range [-3.4028234663852886E+38, 3.4028234663852886E+38] for type float(line 1, pos 7)\n\n== SQL ==\nselect -3.4028235E39f\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nNumeric literal -3.4028235E39 does not fit in range [-3.4028234663852886E+38, 3.4028234663852886E+38] for type float(line 1, pos 7)\n\n== SQL ==\nselect -3.4028235E39f\n-------^^^\n"},"queryContext":[]} -- !query @@ -187,7 +187,7 @@ select .e3 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'.'",""],"queryContext":[]} +{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":{"error":"'.'","hint":""},"queryContext":[]} -- !query @@ -196,7 +196,7 @@ select 1E309, -1E309 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nNumeric literal 1E309 does not fit in range [-1.7976931348623157E+308, 1.7976931348623157E+308] for type double(line 1, pos 7)\n\n== SQL ==\nselect 1E309, -1E309\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nNumeric literal 1E309 does not fit in range [-1.7976931348623157E+308, 1.7976931348623157E+308] for type double(line 1, pos 7)\n\n== SQL ==\nselect 1E309, 
-1E309\n-------^^^\n"},"queryContext":[]} -- !query @@ -295,7 +295,7 @@ select date 'mar 11 2016' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: mar 11 2016(line 1, pos 7)\n\n== SQL ==\nselect date 'mar 11 2016'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the DATE value: mar 11 2016(line 1, pos 7)\n\n== SQL ==\nselect date 'mar 11 2016'\n-------^^^\n"},"queryContext":[]} -- !query @@ -312,7 +312,7 @@ select timestamp '2016-33-11 20:54:00.000' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the TIMESTAMP value: 2016-33-11 20:54:00.000(line 1, pos 7)\n\n== SQL ==\nselect timestamp '2016-33-11 20:54:00.000'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the TIMESTAMP value: 2016-33-11 20:54:00.000(line 1, pos 7)\n\n== SQL ==\nselect timestamp '2016-33-11 20:54:00.000'\n-------^^^\n"},"queryContext":[]} -- !query @@ -321,7 +321,7 @@ select GEO '(10,-6)' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nLiterals of type 'GEO' are currently not supported.(line 1, pos 7)\n\n== SQL ==\nselect GEO '(10,-6)'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nLiterals of type 'GEO' are currently not supported.(line 1, pos 7)\n\n== SQL ==\nselect GEO '(10,-6)'\n-------^^^\n"},"queryContext":[]} -- !query @@ -338,7 +338,7 @@ select 1.20E-38BD struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\ndecimal can only support precision up to 38(line 1, pos 7)\n\n== SQL ==\nselect 1.20E-38BD\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\ndecimal can only support precision up to 38(line 1, pos 7)\n\n== SQL ==\nselect 1.20E-38BD\n-------^^^\n"},"queryContext":[]} -- !query @@ -355,7 +355,7 @@ select X'XuZ' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\ncontains illegal character for hexBinary: 0XuZ(line 1, pos 7)\n\n== SQL ==\nselect X'XuZ'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\ncontains illegal character for hexBinary: 0XuZ(line 1, pos 7)\n\n== SQL ==\nselect X'XuZ'\n-------^^^\n"},"queryContext":[]} -- !query @@ -372,7 +372,7 @@ select +date '1999-01-01' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(+ DATE '1999-01-01')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'DATE '1999-01-01'' is of date type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(+ DATE '1999-01-01')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'DATE '1999-01-01'' is of date type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -381,7 +381,7 @@ select +timestamp '1999-01-01' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(+ TIMESTAMP '1999-01-01 00:00:00')' due to 
data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'TIMESTAMP '1999-01-01 00:00:00'' is of timestamp type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(+ TIMESTAMP '1999-01-01 00:00:00')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'TIMESTAMP '1999-01-01 00:00:00'' is of timestamp type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -398,7 +398,7 @@ select +map(1, 2) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(+ map(1, 2))' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'map(1, 2)' is of map type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(+ map(1, 2))' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'map(1, 2)' is of map type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -407,7 +407,7 @@ select +array(1,2) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(+ array(1, 2))' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'array(1, 2)' is of array type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(+ array(1, 2))' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'array(1, 2)' is of array type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -416,7 +416,7 @@ select +named_struct('a', 1, 'b', 'spark') struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(+ named_struct('a', 1, 'b', 'spark'))' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'named_struct('a', 1, 'b', 'spark')' is of struct type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(+ named_struct('a', 1, 'b', 'spark'))' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'named_struct('a', 1, 'b', 'spark')' is of struct type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -425,7 +425,7 @@ select +X'1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(+ X'01')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'X'01'' is of binary type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(+ X'01')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'X'01'' is of binary type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -434,7 +434,7 @@ select -date '1999-01-01' struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":"legacy","messageParameters":["cannot resolve '(- DATE '1999-01-01')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'DATE '1999-01-01'' is of date type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(- DATE '1999-01-01')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'DATE '1999-01-01'' is of date type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -443,7 +443,7 @@ select -timestamp '1999-01-01' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(- TIMESTAMP '1999-01-01 00:00:00')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'TIMESTAMP '1999-01-01 00:00:00'' is of timestamp type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(- TIMESTAMP '1999-01-01 00:00:00')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'TIMESTAMP '1999-01-01 00:00:00'' is of timestamp type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -452,4 +452,4 @@ select -x'2379ACFe' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(- X'2379ACFE')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'X'2379ACFE'' is of binary type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(- X'2379ACFE')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'X'2379ACFE'' is of binary type.; line 1 pos 7"},"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/map.sql.out b/sql/core/src/test/resources/sql-tests/results/map.sql.out index 8f4a909f4b464..b6d8a3767d194 100644 --- a/sql/core/src/test/resources/sql-tests/results/map.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/map.sql.out @@ -69,7 +69,7 @@ select map_contains_key(map('1', 'a', '2', 'b'), 1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'map_contains_key(map('1', 'a', '2', 'b'), 1)' due to data type mismatch: Input to function map_contains_key should have been map followed by a value with same key type, but it's [map, int].; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'map_contains_key(map('1', 'a', '2', 'b'), 1)' due to data type mismatch: Input to function map_contains_key should have been map followed by a value with same key type, but it's [map, int].; line 1 pos 7"},"queryContext":[]} -- !query @@ -78,4 +78,4 @@ select map_contains_key(map(1, 'a', 2, 'b'), '1') struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'map_contains_key(map(1, 'a', 2, 'b'), '1')' due to data type mismatch: Input to function map_contains_key should have been map followed by a value with same key type, but it's [map, string].; line 1 pos 7"],"queryContext":[]} 
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'map_contains_key(map(1, 'a', 2, 'b'), '1')' due to data type mismatch: Input to function map_contains_key should have been map followed by a value with same key type, but it's [map, string].; line 1 pos 7"},"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/misc-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/misc-functions.sql.out index ccecafafa58aa..3d9b011f10b6c 100644 --- a/sql/core/src/test/resources/sql-tests/results/misc-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/misc-functions.sql.out @@ -69,7 +69,7 @@ SELECT assert_true(false) struct<> -- !query output java.lang.RuntimeException -{"errorClass":"legacy","messageParameters":["'false' is not true!"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"'false' is not true!"},"queryContext":[]} -- !query @@ -78,7 +78,7 @@ SELECT assert_true(boolean(0)) struct<> -- !query output java.lang.RuntimeException -{"errorClass":"legacy","messageParameters":["'cast(0 as boolean)' is not true!"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"'cast(0 as boolean)' is not true!"},"queryContext":[]} -- !query @@ -87,7 +87,7 @@ SELECT assert_true(null) struct<> -- !query output java.lang.RuntimeException -{"errorClass":"legacy","messageParameters":["'null' is not true!"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"'null' is not true!"},"queryContext":[]} -- !query @@ -96,7 +96,7 @@ SELECT assert_true(boolean(null)) struct<> -- !query output java.lang.RuntimeException -{"errorClass":"legacy","messageParameters":["'cast(null as boolean)' is not true!"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"'cast(null as boolean)' is not true!"},"queryContext":[]} -- !query @@ -105,7 +105,7 @@ SELECT assert_true(false, 'custom error message') struct<> -- !query output java.lang.RuntimeException -{"errorClass":"legacy","messageParameters":["custom error message"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"custom error message"},"queryContext":[]} -- !query @@ -122,7 +122,7 @@ SELECT raise_error('error message') struct<> -- !query output java.lang.RuntimeException -{"errorClass":"legacy","messageParameters":["error message"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"error message"},"queryContext":[]} -- !query @@ -131,4 +131,4 @@ SELECT if(v > 5, raise_error('too big: ' || v), v + 1) FROM tbl_misc struct<> -- !query output java.lang.RuntimeException -{"errorClass":"legacy","messageParameters":["too big: 8"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"too big: 8"},"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/natural-join.sql.out b/sql/core/src/test/resources/sql-tests/results/natural-join.sql.out index 4f7bba18a7ac1..c53fdae5431bb 100644 --- a/sql/core/src/test/resources/sql-tests/results/natural-join.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/natural-join.sql.out @@ -229,7 +229,7 @@ SELECT nt2.k FROM (SELECT * FROM nt1 natural join nt2) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`nt2`.`k`","`__auto_generated_subquery_name`.`k`, `__auto_generated_subquery_name`.`v1`, `__auto_generated_subquery_name`.`v2`"],"queryContext":[]} 
+{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`nt2`.`k`","objectList":"`__auto_generated_subquery_name`.`k`, `__auto_generated_subquery_name`.`v1`, `__auto_generated_subquery_name`.`v2`"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/order-by-ordinal.sql.out b/sql/core/src/test/resources/sql-tests/results/order-by-ordinal.sql.out index 1aa6259dc23d1..a66966d811585 100644 --- a/sql/core/src/test/resources/sql-tests/results/order-by-ordinal.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/order-by-ordinal.sql.out @@ -72,7 +72,7 @@ select * from data order by 0 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["ORDER BY position 0 is not in select list (valid range is [1, 2]); line 1 pos 28"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"ORDER BY position 0 is not in select list (valid range is [1, 2]); line 1 pos 28"},"queryContext":[]} -- !query @@ -81,7 +81,7 @@ select * from data order by -1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["ORDER BY position -1 is not in select list (valid range is [1, 2]); line 1 pos 28"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"ORDER BY position -1 is not in select list (valid range is [1, 2]); line 1 pos 28"},"queryContext":[]} -- !query @@ -90,7 +90,7 @@ select * from data order by 3 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["ORDER BY position 3 is not in select list (valid range is [1, 2]); line 1 pos 28"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"ORDER BY position 3 is not in select list (valid range is [1, 2]); line 1 pos 28"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/percentiles.sql.out b/sql/core/src/test/resources/sql-tests/results/percentiles.sql.out index 1a0e4f49971f6..5d27f15e79c37 100644 --- a/sql/core/src/test/resources/sql-tests/results/percentiles.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/percentiles.sql.out @@ -178,7 +178,7 @@ ORDER BY salary struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Cannot specify order by or frame for 'percentile_cont'."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Cannot specify order by or frame for 'percentile_cont'."},"queryContext":[]} -- !query @@ -194,7 +194,7 @@ ORDER BY salary struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Cannot specify order by or frame for 'percentile_disc'."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Cannot specify order by or frame for 'percentile_disc'."},"queryContext":[]} -- !query @@ -209,7 +209,7 @@ ORDER BY salary struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Cannot specify order by or frame for 'median'."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Cannot specify order by or frame for 'median'."},"queryContext":[]} -- !query @@ -225,7 +225,7 @@ ORDER BY salary struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Cannot specify order by or frame for 'percentile_cont'."],"queryContext":[]} 
+{"errorClass":"legacy","messageParameters":{"message":"Cannot specify order by or frame for 'percentile_cont'."},"queryContext":[]} -- !query @@ -241,7 +241,7 @@ ORDER BY salary struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Cannot specify order by or frame for 'percentile_disc'."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Cannot specify order by or frame for 'percentile_disc'."},"queryContext":[]} -- !query @@ -256,7 +256,7 @@ ORDER BY salary struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Cannot specify order by or frame for 'median'."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Cannot specify order by or frame for 'median'."},"queryContext":[]} -- !query @@ -336,7 +336,7 @@ ORDER BY salary struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Cannot specify order by or frame for 'percentile_cont'."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Cannot specify order by or frame for 'percentile_cont'."},"queryContext":[]} -- !query @@ -353,7 +353,7 @@ ORDER BY salary struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Cannot specify order by or frame for 'percentile_disc'."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Cannot specify order by or frame for 'percentile_disc'."},"queryContext":[]} -- !query @@ -369,7 +369,7 @@ ORDER BY salary struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Cannot specify order by or frame for 'median'."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Cannot specify order by or frame for 'median'."},"queryContext":[]} -- !query @@ -386,7 +386,7 @@ ORDER BY salary struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Cannot specify order by or frame for 'percentile_cont'."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Cannot specify order by or frame for 'percentile_cont'."},"queryContext":[]} -- !query @@ -403,7 +403,7 @@ ORDER BY salary struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Cannot specify order by or frame for 'percentile_disc'."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Cannot specify order by or frame for 'percentile_disc'."},"queryContext":[]} -- !query @@ -419,7 +419,7 @@ ORDER BY salary struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Cannot specify order by or frame for 'median'."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Cannot specify order by or frame for 'median'."},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/pivot.sql.out b/sql/core/src/test/resources/sql-tests/results/pivot.sql.out index c43199ea50bca..f42c4a01475ba 100644 --- a/sql/core/src/test/resources/sql-tests/results/pivot.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/pivot.sql.out @@ -199,7 +199,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Aggregate expression required for pivot, but 'coursesales.earnings' did not 
appear in any aggregate function."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Aggregate expression required for pivot, but 'coursesales.earnings' did not appear in any aggregate function."},"queryContext":[]} -- !query @@ -214,7 +214,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Aggregate expression required for pivot, but '__auto_generated_subquery_name.year' did not appear in any aggregate function."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Aggregate expression required for pivot, but '__auto_generated_subquery_name.year' did not appear in any aggregate function."},"queryContext":[]} -- !query @@ -229,7 +229,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`year`","`__auto_generated_subquery_name`.`course`, `__auto_generated_subquery_name`.`earnings`"],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`year`","objectList":"`__auto_generated_subquery_name`.`course`, `__auto_generated_subquery_name`.`earnings`"},"queryContext":[]} -- !query @@ -259,7 +259,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["It is not allowed to use an aggregate function in the argument of another aggregate function. Please use the inner aggregate function in a sub-query."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"It is not allowed to use an aggregate function in the argument of another aggregate function. Please use the inner aggregate function in a sub-query."},"queryContext":[]} -- !query @@ -310,7 +310,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"PIVOT_VALUE_DATA_TYPE_MISMATCH","sqlState":"42000","messageParameters":["dotNET","string","struct"],"queryContext":[]} +{"errorClass":"PIVOT_VALUE_DATA_TYPE_MISMATCH","sqlState":"42000","messageParameters":{"value":"dotNET","valueType":"string","pivotType":"struct"},"queryContext":[]} -- !query @@ -323,7 +323,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`s`","`coursesales`.`year`, `coursesales`.`course`, `coursesales`.`earnings`"],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`s`","objectList":"`coursesales`.`year`, `coursesales`.`course`, `coursesales`.`earnings`"},"queryContext":[]} -- !query @@ -336,7 +336,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"NON_LITERAL_PIVOT_VALUES","sqlState":"42000","messageParameters":["\"course\""],"queryContext":[]} +{"errorClass":"NON_LITERAL_PIVOT_VALUES","sqlState":"42000","messageParameters":{"expression":"\"course\""},"queryContext":[]} -- !query @@ -455,7 +455,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"INCOMPARABLE_PIVOT_COLUMN","sqlState":"42000","messageParameters":["`__auto_generated_subquery_name`.`m`"],"queryContext":[]} +{"errorClass":"INCOMPARABLE_PIVOT_COLUMN","sqlState":"42000","messageParameters":{"columnName":"`__auto_generated_subquery_name`.`m`"},"queryContext":[]} -- !query @@ -472,7 +472,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":"INCOMPARABLE_PIVOT_COLUMN","sqlState":"42000","messageParameters":["`named_struct('course', __auto_generated_subquery_name`.`course, 'm', __auto_generated_subquery_name`.`m)`"],"queryContext":[]} +{"errorClass":"INCOMPARABLE_PIVOT_COLUMN","sqlState":"42000","messageParameters":{"columnName":"`named_struct('course', __auto_generated_subquery_name`.`course, 'm', __auto_generated_subquery_name`.`m)`"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/aggregates_part1.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/aggregates_part1.sql.out index f64b771c25a7a..398f086b59e8b 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/aggregates_part1.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/aggregates_part1.sql.out @@ -484,7 +484,7 @@ having exists (select 1 from onek b struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Found an aggregate function in a correlated predicate that has both outer and local references, which is not supported: sum(DISTINCT (outer(a.four) + b.four))"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Found an aggregate function in a correlated predicate that has both outer and local references, which is not supported: sum(DISTINCT (outer(a.four) + b.four))"},"queryContext":[]} -- !query @@ -495,4 +495,4 @@ from tenk1 o struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`o`.`unique1`","`i`.`unique1`, `i`.`unique2`, `i`.`hundred`, `i`.`even`, `i`.`four`, `i`.`stringu1`, `i`.`ten`, `i`.`odd`, `i`.`string4`, `i`.`stringu2`, `i`.`tenthous`, `i`.`twenty`, `i`.`two`, `i`.`thousand`, `i`.`fivethous`, `i`.`twothousand`"],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`o`.`unique1`","objectList":"`i`.`unique1`, `i`.`unique2`, `i`.`hundred`, `i`.`even`, `i`.`four`, `i`.`stringu1`, `i`.`ten`, `i`.`odd`, `i`.`string4`, `i`.`stringu2`, `i`.`tenthous`, `i`.`twenty`, `i`.`two`, `i`.`thousand`, `i`.`fivethous`, `i`.`twothousand`"},"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/aggregates_part3.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/aggregates_part3.sql.out index cbd9956362887..1948657f3d2a3 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/aggregates_part3.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/aggregates_part3.sql.out @@ -5,7 +5,7 @@ select max(min(unique1)) from tenk1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["It is not allowed to use an aggregate function in the argument of another aggregate function. Please use the inner aggregate function in a sub-query."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"It is not allowed to use an aggregate function in the argument of another aggregate function. 
Please use the inner aggregate function in a sub-query."},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/boolean.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/boolean.sql.out index 2f132c78eb7de..2bf80fd1d0f78 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/boolean.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/boolean.sql.out @@ -53,7 +53,7 @@ SELECT boolean('test') AS error struct<> -- !query output org.apache.spark.SparkRuntimeException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'test'","\"STRING\"","\"BOOLEAN\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":21,"fragment":"boolean('test'"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'test'","sourceType":"\"STRING\"","targetType":"\"BOOLEAN\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":21,"fragment":"boolean('test'"}]} -- !query @@ -70,7 +70,7 @@ SELECT boolean('foo') AS error struct<> -- !query output org.apache.spark.SparkRuntimeException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'foo'","\"STRING\"","\"BOOLEAN\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":20,"fragment":"boolean('foo'"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'foo'","sourceType":"\"STRING\"","targetType":"\"BOOLEAN\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":20,"fragment":"boolean('foo'"}]} -- !query @@ -95,7 +95,7 @@ SELECT boolean('yeah') AS error struct<> -- !query output org.apache.spark.SparkRuntimeException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'yeah'","\"STRING\"","\"BOOLEAN\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":21,"fragment":"boolean('yeah'"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'yeah'","sourceType":"\"STRING\"","targetType":"\"BOOLEAN\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":21,"fragment":"boolean('yeah'"}]} -- !query @@ -120,7 +120,7 @@ SELECT boolean('nay') AS error struct<> -- !query output org.apache.spark.SparkRuntimeException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'nay'","\"STRING\"","\"BOOLEAN\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":20,"fragment":"boolean('nay'"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'nay'","sourceType":"\"STRING\"","targetType":"\"BOOLEAN\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":20,"fragment":"boolean('nay'"}]} -- !query @@ -129,7 +129,7 @@ SELECT boolean('on') AS true struct<> -- !query output org.apache.spark.SparkRuntimeException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'on'","\"STRING\"","\"BOOLEAN\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":19,"fragment":"boolean('on'"}]} 
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'on'","sourceType":"\"STRING\"","targetType":"\"BOOLEAN\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":19,"fragment":"boolean('on'"}]} -- !query @@ -138,7 +138,7 @@ SELECT boolean('off') AS `false` struct<> -- !query output org.apache.spark.SparkRuntimeException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'off'","\"STRING\"","\"BOOLEAN\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":20,"fragment":"boolean('off'"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'off'","sourceType":"\"STRING\"","targetType":"\"BOOLEAN\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":20,"fragment":"boolean('off'"}]} -- !query @@ -147,7 +147,7 @@ SELECT boolean('of') AS `false` struct<> -- !query output org.apache.spark.SparkRuntimeException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'of'","\"STRING\"","\"BOOLEAN\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":19,"fragment":"boolean('of'"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'of'","sourceType":"\"STRING\"","targetType":"\"BOOLEAN\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":19,"fragment":"boolean('of'"}]} -- !query @@ -156,7 +156,7 @@ SELECT boolean('o') AS error struct<> -- !query output org.apache.spark.SparkRuntimeException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'o'","\"STRING\"","\"BOOLEAN\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":18,"fragment":"boolean('o'"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'o'","sourceType":"\"STRING\"","targetType":"\"BOOLEAN\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":18,"fragment":"boolean('o'"}]} -- !query @@ -165,7 +165,7 @@ SELECT boolean('on_') AS error struct<> -- !query output org.apache.spark.SparkRuntimeException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'on_'","\"STRING\"","\"BOOLEAN\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":20,"fragment":"boolean('on_'"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'on_'","sourceType":"\"STRING\"","targetType":"\"BOOLEAN\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":20,"fragment":"boolean('on_'"}]} -- !query @@ -174,7 +174,7 @@ SELECT boolean('off_') AS error struct<> -- !query output org.apache.spark.SparkRuntimeException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'off_'","\"STRING\"","\"BOOLEAN\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":21,"fragment":"boolean('off_'"}]} 
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'off_'","sourceType":"\"STRING\"","targetType":"\"BOOLEAN\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":21,"fragment":"boolean('off_'"}]} -- !query @@ -191,7 +191,7 @@ SELECT boolean('11') AS error struct<> -- !query output org.apache.spark.SparkRuntimeException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'11'","\"STRING\"","\"BOOLEAN\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":19,"fragment":"boolean('11'"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'11'","sourceType":"\"STRING\"","targetType":"\"BOOLEAN\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":19,"fragment":"boolean('11'"}]} -- !query @@ -208,7 +208,7 @@ SELECT boolean('000') AS error struct<> -- !query output org.apache.spark.SparkRuntimeException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'000'","\"STRING\"","\"BOOLEAN\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":20,"fragment":"boolean('000'"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'000'","sourceType":"\"STRING\"","targetType":"\"BOOLEAN\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":20,"fragment":"boolean('000'"}]} -- !query @@ -217,7 +217,7 @@ SELECT boolean('') AS error struct<> -- !query output org.apache.spark.SparkRuntimeException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["''","\"STRING\"","\"BOOLEAN\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":17,"fragment":"boolean(''"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"''","sourceType":"\"STRING\"","targetType":"\"BOOLEAN\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":17,"fragment":"boolean(''"}]} -- !query @@ -323,7 +323,7 @@ SELECT boolean(string(' tru e ')) AS invalid struct<> -- !query output org.apache.spark.SparkRuntimeException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["' tru e '","\"STRING\"","\"BOOLEAN\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":33,"fragment":"boolean(string(' tru e ')"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"' tru e '","sourceType":"\"STRING\"","targetType":"\"BOOLEAN\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":33,"fragment":"boolean(string(' tru e ')"}]} -- !query @@ -332,7 +332,7 @@ SELECT boolean(string('')) AS invalid struct<> -- !query output org.apache.spark.SparkRuntimeException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["''","\"STRING\"","\"BOOLEAN\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":25,"fragment":"boolean(string('')"}]} 
+{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"''","sourceType":"\"STRING\"","targetType":"\"BOOLEAN\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":25,"fragment":"boolean(string('')"}]} -- !query @@ -476,7 +476,7 @@ INSERT INTO BOOLTBL2 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["failed to evaluate expression CAST('XXX' AS BOOLEAN): [CAST_INVALID_INPUT] The value 'XXX' of the type \"STRING\" cannot be cast to \"BOOLEAN\" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set \"spark.sql.ansi.enabled\" to \"false\" to bypass this error.\n== SQL(line 2, position 12) ==\n VALUES (boolean('XXX'))\n ^^^^^^^^^^^^^^\n; line 2 pos 3"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"failed to evaluate expression CAST('XXX' AS BOOLEAN): [CAST_INVALID_INPUT] The value 'XXX' of the type \"STRING\" cannot be cast to \"BOOLEAN\" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set \"spark.sql.ansi.enabled\" to \"false\" to bypass this error.\n== SQL(line 2, position 12) ==\n VALUES (boolean('XXX'))\n ^^^^^^^^^^^^^^\n; line 2 pos 3"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/create_view.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/create_view.sql.out index 6329299e7b7ef..92a423d2ce966 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/create_view.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/create_view.sql.out @@ -53,7 +53,7 @@ CREATE VIEW key_dependent_view AS struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["expression 'spark_catalog.default.view_base_table.data' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"expression 'spark_catalog.default.view_base_table.data' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get."},"queryContext":[]} -- !query @@ -63,7 +63,7 @@ CREATE VIEW key_dependent_view_no_cols AS struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`FROM`",""],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`FROM`","objectList":""},"queryContext":[]} -- !query @@ -257,7 +257,7 @@ CREATE VIEW v1_temp AS SELECT * FROM temp_table struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v1_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v1_temp` by referencing a temporary view temp_table. 
Please create a temp view instead by CREATE TEMP VIEW"},"queryContext":[]} -- !query @@ -313,7 +313,7 @@ CREATE VIEW temp_view_test.v3_temp AS SELECT * FROM temp_table struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v3_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v3_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW"},"queryContext":[]} -- !query @@ -362,7 +362,7 @@ CREATE VIEW v4_temp AS struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v4_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v4_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW"},"queryContext":[]} -- !query @@ -374,7 +374,7 @@ CREATE VIEW v5_temp AS struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v5_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v5_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW"},"queryContext":[]} -- !query @@ -533,7 +533,7 @@ CREATE VIEW v6_temp AS SELECT * FROM base_table WHERE id IN (SELECT id FROM temp struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v6_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v6_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW"},"queryContext":[]} -- !query @@ -542,7 +542,7 @@ CREATE VIEW v7_temp AS SELECT t1.id, t2.a FROM base_table t1, (SELECT * FROM tem struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v7_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v7_temp` by referencing a temporary view temp_table. 
Please create a temp view instead by CREATE TEMP VIEW"},"queryContext":[]} -- !query @@ -551,7 +551,7 @@ CREATE VIEW v8_temp AS SELECT * FROM base_table WHERE EXISTS (SELECT 1 FROM temp struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v8_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v8_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW"},"queryContext":[]} -- !query @@ -560,7 +560,7 @@ CREATE VIEW v9_temp AS SELECT * FROM base_table WHERE NOT EXISTS (SELECT 1 FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v9_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v9_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW"},"queryContext":[]} -- !query @@ -569,7 +569,7 @@ CREATE VIEW v10_temp AS SELECT * FROM v7_temp struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Table or view not found: v7_temp; line 1 pos 38"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Table or view not found: v7_temp; line 1 pos 38"},"queryContext":[]} -- !query @@ -578,7 +578,7 @@ CREATE VIEW v11_temp AS SELECT t1.id, t2.a FROM base_table t1, v10_temp t2 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Table or view not found: v10_temp; line 1 pos 63"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Table or view not found: v10_temp; line 1 pos 63"},"queryContext":[]} -- !query @@ -587,7 +587,7 @@ CREATE VIEW v12_temp AS SELECT true FROM v11_temp struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Table or view not found: v11_temp; line 1 pos 41"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Table or view not found: v11_temp; line 1 pos 41"},"queryContext":[]} -- !query @@ -669,7 +669,7 @@ CREATE VIEW temporal1 AS SELECT * FROM t1 CROSS JOIN tt struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Not allowed to create a permanent view `spark_catalog`.`testviewschm2`.`temporal1` by referencing a temporary view tt. Please create a temp view instead by CREATE TEMP VIEW"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Not allowed to create a permanent view `spark_catalog`.`testviewschm2`.`temporal1` by referencing a temporary view tt. 
Please create a temp view instead by CREATE TEMP VIEW"},"queryContext":[]} -- !query @@ -710,7 +710,7 @@ CREATE VIEW temporal2 AS SELECT * FROM t1 INNER JOIN tt ON t1.num = tt.num2 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Not allowed to create a permanent view `spark_catalog`.`testviewschm2`.`temporal2` by referencing a temporary view tt. Please create a temp view instead by CREATE TEMP VIEW"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Not allowed to create a permanent view `spark_catalog`.`testviewschm2`.`temporal2` by referencing a temporary view tt. Please create a temp view instead by CREATE TEMP VIEW"},"queryContext":[]} -- !query @@ -751,7 +751,7 @@ CREATE VIEW temporal3 AS SELECT * FROM t1 LEFT JOIN tt ON t1.num = tt.num2 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Not allowed to create a permanent view `spark_catalog`.`testviewschm2`.`temporal3` by referencing a temporary view tt. Please create a temp view instead by CREATE TEMP VIEW"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Not allowed to create a permanent view `spark_catalog`.`testviewschm2`.`temporal3` by referencing a temporary view tt. Please create a temp view instead by CREATE TEMP VIEW"},"queryContext":[]} -- !query @@ -792,7 +792,7 @@ CREATE VIEW temporal4 AS SELECT * FROM t1 LEFT JOIN tt ON t1.num = tt.num2 AND t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Not allowed to create a permanent view `spark_catalog`.`testviewschm2`.`temporal4` by referencing a temporary view tt. Please create a temp view instead by CREATE TEMP VIEW"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Not allowed to create a permanent view `spark_catalog`.`testviewschm2`.`temporal4` by referencing a temporary view tt. Please create a temp view instead by CREATE TEMP VIEW"},"queryContext":[]} -- !query @@ -801,7 +801,7 @@ CREATE VIEW temporal5 AS SELECT * FROM t1 WHERE num IN (SELECT num FROM t1 WHERE struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Not allowed to create a permanent view `spark_catalog`.`testviewschm2`.`temporal5` by referencing a temporary view tt. Please create a temp view instead by CREATE TEMP VIEW"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Not allowed to create a permanent view `spark_catalog`.`testviewschm2`.`temporal5` by referencing a temporary view tt. 
Please create a temp view instead by CREATE TEMP VIEW"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out index 899fb494c801a..779313a00bcfb 100755 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out @@ -198,7 +198,7 @@ SELECT date '1999 Jan 08' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 1999 Jan 08(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 Jan 08'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the DATE value: 1999 Jan 08(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 Jan 08'\n-------^^^\n"},"queryContext":[]} -- !query @@ -207,7 +207,7 @@ SELECT date '1999 08 Jan' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 1999 08 Jan(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 08 Jan'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the DATE value: 1999 08 Jan(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 08 Jan'\n-------^^^\n"},"queryContext":[]} -- !query @@ -232,7 +232,7 @@ SELECT date '1999 01 08' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 1999 01 08(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 01 08'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the DATE value: 1999 01 08(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 01 08'\n-------^^^\n"},"queryContext":[]} -- !query @@ -241,7 +241,7 @@ SELECT date '1999 08 01' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 1999 08 01(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 08 01'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the DATE value: 1999 08 01(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 08 01'\n-------^^^\n"},"queryContext":[]} -- !query @@ -258,7 +258,7 @@ SELECT date '1999 Jan 08' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 1999 Jan 08(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 Jan 08'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the DATE value: 1999 Jan 08(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 Jan 08'\n-------^^^\n"},"queryContext":[]} -- !query @@ -267,7 +267,7 @@ SELECT date '1999 08 Jan' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 1999 08 Jan(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 08 Jan'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the DATE value: 1999 08 Jan(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 08 Jan'\n-------^^^\n"},"queryContext":[]} -- !query @@ -292,7 +292,7 @@ SELECT date '1999 01 08' struct<> -- !query output 
org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 1999 01 08(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 01 08'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the DATE value: 1999 01 08(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 01 08'\n-------^^^\n"},"queryContext":[]} -- !query @@ -301,7 +301,7 @@ SELECT date '1999 08 01' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 1999 08 01(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 08 01'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the DATE value: 1999 08 01(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 08 01'\n-------^^^\n"},"queryContext":[]} -- !query @@ -326,7 +326,7 @@ SELECT date '1999 Jan 08' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 1999 Jan 08(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 Jan 08'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the DATE value: 1999 Jan 08(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 Jan 08'\n-------^^^\n"},"queryContext":[]} -- !query @@ -335,7 +335,7 @@ SELECT date '1999 08 Jan' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 1999 08 Jan(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 08 Jan'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the DATE value: 1999 08 Jan(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 08 Jan'\n-------^^^\n"},"queryContext":[]} -- !query @@ -360,7 +360,7 @@ SELECT date '1999 01 08' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 1999 01 08(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 01 08'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the DATE value: 1999 01 08(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 01 08'\n-------^^^\n"},"queryContext":[]} -- !query @@ -369,7 +369,7 @@ SELECT date '1999 08 01' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the DATE value: 1999 08 01(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 08 01'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the DATE value: 1999 08 01(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 08 01'\n-------^^^\n"},"queryContext":[]} -- !query @@ -518,7 +518,7 @@ select make_date(2013, 2, 30) struct<> -- !query output java.time.DateTimeException -{"errorClass":"legacy","messageParameters":["Invalid date 'FEBRUARY 30'. If necessary set spark.sql.ansi.enabled to false to bypass this error."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Invalid date 'FEBRUARY 30'. 
If necessary set spark.sql.ansi.enabled to false to bypass this error."},"queryContext":[]} -- !query @@ -527,7 +527,7 @@ select make_date(2013, 13, 1) struct<> -- !query output java.time.DateTimeException -{"errorClass":"legacy","messageParameters":["Invalid value for MonthOfYear (valid values 1 - 12): 13. If necessary set spark.sql.ansi.enabled to false to bypass this error."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Invalid value for MonthOfYear (valid values 1 - 12): 13. If necessary set spark.sql.ansi.enabled to false to bypass this error."},"queryContext":[]} -- !query @@ -536,7 +536,7 @@ select make_date(2013, 11, -1) struct<> -- !query output java.time.DateTimeException -{"errorClass":"legacy","messageParameters":["Invalid value for DayOfMonth (valid values 1 - 28/31): -1. If necessary set spark.sql.ansi.enabled to false to bypass this error."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Invalid value for DayOfMonth (valid values 1 - 28/31): -1. If necessary set spark.sql.ansi.enabled to false to bypass this error."},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/float4.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/float4.sql.out index 2be70bb8db988..bb90679379aa7 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/float4.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/float4.sql.out @@ -93,7 +93,7 @@ SELECT float('N A N') struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'N A N'","\"STRING\"","\"FLOAT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":20,"fragment":"float('N A N'"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'N A N'","sourceType":"\"STRING\"","targetType":"\"FLOAT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":20,"fragment":"float('N A N'"}]} -- !query @@ -102,7 +102,7 @@ SELECT float('NaN x') struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'NaN x'","\"STRING\"","\"FLOAT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":20,"fragment":"float('NaN x'"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'NaN x'","sourceType":"\"STRING\"","targetType":"\"FLOAT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":20,"fragment":"float('NaN x'"}]} -- !query @@ -111,7 +111,7 @@ SELECT float(' INFINITY x') struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["' INFINITY x'","\"STRING\"","\"FLOAT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":29,"fragment":"float(' INFINITY x'"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"' INFINITY x'","sourceType":"\"STRING\"","targetType":"\"FLOAT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":29,"fragment":"float(' INFINITY x'"}]} -- !query @@ -144,7 
+144,7 @@ SELECT float(decimal('nan')) struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'nan'","\"STRING\"","\"DECIMAL(10,0)\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":13,"stopIndex":26,"fragment":"decimal('nan'"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'nan'","sourceType":"\"STRING\"","targetType":"\"DECIMAL(10,0)\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":13,"stopIndex":26,"fragment":"decimal('nan'"}]} -- !query @@ -325,7 +325,7 @@ SELECT int(float('2147483647')) struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":["2.14748365E9","\"FLOAT\"","\"INT\"","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":{"value":"2.14748365E9","sourceType":"\"FLOAT\"","targetType":"\"INT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query @@ -342,7 +342,7 @@ SELECT int(float('-2147483900')) struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":["-2.1474839E9","\"FLOAT\"","\"INT\"","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":{"value":"-2.1474839E9","sourceType":"\"FLOAT\"","targetType":"\"INT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query @@ -375,7 +375,7 @@ SELECT bigint(float('-9223380000000000000')) struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":["-9.22338E18","\"FLOAT\"","\"BIGINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":{"value":"-9.22338E18","sourceType":"\"FLOAT\"","targetType":"\"BIGINT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/float8.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/float8.sql.out index 8968bee9a31ac..157ca5c1f6062 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/float8.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/float8.sql.out @@ -125,7 +125,7 @@ SELECT double('N A N') struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'N A N'","\"STRING\"","\"DOUBLE\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":21,"fragment":"double('N A N'"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'N A N'","sourceType":"\"STRING\"","targetType":"\"DOUBLE\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":21,"fragment":"double('N A N'"}]} -- !query @@ -134,7 +134,7 @@ SELECT double('NaN x') struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'NaN 
x'","\"STRING\"","\"DOUBLE\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":21,"fragment":"double('NaN x'"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'NaN x'","sourceType":"\"STRING\"","targetType":"\"DOUBLE\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":21,"fragment":"double('NaN x'"}]} -- !query @@ -143,7 +143,7 @@ SELECT double(' INFINITY x') struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["' INFINITY x'","\"STRING\"","\"DOUBLE\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":30,"fragment":"double(' INFINITY x'"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"' INFINITY x'","sourceType":"\"STRING\"","targetType":"\"DOUBLE\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":30,"fragment":"double(' INFINITY x'"}]} -- !query @@ -176,7 +176,7 @@ SELECT double(decimal('nan')) struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'nan'","\"STRING\"","\"DECIMAL(10,0)\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":14,"stopIndex":27,"fragment":"decimal('nan'"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'nan'","sourceType":"\"STRING\"","targetType":"\"DECIMAL(10,0)\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":14,"stopIndex":27,"fragment":"decimal('nan'"}]} -- !query @@ -830,7 +830,7 @@ SELECT bigint(double('-9223372036854780000')) struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":["-9.22337203685478E18D","\"DOUBLE\"","\"BIGINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":{"value":"-9.22337203685478E18D","sourceType":"\"DOUBLE\"","targetType":"\"BIGINT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int4.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int4.sql.out index e83eee6d78b03..13efda519d195 100755 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int4.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int4.sql.out @@ -197,7 +197,7 @@ SELECT '' AS five, i.f1, i.f1 * smallint('2') AS x FROM INT4_TBL i struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["integer overflow"," Use 'try_multiply' to tolerate overflow and return NULL instead.","spark.sql.ansi.enabled"],"queryContext":[{"objectType":"","objectName":"","startIndex":25,"stopIndex":44,"fragment":"i.f1 * smallint('2'"}]} +{"errorClass":"ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":{"message":"integer overflow","alternative":" Use 'try_multiply' to tolerate overflow and return NULL instead.","config":"spark.sql.ansi.enabled"},"queryContext":[{"objectType":"","objectName":"","startIndex":25,"stopIndex":44,"fragment":"i.f1 * 
smallint('2'"}]} -- !query @@ -217,7 +217,7 @@ SELECT '' AS five, i.f1, i.f1 * int('2') AS x FROM INT4_TBL i struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["integer overflow"," Use 'try_multiply' to tolerate overflow and return NULL instead.","spark.sql.ansi.enabled"],"queryContext":[{"objectType":"","objectName":"","startIndex":25,"stopIndex":39,"fragment":"i.f1 * int('2'"}]} +{"errorClass":"ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":{"message":"integer overflow","alternative":" Use 'try_multiply' to tolerate overflow and return NULL instead.","config":"spark.sql.ansi.enabled"},"queryContext":[{"objectType":"","objectName":"","startIndex":25,"stopIndex":39,"fragment":"i.f1 * int('2'"}]} -- !query @@ -237,7 +237,7 @@ SELECT '' AS five, i.f1, i.f1 + smallint('2') AS x FROM INT4_TBL i struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["integer overflow"," Use 'try_add' to tolerate overflow and return NULL instead.","spark.sql.ansi.enabled"],"queryContext":[{"objectType":"","objectName":"","startIndex":25,"stopIndex":44,"fragment":"i.f1 + smallint('2'"}]} +{"errorClass":"ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":{"message":"integer overflow","alternative":" Use 'try_add' to tolerate overflow and return NULL instead.","config":"spark.sql.ansi.enabled"},"queryContext":[{"objectType":"","objectName":"","startIndex":25,"stopIndex":44,"fragment":"i.f1 + smallint('2'"}]} -- !query @@ -258,7 +258,7 @@ SELECT '' AS five, i.f1, i.f1 + int('2') AS x FROM INT4_TBL i struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["integer overflow"," Use 'try_add' to tolerate overflow and return NULL instead.","spark.sql.ansi.enabled"],"queryContext":[{"objectType":"","objectName":"","startIndex":25,"stopIndex":39,"fragment":"i.f1 + int('2'"}]} +{"errorClass":"ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":{"message":"integer overflow","alternative":" Use 'try_add' to tolerate overflow and return NULL instead.","config":"spark.sql.ansi.enabled"},"queryContext":[{"objectType":"","objectName":"","startIndex":25,"stopIndex":39,"fragment":"i.f1 + int('2'"}]} -- !query @@ -279,7 +279,7 @@ SELECT '' AS five, i.f1, i.f1 - smallint('2') AS x FROM INT4_TBL i struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["integer overflow"," Use 'try_subtract' to tolerate overflow and return NULL instead.","spark.sql.ansi.enabled"],"queryContext":[{"objectType":"","objectName":"","startIndex":25,"stopIndex":44,"fragment":"i.f1 - smallint('2'"}]} +{"errorClass":"ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":{"message":"integer overflow","alternative":" Use 'try_subtract' to tolerate overflow and return NULL instead.","config":"spark.sql.ansi.enabled"},"queryContext":[{"objectType":"","objectName":"","startIndex":25,"stopIndex":44,"fragment":"i.f1 - smallint('2'"}]} -- !query @@ -300,7 +300,7 @@ SELECT '' AS five, i.f1, i.f1 - int('2') AS x FROM INT4_TBL i struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["integer overflow"," Use 'try_subtract' to tolerate overflow and return NULL 
instead.","spark.sql.ansi.enabled"],"queryContext":[{"objectType":"","objectName":"","startIndex":25,"stopIndex":39,"fragment":"i.f1 - int('2'"}]} +{"errorClass":"ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":{"message":"integer overflow","alternative":" Use 'try_subtract' to tolerate overflow and return NULL instead.","config":"spark.sql.ansi.enabled"},"queryContext":[{"objectType":"","objectName":"","startIndex":25,"stopIndex":39,"fragment":"i.f1 - int('2'"}]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out index 606279c1accae..6fca4bb396770 100755 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out @@ -389,7 +389,7 @@ SELECT '' AS three, q1, q2, q1 * q2 AS multiply FROM INT8_TBL struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["long overflow"," Use 'try_multiply' to tolerate overflow and return NULL instead.","spark.sql.ansi.enabled"],"queryContext":[{"objectType":"","objectName":"","startIndex":28,"stopIndex":34,"fragment":"q1 * q"}]} +{"errorClass":"ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":{"message":"long overflow","alternative":" Use 'try_multiply' to tolerate overflow and return NULL instead.","config":"spark.sql.ansi.enabled"},"queryContext":[{"objectType":"","objectName":"","startIndex":28,"stopIndex":34,"fragment":"q1 * q"}]} -- !query @@ -644,7 +644,7 @@ select bigint('9223372036854775800') / bigint('0') struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"DIVIDE_BY_ZERO","sqlState":"22012","messageParameters":["\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":49,"fragment":"bigint('9223372036854775800') / bigint('0'"}]} +{"errorClass":"DIVIDE_BY_ZERO","sqlState":"22012","messageParameters":{"config":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":49,"fragment":"bigint('9223372036854775800') / bigint('0'"}]} -- !query @@ -653,7 +653,7 @@ select bigint('-9223372036854775808') / smallint('0') struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"DIVIDE_BY_ZERO","sqlState":"22012","messageParameters":["\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":52,"fragment":"bigint('-9223372036854775808') / smallint('0'"}]} +{"errorClass":"DIVIDE_BY_ZERO","sqlState":"22012","messageParameters":{"config":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":52,"fragment":"bigint('-9223372036854775808') / smallint('0'"}]} -- !query @@ -662,7 +662,7 @@ select smallint('100') / bigint('0') struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"DIVIDE_BY_ZERO","sqlState":"22012","messageParameters":["\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":35,"fragment":"smallint('100') / bigint('0'"}]} +{"errorClass":"DIVIDE_BY_ZERO","sqlState":"22012","messageParameters":{"config":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":35,"fragment":"smallint('100') / bigint('0'"}]} -- !query @@ -679,7 +679,7 @@ SELECT CAST(q1 AS int) FROM int8_tbl 
WHERE q2 <> 456 struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":["4567890123456789L","\"BIGINT\"","\"INT\"","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":{"value":"4567890123456789L","sourceType":"\"BIGINT\"","targetType":"\"INT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query @@ -696,7 +696,7 @@ SELECT CAST(q1 AS smallint) FROM int8_tbl WHERE q2 <> 456 struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":["4567890123456789L","\"BIGINT\"","\"SMALLINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":{"value":"4567890123456789L","sourceType":"\"BIGINT\"","targetType":"\"SMALLINT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query @@ -733,7 +733,7 @@ SELECT CAST(double('922337203685477580700.0') AS bigint) struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":["9.223372036854776E20D","\"DOUBLE\"","\"BIGINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":{"value":"9.223372036854776E20D","sourceType":"\"DOUBLE\"","targetType":"\"BIGINT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query @@ -771,7 +771,7 @@ SELECT * FROM range(bigint('+4567890123456789'), bigint('+4567890123456799'), 0) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Table-valued function range with alternatives: \n range(start: long, end: long, step: long, numSlices: integer)\n range(start: long, end: long, step: long)\n range(start: long, end: long)\n range(end: long)\ncannot be applied to (long, long, integer): requirement failed: step (0) cannot be 0; line 1 pos 14"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Table-valued function range with alternatives: \n range(start: long, end: long, step: long, numSlices: integer)\n range(start: long, end: long, step: long)\n range(start: long, end: long)\n range(end: long)\ncannot be applied to (long, long, integer): requirement failed: step (0) cannot be 0; line 1 pos 14"},"queryContext":[]} -- !query @@ -800,7 +800,7 @@ SELECT string(int(shiftleft(bigint(-1), 63))+1) struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":["-9223372036854775808L","\"BIGINT\"","\"INT\"","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CAST_OVERFLOW","sqlState":"22005","messageParameters":{"value":"-9223372036854775808L","sourceType":"\"BIGINT\"","targetType":"\"INT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query @@ -809,7 +809,7 @@ SELECT bigint((-9223372036854775808)) * bigint((-1)) struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["long overflow"," Use 'try_multiply' to tolerate overflow and return NULL instead.","spark.sql.ansi.enabled"],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":51,"fragment":"bigint((-9223372036854775808)) * bigint((-1)"}]} 
+{"errorClass":"ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":{"message":"long overflow","alternative":" Use 'try_multiply' to tolerate overflow and return NULL instead.","config":"spark.sql.ansi.enabled"},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":51,"fragment":"bigint((-9223372036854775808)) * bigint((-1)"}]} -- !query @@ -834,7 +834,7 @@ SELECT bigint((-9223372036854775808)) * int((-1)) struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["long overflow"," Use 'try_multiply' to tolerate overflow and return NULL instead.","spark.sql.ansi.enabled"],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":48,"fragment":"bigint((-9223372036854775808)) * int((-1)"}]} +{"errorClass":"ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":{"message":"long overflow","alternative":" Use 'try_multiply' to tolerate overflow and return NULL instead.","config":"spark.sql.ansi.enabled"},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":48,"fragment":"bigint((-9223372036854775808)) * int((-1)"}]} -- !query @@ -859,7 +859,7 @@ SELECT bigint((-9223372036854775808)) * smallint((-1)) struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["long overflow"," Use 'try_multiply' to tolerate overflow and return NULL instead.","spark.sql.ansi.enabled"],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":53,"fragment":"bigint((-9223372036854775808)) * smallint((-1)"}]} +{"errorClass":"ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":{"message":"long overflow","alternative":" Use 'try_multiply' to tolerate overflow and return NULL instead.","config":"spark.sql.ansi.enabled"},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":53,"fragment":"bigint((-9223372036854775808)) * smallint((-1)"}]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out index f344f6b347216..88c693cf94df8 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out @@ -101,7 +101,7 @@ SELECT interval '1 2:03' day to hour struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval day to hour: 1 2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nSELECT interval '1 2:03' day to hour\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nInterval string does not match day-time format of `[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval day to hour: 1 2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nSELECT interval '1 2:03' day to hour\n----------------^^^\n"},"queryContext":[]} -- !query @@ -110,7 +110,7 @@ SELECT interval '1 2:03:04' day to hour struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException 
-{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval day to hour: 1 2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nSELECT interval '1 2:03:04' day to hour\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nInterval string does not match day-time format of `[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval day to hour: 1 2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nSELECT interval '1 2:03:04' day to hour\n----------------^^^\n"},"queryContext":[]} -- !query @@ -127,7 +127,7 @@ SELECT interval '1 2:03:04' day to minute struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]d h:m`, `INTERVAL [+|-]'[+|-]d h:m' DAY TO MINUTE` when cast to interval day to minute: 1 2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nSELECT interval '1 2:03:04' day to minute\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nInterval string does not match day-time format of `[+|-]d h:m`, `INTERVAL [+|-]'[+|-]d h:m' DAY TO MINUTE` when cast to interval day to minute: 1 2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nSELECT interval '1 2:03:04' day to minute\n----------------^^^\n"},"queryContext":[]} -- !query @@ -136,7 +136,7 @@ SELECT interval '1 2:03' day to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]d h:m:s.n`, `INTERVAL [+|-]'[+|-]d h:m:s.n' DAY TO SECOND` when cast to interval day to second: 1 2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nSELECT interval '1 2:03' day to second\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nInterval string does not match day-time format of `[+|-]d h:m:s.n`, `INTERVAL [+|-]'[+|-]d h:m:s.n' DAY TO SECOND` when cast to interval day to second: 1 2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nSELECT interval '1 2:03' day to second\n----------------^^^\n"},"queryContext":[]} -- !query @@ -153,7 +153,7 @@ SELECT interval '1 2:03' hour to minute struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]h:m`, `INTERVAL [+|-]'[+|-]h:m' HOUR TO MINUTE` when cast to interval hour to minute: 1 2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nSELECT interval '1 2:03' hour to minute\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nInterval string does not match day-time format of `[+|-]h:m`, `INTERVAL [+|-]'[+|-]h:m' HOUR TO MINUTE` when cast to interval hour 
to minute: 1 2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nSELECT interval '1 2:03' hour to minute\n----------------^^^\n"},"queryContext":[]} -- !query @@ -162,7 +162,7 @@ SELECT interval '1 2:03:04' hour to minute struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]h:m`, `INTERVAL [+|-]'[+|-]h:m' HOUR TO MINUTE` when cast to interval hour to minute: 1 2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nSELECT interval '1 2:03:04' hour to minute\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nInterval string does not match day-time format of `[+|-]h:m`, `INTERVAL [+|-]'[+|-]h:m' HOUR TO MINUTE` when cast to interval hour to minute: 1 2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nSELECT interval '1 2:03:04' hour to minute\n----------------^^^\n"},"queryContext":[]} -- !query @@ -171,7 +171,7 @@ SELECT interval '1 2:03' hour to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 1 2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nSELECT interval '1 2:03' hour to second\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nInterval string does not match day-time format of `[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 1 2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nSELECT interval '1 2:03' hour to second\n----------------^^^\n"},"queryContext":[]} -- !query @@ -180,7 +180,7 @@ SELECT interval '1 2:03:04' hour to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 1 2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nSELECT interval '1 2:03:04' hour to second\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nInterval string does not match day-time format of `[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 1 2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nSELECT interval '1 2:03:04' hour to second\n----------------^^^\n"},"queryContext":[]} -- !query @@ -189,7 +189,7 @@ SELECT interval '1 2:03' minute to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]m:s.n`, `INTERVAL [+|-]'[+|-]m:s.n' MINUTE TO SECOND` when cast to interval 
minute to second: 1 2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nSELECT interval '1 2:03' minute to second\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nInterval string does not match day-time format of `[+|-]m:s.n`, `INTERVAL [+|-]'[+|-]m:s.n' MINUTE TO SECOND` when cast to interval minute to second: 1 2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nSELECT interval '1 2:03' minute to second\n----------------^^^\n"},"queryContext":[]} -- !query @@ -198,4 +198,4 @@ SELECT interval '1 2:03:04' minute to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nInterval string does not match day-time format of `[+|-]m:s.n`, `INTERVAL [+|-]'[+|-]m:s.n' MINUTE TO SECOND` when cast to interval minute to second: 1 2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nSELECT interval '1 2:03:04' minute to second\n----------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nInterval string does not match day-time format of `[+|-]m:s.n`, `INTERVAL [+|-]'[+|-]m:s.n' MINUTE TO SECOND` when cast to interval minute to second: 1 2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nSELECT interval '1 2:03:04' minute to second\n----------------^^^\n"},"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/join.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/join.sql.out index 20eb695b1962b..30ee5f0aa57b5 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/join.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/join.sql.out @@ -546,7 +546,7 @@ SELECT '' AS `xxx`, i, k, t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Reference 'i' is ambiguous, could be: spark_catalog.default.j1_tbl.i, spark_catalog.default.j2_tbl.i.; line 1 pos 20"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Reference 'i' is ambiguous, could be: spark_catalog.default.j1_tbl.i, spark_catalog.default.j2_tbl.i.; line 1 pos 20"},"queryContext":[]} -- !query @@ -3235,7 +3235,7 @@ select * from struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Reference 'f1' is ambiguous, could be: j.f1, j.f1.; line 2 pos 63"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Reference 'f1' is ambiguous, could be: j.f1, j.f1.; line 2 pos 63"},"queryContext":[]} -- !query @@ -3245,7 +3245,7 @@ select * from struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`y`.`f1`","`j`.`f1`, `j`.`f1`, `x`.`q1`, `x`.`q2`"],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`y`.`f1`","objectList":"`j`.`f1`, `j`.`f1`, `x`.`q1`, `x`.`q2`"},"queryContext":[]} -- !query @@ -3264,7 +3264,7 @@ select t1.uunique1 from struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`t1`.`uunique1`","`t1`.`unique1`, `t2`.`unique1`, `t1`.`unique2`, `t2`.`unique2`, `t1`.`hundred`, `t2`.`hundred`, `t1`.`stringu1`, `t1`.`even`, `t1`.`four`, `t1`.`string4`, `t2`.`stringu1`, `t1`.`stringu2`, `t1`.`ten`, `t1`.`tenthous`, `t2`.`even`, `t2`.`four`, `t1`.`odd`, `t2`.`string4`, `t2`.`stringu2`, `t2`.`ten`, `t2`.`tenthous`, `t1`.`thousand`, `t1`.`twenty`, `t1`.`two`, `t1`.`fivethous`, `t2`.`odd`, `t2`.`thousand`, `t2`.`twenty`, `t2`.`two`, `t2`.`fivethous`, `t1`.`twothousand`, `t2`.`twothousand`"],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`t1`.`uunique1`","objectList":"`t1`.`unique1`, `t2`.`unique1`, `t1`.`unique2`, `t2`.`unique2`, `t1`.`hundred`, `t2`.`hundred`, `t1`.`stringu1`, `t1`.`even`, `t1`.`four`, `t1`.`string4`, `t2`.`stringu1`, `t1`.`stringu2`, `t1`.`ten`, `t1`.`tenthous`, `t2`.`even`, `t2`.`four`, `t1`.`odd`, `t2`.`string4`, `t2`.`stringu2`, `t2`.`ten`, `t2`.`tenthous`, `t1`.`thousand`, `t1`.`twenty`, `t1`.`two`, `t1`.`fivethous`, `t2`.`odd`, `t2`.`thousand`, `t2`.`twenty`, `t2`.`two`, `t2`.`fivethous`, `t1`.`twothousand`, `t2`.`twothousand`"},"queryContext":[]} -- !query @@ -3274,7 +3274,7 @@ select t2.uunique1 from struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`t2`.`uunique1`","`t2`.`unique1`, `t1`.`unique1`, `t2`.`unique2`, `t1`.`unique2`, `t2`.`hundred`, `t1`.`hundred`, `t2`.`stringu1`, `t2`.`even`, `t2`.`four`, `t2`.`string4`, `t1`.`stringu1`, `t2`.`stringu2`, `t2`.`ten`, `t2`.`tenthous`, `t1`.`even`, `t1`.`four`, `t2`.`odd`, `t1`.`string4`, `t1`.`stringu2`, `t1`.`ten`, `t1`.`tenthous`, `t2`.`thousand`, `t2`.`twenty`, `t2`.`two`, `t2`.`fivethous`, `t1`.`odd`, `t1`.`thousand`, `t1`.`twenty`, `t1`.`two`, `t1`.`fivethous`, `t2`.`twothousand`, `t1`.`twothousand`"],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`t2`.`uunique1`","objectList":"`t2`.`unique1`, `t1`.`unique1`, `t2`.`unique2`, `t1`.`unique2`, `t2`.`hundred`, `t1`.`hundred`, `t2`.`stringu1`, `t2`.`even`, `t2`.`four`, `t2`.`string4`, `t1`.`stringu1`, `t2`.`stringu2`, `t2`.`ten`, `t2`.`tenthous`, `t1`.`even`, `t1`.`four`, `t2`.`odd`, `t1`.`string4`, `t1`.`stringu2`, `t1`.`ten`, `t1`.`tenthous`, `t2`.`thousand`, `t2`.`twenty`, `t2`.`two`, `t2`.`fivethous`, `t1`.`odd`, `t1`.`thousand`, `t1`.`twenty`, `t1`.`two`, `t1`.`fivethous`, `t2`.`twothousand`, `t1`.`twothousand`"},"queryContext":[]} -- !query @@ -3284,7 +3284,7 @@ select uunique1 from struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`uunique1`","`t1`.`unique1`, `t2`.`unique1`, `t1`.`unique2`, `t2`.`unique2`, `t1`.`even`, `t2`.`even`, `t1`.`four`, `t2`.`four`, `t1`.`ten`, `t2`.`ten`, `t1`.`hundred`, `t2`.`hundred`, `t1`.`odd`, `t2`.`odd`, `t1`.`two`, `t2`.`two`, `t1`.`stringu1`, `t2`.`stringu1`, `t1`.`twenty`, `t2`.`twenty`, `t1`.`string4`, `t2`.`string4`, `t1`.`stringu2`, `t2`.`stringu2`, `t1`.`tenthous`, `t2`.`tenthous`, `t1`.`thousand`, `t2`.`thousand`, `t1`.`fivethous`, `t2`.`fivethous`, `t1`.`twothousand`, `t2`.`twothousand`"],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`uunique1`","objectList":"`t1`.`unique1`, `t2`.`unique1`, `t1`.`unique2`, `t2`.`unique2`, `t1`.`even`, `t2`.`even`, `t1`.`four`, 
`t2`.`four`, `t1`.`ten`, `t2`.`ten`, `t1`.`hundred`, `t2`.`hundred`, `t1`.`odd`, `t2`.`odd`, `t1`.`two`, `t2`.`two`, `t1`.`stringu1`, `t2`.`stringu1`, `t1`.`twenty`, `t2`.`twenty`, `t1`.`string4`, `t2`.`string4`, `t1`.`stringu2`, `t2`.`stringu2`, `t1`.`tenthous`, `t2`.`tenthous`, `t1`.`thousand`, `t2`.`thousand`, `t1`.`fivethous`, `t2`.`fivethous`, `t1`.`twothousand`, `t2`.`twothousand`"},"queryContext":[]} -- !query @@ -3484,7 +3484,7 @@ select f1,g from int4_tbl a, (select f1 as g) ss struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`f1`",""],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`f1`","objectList":""},"queryContext":[]} -- !query @@ -3493,7 +3493,7 @@ select f1,g from int4_tbl a, (select a.f1 as g) ss struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`a`.`f1`",""],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`a`.`f1`","objectList":""},"queryContext":[]} -- !query @@ -3502,7 +3502,7 @@ select f1,g from int4_tbl a cross join (select f1 as g) ss struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`f1`",""],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`f1`","objectList":""},"queryContext":[]} -- !query @@ -3511,7 +3511,7 @@ select f1,g from int4_tbl a cross join (select a.f1 as g) ss struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`a`.`f1`",""],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`a`.`f1`","objectList":""},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/limit.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/limit.sql.out index e9ef90d68f5e2..c1c8cc56b5276 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/limit.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/limit.sql.out @@ -131,7 +131,7 @@ select * from int8_tbl limit (case when random() < 0.5 then bigint(null) end) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["The limit expression must evaluate to a constant value, but got CASE WHEN (_nondeterministic < CAST(0.5BD AS DOUBLE)) THEN CAST(NULL AS BIGINT) END"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"The limit expression must evaluate to a constant value, but got CASE WHEN (_nondeterministic < CAST(0.5BD AS DOUBLE)) THEN CAST(NULL AS BIGINT) END"},"queryContext":[]} -- !query @@ -140,7 +140,7 @@ select * from int8_tbl offset (case when random() < 0.5 then bigint(null) end) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["The offset expression must evaluate to a constant value, but got CASE WHEN (_nondeterministic < CAST(0.5BD AS DOUBLE)) THEN CAST(NULL AS BIGINT) END"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"The offset expression must evaluate to a constant value, but got CASE WHEN (_nondeterministic < CAST(0.5BD AS DOUBLE)) THEN CAST(NULL AS BIGINT) 
END"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/numeric.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/numeric.sql.out index 0dd8f27794f20..ea315327c73b2 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/numeric.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/numeric.sql.out @@ -3581,7 +3581,7 @@ INSERT INTO num_exp_power_10_ln VALUES (7,1716699575118597095.423308199106402476 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\ndecimal can only support precision up to 38\n== SQL ==\nINSERT INTO num_exp_power_10_ln VALUES (7,1716699575118597095.42330819910640247627)"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\ndecimal can only support precision up to 38\n== SQL ==\nINSERT INTO num_exp_power_10_ln VALUES (7,1716699575118597095.42330819910640247627)"},"queryContext":[]} -- !query @@ -3824,7 +3824,7 @@ INSERT INTO num_result SELECT t1.id, t2.id, t1.val, t2.val, t1.val * t2.val struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["`spark_catalog`.`default`.`num_result` requires that the data to be inserted have the same number of columns as the target table: target table has 3 column(s) but the inserted data has 5 column(s), including 0 partition column(s) having constant value(s)."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"`spark_catalog`.`default`.`num_result` requires that the data to be inserted have the same number of columns as the target table: target table has 3 column(s) but the inserted data has 5 column(s), including 0 partition column(s) having constant value(s)."},"queryContext":[]} -- !query @@ -4681,7 +4681,7 @@ SELECT '' AS to_number_1, to_number('-34,338,492', '99G999G999') struct<> -- !query output java.lang.IllegalArgumentException -{"errorClass":"legacy","messageParameters":["The input string '-34,338,492' does not match the given number format: '99G999G999'"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"The input string '-34,338,492' does not match the given number format: '99G999G999'"},"queryContext":[]} -- !query @@ -4690,7 +4690,7 @@ SELECT '' AS to_number_2, to_number('-34,338,492.654,878', '99G999G999D999G999' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'to_number('-34,338,492.654,878', '99G999G999D999G999')' due to data type mismatch: Thousands separators (, or G) may not appear after the decimal point in the number format: '99G999G999D999G999'; line 1 pos 27"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'to_number('-34,338,492.654,878', '99G999G999D999G999')' due to data type mismatch: Thousands separators (, or G) may not appear after the decimal point in the number format: '99G999G999D999G999'; line 1 pos 27"},"queryContext":[]} -- !query @@ -4739,7 +4739,7 @@ SELECT '' AS to_number_15, to_number('123,000','999G') struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'to_number('123,000', '999G')' due to data type mismatch: Thousands separators (, or G) must have digits in between them in the number format: '999G'; line 1 pos 27"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 
'to_number('123,000', '999G')' due to data type mismatch: Thousands separators (, or G) must have digits in between them in the number format: '999G'; line 1 pos 27"},"queryContext":[]} -- !query @@ -4748,7 +4748,7 @@ SELECT '' AS to_number_16, to_number('123456','999G999') struct<> -- !query output java.lang.IllegalArgumentException -{"errorClass":"legacy","messageParameters":["The input string '123456' does not match the given number format: '999G999'"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"The input string '123456' does not match the given number format: '999G999'"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_having.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_having.sql.out index 00d634b521403..998bc469ca7c3 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_having.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_having.sql.out @@ -140,7 +140,7 @@ SELECT a FROM test_having HAVING min(a) < max(a) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["grouping expressions sequence is empty, and 'spark_catalog.default.test_having.a' is not an aggregate function. Wrap '(min(spark_catalog.default.test_having.a) AS `min(a#x)`, max(spark_catalog.default.test_having.a) AS `max(a#x)`)' in windowing function(s) or wrap 'spark_catalog.default.test_having.a' in first() (or first_value) if you don't care which value you get."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"grouping expressions sequence is empty, and 'spark_catalog.default.test_having.a' is not an aggregate function. Wrap '(min(spark_catalog.default.test_having.a) AS `min(a#x)`, max(spark_catalog.default.test_having.a) AS `max(a#x)`)' in windowing function(s) or wrap 'spark_catalog.default.test_having.a' in first() (or first_value) if you don't care which value you get."},"queryContext":[]} -- !query @@ -149,7 +149,7 @@ SELECT 1 AS one FROM test_having HAVING a > 1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`a`","`one`"],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`a`","objectList":"`one`"},"queryContext":[]} -- !query @@ -174,7 +174,7 @@ SELECT 1 AS one FROM test_having WHERE 1/a = 1 HAVING 1 < 2 struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"DIVIDE_BY_ZERO","sqlState":"22012","messageParameters":["\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":39,"stopIndex":41,"fragment":"1/"}]} +{"errorClass":"DIVIDE_BY_ZERO","sqlState":"22012","messageParameters":{"config":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":39,"stopIndex":41,"fragment":"1/"}]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_implicit.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_implicit.sql.out index 3feb31964fbab..f2e2ce43a941c 100755 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_implicit.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_implicit.sql.out @@ -119,7 +119,7 @@ SELECT count(*) FROM test_missing_target GROUP BY a ORDER BY b struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`b`","`count(1)`"],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`b`","objectList":"`count(1)`"},"queryContext":[]} -- !query @@ -200,7 +200,7 @@ SELECT c, count(*) FROM test_missing_target GROUP BY 3 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["GROUP BY position 3 is not in select list (valid range is [1, 2]); line 1 pos 53"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"GROUP BY position 3 is not in select list (valid range is [1, 2]); line 1 pos 53"},"queryContext":[]} -- !query @@ -211,7 +211,7 @@ SELECT count(*) FROM test_missing_target x, test_missing_target y struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Reference 'b' is ambiguous, could be: x.b, y.b.; line 3 pos 10"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Reference 'b' is ambiguous, could be: x.b, y.b.; line 3 pos 10"},"queryContext":[]} -- !query @@ -324,7 +324,7 @@ SELECT count(a) FROM test_missing_target GROUP BY a ORDER BY b struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`b`","`count(a)`"],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`b`","objectList":"`count(a)`"},"queryContext":[]} -- !query @@ -387,7 +387,7 @@ SELECT count(x.a) FROM test_missing_target x, test_missing_target y struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Reference 'b' is ambiguous, could be: x.b, y.b.; line 3 pos 10"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Reference 'b' is ambiguous, could be: x.b, y.b.; line 3 pos 10"},"queryContext":[]} -- !query @@ -411,7 +411,7 @@ SELECT count(b) FROM test_missing_target x, test_missing_target y struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Reference 'b' is ambiguous, could be: x.b, y.b.; line 1 pos 13"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Reference 'b' is ambiguous, could be: x.b, y.b.; line 1 pos 13"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/strings.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/strings.sql.out index 874b5281c987f..5b600269124d6 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/strings.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/strings.sql.out @@ -443,7 +443,7 @@ SELECT 'maca' LIKE 'm%aca' ESCAPE '%' AS `true` struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["the pattern 'm%aca' is invalid, the escape character is not allowed to precede 'a'"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"the pattern 'm%aca' is invalid, the escape character is not allowed to precede 'a'"},"queryContext":[]} -- !query @@ -452,7 +452,7 @@ SELECT 'maca' NOT LIKE 'm%aca' ESCAPE '%' AS `false` struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["the pattern 'm%aca' is invalid, the escape character is not allowed to precede 'a'"],"queryContext":[]} 
+{"errorClass":"legacy","messageParameters":{"message":"the pattern 'm%aca' is invalid, the escape character is not allowed to precede 'a'"},"queryContext":[]} -- !query @@ -461,7 +461,7 @@ SELECT 'ma%a' LIKE 'm%a%%a' ESCAPE '%' AS `true` struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["the pattern 'm%a%%a' is invalid, the escape character is not allowed to precede 'a'"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"the pattern 'm%a%%a' is invalid, the escape character is not allowed to precede 'a'"},"queryContext":[]} -- !query @@ -470,7 +470,7 @@ SELECT 'ma%a' NOT LIKE 'm%a%%a' ESCAPE '%' AS `false` struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["the pattern 'm%a%%a' is invalid, the escape character is not allowed to precede 'a'"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"the pattern 'm%a%%a' is invalid, the escape character is not allowed to precede 'a'"},"queryContext":[]} -- !query @@ -479,7 +479,7 @@ SELECT 'bear' LIKE 'b_ear' ESCAPE '_' AS `true` struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["the pattern 'b_ear' is invalid, the escape character is not allowed to precede 'e'"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"the pattern 'b_ear' is invalid, the escape character is not allowed to precede 'e'"},"queryContext":[]} -- !query @@ -488,7 +488,7 @@ SELECT 'bear' NOT LIKE 'b_ear' ESCAPE '_' AS `false` struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["the pattern 'b_ear' is invalid, the escape character is not allowed to precede 'e'"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"the pattern 'b_ear' is invalid, the escape character is not allowed to precede 'e'"},"queryContext":[]} -- !query @@ -497,7 +497,7 @@ SELECT 'be_r' LIKE 'b_e__r' ESCAPE '_' AS `true` struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["the pattern 'b_e__r' is invalid, the escape character is not allowed to precede 'e'"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"the pattern 'b_e__r' is invalid, the escape character is not allowed to precede 'e'"},"queryContext":[]} -- !query @@ -506,7 +506,7 @@ SELECT 'be_r' NOT LIKE 'b_e__r' ESCAPE '_' AS `false` struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["the pattern 'b_e__r' is invalid, the escape character is not allowed to precede 'e'"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"the pattern 'b_e__r' is invalid, the escape character is not allowed to precede 'e'"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out index 7f777e0d65d29..c924a16f5a188 100755 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out @@ -62,7 +62,7 @@ select string('four: ') || 2+2 struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'four: 
2'","\"STRING\"","\"BIGINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":29,"fragment":"string('four: ') || 2+"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'four: 2'","sourceType":"\"STRING\"","targetType":"\"BIGINT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":29,"fragment":"string('four: ') || 2+"}]} -- !query @@ -71,7 +71,7 @@ select 'four: ' || 2+2 struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'four: 2'","\"STRING\"","\"BIGINT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":21,"fragment":"'four: ' || 2+"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'four: 2'","sourceType":"\"STRING\"","targetType":"\"BIGINT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":7,"stopIndex":21,"fragment":"'four: ' || 2+"}]} -- !query @@ -216,7 +216,7 @@ select format_string('Hello %s %s', 'World') struct<> -- !query output java.util.MissingFormatArgumentException -{"errorClass":"legacy","messageParameters":["Format specifier '%s'"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Format specifier '%s'"},"queryContext":[]} -- !query @@ -225,7 +225,7 @@ select format_string('Hello %s') struct<> -- !query output java.util.MissingFormatArgumentException -{"errorClass":"legacy","messageParameters":["Format specifier '%s'"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Format specifier '%s'"},"queryContext":[]} -- !query @@ -258,7 +258,7 @@ select format_string('%1$s %4$s', 1, 2, 3) struct<> -- !query output java.util.MissingFormatArgumentException -{"errorClass":"legacy","messageParameters":["Format specifier '%4$s'"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Format specifier '%4$s'"},"queryContext":[]} -- !query @@ -267,7 +267,7 @@ select format_string('%1$s %13$s', 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12) struct<> -- !query output java.util.MissingFormatArgumentException -{"errorClass":"legacy","messageParameters":["Format specifier '%13$s'"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Format specifier '%13$s'"},"queryContext":[]} -- !query @@ -276,7 +276,7 @@ select format_string('%0$s', 'Hello') struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"INVALID_PARAMETER_VALUE","sqlState":"22023","messageParameters":["strfmt","`format_string`","expects %1$, %2$ and so on, but got %0$."],"queryContext":[]} +{"errorClass":"INVALID_PARAMETER_VALUE","sqlState":"22023","messageParameters":{"parameter":"strfmt","functionName":"`format_string`","expected":"expects %1$, %2$ and so on, but got %0$."},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/union.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/union.sql.out index dfa5150ce4fb5..0bd3ae9f4d08d 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/union.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/union.sql.out @@ -76,7 +76,7 @@ SELECT 1 AS three UNION SELECT 2 UNION SELECT 3 ORDER BY 1 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException 
-{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'SELECT'",""],"queryContext":[]} +{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":{"error":"'SELECT'","hint":""},"queryContext":[]} -- !query @@ -85,7 +85,7 @@ SELECT 1 AS two UNION SELECT 2 UNION SELECT 2 ORDER BY 1 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'SELECT'",""],"queryContext":[]} +{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":{"error":"'SELECT'","hint":""},"queryContext":[]} -- !query @@ -157,7 +157,7 @@ SELECT 1.1 AS three UNION SELECT 2 UNION SELECT 3 ORDER BY 1 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'SELECT'",""],"queryContext":[]} +{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":{"error":"'SELECT'","hint":""},"queryContext":[]} -- !query @@ -166,7 +166,7 @@ SELECT double(1.1) AS two UNION SELECT 2 UNION SELECT double(2.0) ORDER BY 1 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'SELECT'",""],"queryContext":[]} +{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":{"error":"'SELECT'","hint":""},"queryContext":[]} -- !query @@ -357,7 +357,7 @@ struct struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'SELECT'",""],"queryContext":[]} +{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":{"error":"'SELECT'","hint":""},"queryContext":[]} -- !query @@ -366,7 +366,7 @@ org.apache.spark.sql.catalyst.parser.ParseException struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'SELECT'",""],"queryContext":[]} +{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":{"error":"'SELECT'","hint":""},"queryContext":[]} -- !query @@ -375,7 +375,7 @@ org.apache.spark.sql.catalyst.parser.ParseException struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'SELECT'",""],"queryContext":[]} +{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":{"error":"'SELECT'","hint":""},"queryContext":[]} -- !query @@ -384,7 +384,7 @@ org.apache.spark.sql.catalyst.parser.ParseException struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'SELECT'",""],"queryContext":[]} +{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":{"error":"'SELECT'","hint":""},"queryContext":[]} -- !query @@ -525,7 +525,7 @@ SELECT q1 FROM int8_tbl EXCEPT SELECT q2 FROM int8_tbl ORDER BY q2 LIMIT 1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`q2`","`int8_tbl`.`q1`"],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`q2`","objectList":"`int8_tbl`.`q1`"},"queryContext":[]} -- !query @@ -684,7 +684,7 @@ SELECT cast('3.4' as decimal(38, 18)) UNION SELECT 'foo' struct<> -- !query output 
org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'SELECT'",""],"queryContext":[]} +{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":{"error":"'SELECT'","hint":""},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out index 8c0e5c2ad4986..562a22520a847 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out @@ -222,7 +222,7 @@ from range(9223372036854775804, 9223372036854775807) x struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["long overflow"," Use 'try_add' to tolerate overflow and return NULL instead.","spark.sql.ansi.enabled"],"queryContext":[{"objectType":"","objectName":"","startIndex":-1,"stopIndex":-1,"fragment":""}]} +{"errorClass":"ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":{"message":"long overflow","alternative":" Use 'try_add' to tolerate overflow and return NULL instead.","config":"spark.sql.ansi.enabled"},"queryContext":[{"objectType":"","objectName":"","startIndex":-1,"stopIndex":-1,"fragment":""}]} -- !query @@ -232,7 +232,7 @@ from range(-9223372036854775806, -9223372036854775805) x struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":["long overflow"," Use 'try_add' to tolerate overflow and return NULL instead.","spark.sql.ansi.enabled"],"queryContext":[{"objectType":"","objectName":"","startIndex":-1,"stopIndex":-1,"fragment":""}]} +{"errorClass":"ARITHMETIC_OVERFLOW","sqlState":"22003","messageParameters":{"message":"long overflow","alternative":" Use 'try_add' to tolerate overflow and return NULL instead.","config":"spark.sql.ansi.enabled"},"queryContext":[{"objectType":"","objectName":"","startIndex":-1,"stopIndex":-1,"fragment":""}]} -- !query @@ -459,7 +459,7 @@ window w as (order by f_numeric range between struct<> -- !query output org.apache.spark.SparkNumberFormatException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'NaN'","\"STRING\"","\"INT\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":82,"stopIndex":162,"fragment":"(order by f_numeric range between\n 1.1 preceding and 'NaN' following"}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'NaN'","sourceType":"\"STRING\"","targetType":"\"INT\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":82,"stopIndex":162,"fragment":"(order by f_numeric range between\n 1.1 preceding and 'NaN' following"}]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part3.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part3.sql.out index 41696d761a8cc..d7cd1b989229d 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part3.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part3.sql.out @@ -69,7 +69,7 @@ insert into datetimes values struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["failed to evaluate expression CAST('11:00 
BST' AS TIMESTAMP): [CAST_INVALID_INPUT] The value '11:00 BST' of the type \"STRING\" cannot be cast to \"TIMESTAMP\" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set \"spark.sql.ansi.enabled\" to \"false\" to bypass this error.\n== SQL(line 2, position 24) ==\n(1, timestamp '11:00', cast ('11:00 BST' as timestamp), cast ('1 year' as timestamp), ...\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n; line 1 pos 22"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"failed to evaluate expression CAST('11:00 BST' AS TIMESTAMP): [CAST_INVALID_INPUT] The value '11:00 BST' of the type \"STRING\" cannot be cast to \"TIMESTAMP\" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set \"spark.sql.ansi.enabled\" to \"false\" to bypass this error.\n== SQL(line 2, position 24) ==\n(1, timestamp '11:00', cast ('11:00 BST' as timestamp), cast ('1 year' as timestamp), ...\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n; line 1 pos 22"},"queryContext":[]} -- !query @@ -242,7 +242,7 @@ from t1 where f1 = f2 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(PARTITION BY spark_catalog.default.t1.f1 RANGE BETWEEN 1 PRECEDING AND 1 FOLLOWING)' due to data type mismatch: A range window frame cannot be used in an unordered window specification.; line 1 pos 24"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(PARTITION BY spark_catalog.default.t1.f1 RANGE BETWEEN 1 PRECEDING AND 1 FOLLOWING)' due to data type mismatch: A range window frame cannot be used in an unordered window specification.; line 1 pos 24"},"queryContext":[]} -- !query @@ -292,7 +292,7 @@ SELECT * FROM empsalary WHERE row_number() OVER (ORDER BY salary) < 10 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["It is not allowed to use window functions inside WHERE clause"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"It is not allowed to use window functions inside WHERE clause"},"queryContext":[]} -- !query @@ -301,7 +301,7 @@ SELECT * FROM empsalary INNER JOIN tenk1 ON row_number() OVER (ORDER BY salary) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["\nThe query operator `Join` contains one or more unsupported\nexpression types Aggregate, Window or Generate.\nInvalid expressions: [row_number() OVER (ORDER BY spark_catalog.default.empsalary.salary ASC NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)]"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nThe query operator `Join` contains one or more unsupported\nexpression types Aggregate, Window or Generate.\nInvalid expressions: [row_number() OVER (ORDER BY spark_catalog.default.empsalary.salary ASC NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)]"},"queryContext":[]} -- !query @@ -310,7 +310,7 @@ SELECT rank() OVER (ORDER BY 1), count(*) FROM empsalary GROUP BY 1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["\nThe query operator `Aggregate` contains one or more unsupported\nexpression types Aggregate, Window or Generate.\nInvalid expressions: [RANK() OVER (ORDER BY 1 ASC 
NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)]"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nThe query operator `Aggregate` contains one or more unsupported\nexpression types Aggregate, Window or Generate.\nInvalid expressions: [RANK() OVER (ORDER BY 1 ASC NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)]"},"queryContext":[]} -- !query @@ -319,7 +319,7 @@ SELECT * FROM rank() OVER (ORDER BY random()) struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'BY'",""],"queryContext":[]} +{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":{"error":"'BY'","hint":""},"queryContext":[]} -- !query @@ -328,7 +328,7 @@ SELECT * FROM empsalary WHERE (rank() OVER (ORDER BY random())) > 10 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["It is not allowed to use window functions inside WHERE clause"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"It is not allowed to use window functions inside WHERE clause"},"queryContext":[]} -- !query @@ -337,7 +337,7 @@ SELECT * FROM empsalary WHERE rank() OVER (ORDER BY random()) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["It is not allowed to use window functions inside WHERE clause"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"It is not allowed to use window functions inside WHERE clause"},"queryContext":[]} -- !query @@ -346,7 +346,7 @@ select rank() OVER (PARTITION BY four, ORDER BY ten) FROM tenk1 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'BY'",": extra input 'BY'"],"queryContext":[]} +{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":{"error":"'BY'","hint":": extra input 'BY'"},"queryContext":[]} -- !query @@ -355,7 +355,7 @@ SELECT range(1, 100) OVER () FROM empsalary struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Undefined function: range. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.range.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Undefined function: range. 
This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.range.; line 1 pos 7"},"queryContext":[]} -- !query @@ -364,7 +364,7 @@ SELECT ntile(0) OVER (ORDER BY ten), ten, four FROM tenk1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'ntile(0)' due to data type mismatch: Buckets expression must be positive, but got: 0; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'ntile(0)' due to data type mismatch: Buckets expression must be positive, but got: 0; line 1 pos 7"},"queryContext":[]} -- !query @@ -373,7 +373,7 @@ SELECT nth_value(four, 0) OVER (ORDER BY ten), ten, four FROM tenk1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'nth_value(spark_catalog.default.tenk1.four, 0)' due to data type mismatch: The 'offset' argument of nth_value must be greater than zero but it is 0.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'nth_value(spark_catalog.default.tenk1.four, 0)' due to data type mismatch: The 'offset' argument of nth_value must be greater than zero but it is 0.; line 1 pos 7"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part4.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part4.sql.out index f430b2759ccb3..b7c664ccac94f 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part4.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part4.sql.out @@ -498,4 +498,4 @@ FROM (VALUES(1,1),(2,2),(3,(cast('nan' as int))),(4,3),(5,4)) t(a,b) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["failed to evaluate expression CAST('nan' AS INT): [CAST_INVALID_INPUT] The value 'nan' of the type \"STRING\" cannot be cast to \"INT\" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set \"spark.sql.ansi.enabled\" to \"false\" to bypass this error.\n== SQL(line 3, position 29) ==\nFROM (VALUES(1,1),(2,2),(3,(cast('nan' as int))),(4,3),(5,4)) t(a,b)\n ^^^^^^^^^^^^^^^^^^\n; line 3 pos 6"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"failed to evaluate expression CAST('nan' AS INT): [CAST_INVALID_INPUT] The value 'nan' of the type \"STRING\" cannot be cast to \"INT\" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. 
If necessary set \"spark.sql.ansi.enabled\" to \"false\" to bypass this error.\n== SQL(line 3, position 29) ==\nFROM (VALUES(1,1),(2,2),(3,(cast('nan' as int))),(4,3),(5,4)) t(a,b)\n ^^^^^^^^^^^^^^^^^^\n; line 3 pos 6"},"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/with.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/with.sql.out index 1ed01dec2522e..bdfc1ce474b9f 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/with.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/with.sql.out @@ -216,7 +216,7 @@ SELECT * FROM outermost ORDER BY 1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Table or view not found: outermost; line 4 pos 23"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Table or view not found: outermost; line 4 pos 23"},"queryContext":[]} -- !query @@ -346,7 +346,7 @@ create table foo (with baz) struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nDataType baz is not supported.(line 1, pos 23)\n\n== SQL ==\ncreate table foo (with baz)\n-----------------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nDataType baz is not supported.(line 1, pos 23)\n\n== SQL ==\ncreate table foo (with baz)\n-----------------------^^^\n"},"queryContext":[]} -- !query @@ -355,7 +355,7 @@ create table foo (with ordinality) struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nDataType ordinality is not supported.(line 1, pos 23)\n\n== SQL ==\ncreate table foo (with ordinality)\n-----------------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nDataType ordinality is not supported.(line 1, pos 23)\n\n== SQL ==\ncreate table foo (with ordinality)\n-----------------------^^^\n"},"queryContext":[]} -- !query @@ -372,7 +372,7 @@ WITH test AS (SELECT 42) INSERT INTO test VALUES (1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Table not found: test; line 1 pos 37"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Table not found: test; line 1 pos 37"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/query_regex_column.sql.out b/sql/core/src/test/resources/sql-tests/results/query_regex_column.sql.out index 5de34d4ef8189..b4adb5901d96e 100644 --- a/sql/core/src/test/resources/sql-tests/results/query_regex_column.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/query_regex_column.sql.out @@ -33,7 +33,7 @@ SELECT `(a)?+.+` FROM testData2 WHERE a = 1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`(a)?+.+`","`testdata2`.`A`, `testdata2`.`B`, `testdata2`.`c`, `testdata2`.`d`"],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`(a)?+.+`","objectList":"`testdata2`.`A`, `testdata2`.`B`, `testdata2`.`c`, `testdata2`.`d`"},"queryContext":[]} -- !query @@ -42,7 +42,7 @@ SELECT t.`(a)?+.+` FROM testData2 t WHERE a = 1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`t`.`(a)?+.+`","`t`.`A`, `t`.`B`, `t`.`c`, 
`t`.`d`"],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`t`.`(a)?+.+`","objectList":"`t`.`A`, `t`.`B`, `t`.`c`, `t`.`d`"},"queryContext":[]} -- !query @@ -51,7 +51,7 @@ SELECT `(a|b)` FROM testData2 WHERE a = 2 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`(a|b)`","`testdata2`.`A`, `testdata2`.`B`, `testdata2`.`c`, `testdata2`.`d`"],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`(a|b)`","objectList":"`testdata2`.`A`, `testdata2`.`B`, `testdata2`.`c`, `testdata2`.`d`"},"queryContext":[]} -- !query @@ -60,7 +60,7 @@ SELECT `(a|b)?+.+` FROM testData2 WHERE a = 2 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`(a|b)?+.+`","`testdata2`.`A`, `testdata2`.`B`, `testdata2`.`c`, `testdata2`.`d`"],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`(a|b)?+.+`","objectList":"`testdata2`.`A`, `testdata2`.`B`, `testdata2`.`c`, `testdata2`.`d`"},"queryContext":[]} -- !query @@ -69,7 +69,7 @@ SELECT SUM(`(a|b)?+.+`) FROM testData2 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`(a|b)?+.+`","`testdata2`.`A`, `testdata2`.`B`, `testdata2`.`c`, `testdata2`.`d`"],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`(a|b)?+.+`","objectList":"`testdata2`.`A`, `testdata2`.`B`, `testdata2`.`c`, `testdata2`.`d`"},"queryContext":[]} -- !query @@ -78,7 +78,7 @@ SELECT SUM(`(a)`) FROM testData2 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`(a)`","`testdata2`.`A`, `testdata2`.`B`, `testdata2`.`c`, `testdata2`.`d`"],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`(a)`","objectList":"`testdata2`.`A`, `testdata2`.`B`, `testdata2`.`c`, `testdata2`.`d`"},"queryContext":[]} -- !query @@ -298,7 +298,7 @@ SELECT SUM(a) FROM testdata3 GROUP BY `(a)` struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`(a)`","`testdata3`.`a`, `testdata3`.`b`"],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`(a)`","objectList":"`testdata3`.`a`, `testdata3`.`b`"},"queryContext":[]} -- !query @@ -307,4 +307,4 @@ SELECT SUM(a) FROM testdata3 GROUP BY `(a)?+.+` struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`(a)?+.+`","`testdata3`.`a`, `testdata3`.`b`"],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`(a)?+.+`","objectList":"`testdata3`.`a`, `testdata3`.`b`"},"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/random.sql.out b/sql/core/src/test/resources/sql-tests/results/random.sql.out index f83db80b17b7a..8c9302cb31d5b 100644 --- a/sql/core/src/test/resources/sql-tests/results/random.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/random.sql.out @@ -37,7 +37,7 @@ SELECT rand(1.0) struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":"legacy","messageParameters":["cannot resolve 'rand(1.0BD)' due to data type mismatch: argument 1 requires (int or bigint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'rand(1.0BD)' due to data type mismatch: argument 1 requires (int or bigint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -78,4 +78,4 @@ SELECT rand('1') struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'rand('1')' due to data type mismatch: argument 1 requires (int or bigint) type, however, ''1'' is of string type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'rand('1')' due to data type mismatch: argument 1 requires (int or bigint) type, however, ''1'' is of string type.; line 1 pos 7"},"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/regexp-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/regexp-functions.sql.out index a29b67d11083e..5957fa14ba179 100644 --- a/sql/core/src/test/resources/sql-tests/results/regexp-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/regexp-functions.sql.out @@ -5,7 +5,7 @@ SELECT regexp_extract('1a 2b 14m', '\\d+') struct<> -- !query output java.lang.IllegalArgumentException -{"errorClass":"legacy","messageParameters":["Regex group count is 0, but the specified group index is 1"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Regex group count is 0, but the specified group index is 1"},"queryContext":[]} -- !query @@ -22,7 +22,7 @@ SELECT regexp_extract('1a 2b 14m', '\\d+', 1) struct<> -- !query output java.lang.IllegalArgumentException -{"errorClass":"legacy","messageParameters":["Regex group count is 0, but the specified group index is 1"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Regex group count is 0, but the specified group index is 1"},"queryContext":[]} -- !query @@ -31,7 +31,7 @@ SELECT regexp_extract('1a 2b 14m', '\\d+', 2) struct<> -- !query output java.lang.IllegalArgumentException -{"errorClass":"legacy","messageParameters":["Regex group count is 0, but the specified group index is 2"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Regex group count is 0, but the specified group index is 2"},"queryContext":[]} -- !query @@ -40,7 +40,7 @@ SELECT regexp_extract('1a 2b 14m', '\\d+', -1) struct<> -- !query output java.lang.IllegalArgumentException -{"errorClass":"legacy","messageParameters":["The specified group index cannot be less than zero"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"The specified group index cannot be less than zero"},"queryContext":[]} -- !query @@ -97,7 +97,7 @@ SELECT regexp_extract('1a 2b 14m', '(\\d+)([a-z]+)', 3) struct<> -- !query output java.lang.IllegalArgumentException -{"errorClass":"legacy","messageParameters":["Regex group count is 2, but the specified group index is 3"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Regex group count is 2, but the specified group index is 3"},"queryContext":[]} -- !query @@ -106,7 +106,7 @@ SELECT regexp_extract('1a 2b 14m', '(\\d+)([a-z]+)', -1) struct<> -- !query output java.lang.IllegalArgumentException -{"errorClass":"legacy","messageParameters":["The specified group index cannot be less than 
zero"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"The specified group index cannot be less than zero"},"queryContext":[]} -- !query @@ -131,7 +131,7 @@ SELECT regexp_extract('1a 2b 14m', '(?l)') struct<> -- !query output org.apache.spark.SparkRuntimeException -{"errorClass":"INVALID_PARAMETER_VALUE","sqlState":"22023","messageParameters":["regexp","`regexp_extract`","(?l)"],"queryContext":[]} +{"errorClass":"INVALID_PARAMETER_VALUE","sqlState":"22023","messageParameters":{"parameter":"regexp","functionName":"`regexp_extract`","expected":"(?l)"},"queryContext":[]} -- !query @@ -140,7 +140,7 @@ SELECT regexp_extract_all('1a 2b 14m', '\\d+') struct<> -- !query output java.lang.IllegalArgumentException -{"errorClass":"legacy","messageParameters":["Regex group count is 0, but the specified group index is 1"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Regex group count is 0, but the specified group index is 1"},"queryContext":[]} -- !query @@ -157,7 +157,7 @@ SELECT regexp_extract_all('1a 2b 14m', '\\d+', 1) struct<> -- !query output java.lang.IllegalArgumentException -{"errorClass":"legacy","messageParameters":["Regex group count is 0, but the specified group index is 1"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Regex group count is 0, but the specified group index is 1"},"queryContext":[]} -- !query @@ -166,7 +166,7 @@ SELECT regexp_extract_all('1a 2b 14m', '\\d+', 2) struct<> -- !query output java.lang.IllegalArgumentException -{"errorClass":"legacy","messageParameters":["Regex group count is 0, but the specified group index is 2"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Regex group count is 0, but the specified group index is 2"},"queryContext":[]} -- !query @@ -175,7 +175,7 @@ SELECT regexp_extract_all('1a 2b 14m', '\\d+', -1) struct<> -- !query output java.lang.IllegalArgumentException -{"errorClass":"legacy","messageParameters":["The specified group index cannot be less than zero"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"The specified group index cannot be less than zero"},"queryContext":[]} -- !query @@ -232,7 +232,7 @@ SELECT regexp_extract_all('1a 2b 14m', '(\\d+)([a-z]+)', 3) struct<> -- !query output java.lang.IllegalArgumentException -{"errorClass":"legacy","messageParameters":["Regex group count is 2, but the specified group index is 3"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Regex group count is 2, but the specified group index is 3"},"queryContext":[]} -- !query @@ -241,7 +241,7 @@ SELECT regexp_extract_all('1a 2b 14m', '(\\d+)([a-z]+)', -1) struct<> -- !query output java.lang.IllegalArgumentException -{"errorClass":"legacy","messageParameters":["The specified group index cannot be less than zero"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"The specified group index cannot be less than zero"},"queryContext":[]} -- !query @@ -266,7 +266,7 @@ SELECT regexp_extract_all('abc', col0, 1) FROM VALUES('], [') AS t(col0) struct<> -- !query output org.apache.spark.SparkRuntimeException -{"errorClass":"INVALID_PARAMETER_VALUE","sqlState":"22023","messageParameters":["regexp","`regexp_extract_all`","], ["],"queryContext":[]} +{"errorClass":"INVALID_PARAMETER_VALUE","sqlState":"22023","messageParameters":{"parameter":"regexp","functionName":"`regexp_extract_all`","expected":"], ["},"queryContext":[]} -- !query @@ -283,7 +283,7 @@ SELECT regexp_replace('healthy, 
wealthy, and wise', '\\w+thy', 'something', -2)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve 'regexp_replace('healthy, wealthy, and wise', '\\\\w+thy', 'something', -2)' due to data type mismatch: Position expression must be positive, but got: -2; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'regexp_replace('healthy, wealthy, and wise', '\\\\w+thy', 'something', -2)' due to data type mismatch: Position expression must be positive, but got: -2; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -292,7 +292,7 @@ SELECT regexp_replace('healthy, wealthy, and wise', '\\w+thy', 'something', 0)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve 'regexp_replace('healthy, wealthy, and wise', '\\\\w+thy', 'something', 0)' due to data type mismatch: Position expression must be positive, but got: 0; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'regexp_replace('healthy, wealthy, and wise', '\\\\w+thy', 'something', 0)' due to data type mismatch: Position expression must be positive, but got: 0; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -565,4 +565,4 @@ SELECT regexp_instr('abc', col0, 1) FROM VALUES(') ?') AS t(col0)
 struct<>
 -- !query output
 org.apache.spark.SparkRuntimeException
-{"errorClass":"INVALID_PARAMETER_VALUE","sqlState":"22023","messageParameters":["regexp","`regexp_instr`",") ?"],"queryContext":[]}
+{"errorClass":"INVALID_PARAMETER_VALUE","sqlState":"22023","messageParameters":{"parameter":"regexp","functionName":"`regexp_instr`","expected":") ?"},"queryContext":[]}
diff --git a/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out b/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out
index d6b950766a55e..2da2391a374f4 100644
--- a/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out
@@ -166,7 +166,7 @@ SHOW TABLE EXTENDED
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["end of input",""],"queryContext":[]}
+{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":{"error":"end of input","hint":""},"queryContext":[]}


 -- !query
@@ -186,7 +186,7 @@ SHOW TABLE EXTENDED PARTITION(c='Us', d=1)
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'PARTITION'",""],"queryContext":[]}
+{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":{"error":"'PARTITION'","hint":""},"queryContext":[]}


 -- !query
@@ -195,7 +195,7 @@ SHOW TABLE EXTENDED LIKE 'show_t*' PARTITION(c='Us', d=1)
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.analysis.NoSuchTableException
-{"errorClass":"legacy","messageParameters":["Table or view 'show_t*' not found in database 'showdb'"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Table or view 'show_t*' not found in database 'showdb'"},"queryContext":[]}


 -- !query
@@ -204,7 +204,7 @@ SHOW TABLE EXTENDED LIKE 'show_t1' PARTITION(c='Us')
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Partition spec is invalid. The spec (c) must match the partition spec (c, d) defined in table '`spark_catalog`.`showdb`.`show_t1`'"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Partition spec is invalid. The spec (c) must match the partition spec (c, d) defined in table '`spark_catalog`.`showdb`.`show_t1`'"},"queryContext":[]}


 -- !query
@@ -213,7 +213,7 @@ SHOW TABLE EXTENDED LIKE 'show_t1' PARTITION(a='Us', d=1)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["a is not a valid partition column in table `spark_catalog`.`showdb`.`show_t1`."],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"a is not a valid partition column in table `spark_catalog`.`showdb`.`show_t1`."},"queryContext":[]}


 -- !query
@@ -222,7 +222,7 @@ SHOW TABLE EXTENDED LIKE 'show_t1' PARTITION(c='Ch', d=1)
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.analysis.NoSuchPartitionException
-{"errorClass":"legacy","messageParameters":["Partition not found in table 'show_t1' database 'showdb':\nc -> Ch\nd -> 1"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Partition not found in table 'show_t1' database 'showdb':\nc -> Ch\nd -> 1"},"queryContext":[]}


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/show-views.sql.out b/sql/core/src/test/resources/sql-tests/results/show-views.sql.out
index 3ccf752ea8274..eb61dd166ebd3 100644
--- a/sql/core/src/test/resources/sql-tests/results/show-views.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/show-views.sql.out
@@ -139,7 +139,7 @@ SHOW VIEWS IN wrongdb LIKE 'view_*'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException
-{"errorClass":"legacy","messageParameters":["Database 'wrongdb' not found"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Database 'wrongdb' not found"},"queryContext":[]}


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out b/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out
index 7eb0be70cda30..a3f5bfe36a05f 100644
--- a/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out
@@ -91,7 +91,7 @@ SHOW COLUMNS IN badtable FROM showdb
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Table or view not found: showdb.badtable; line 1 pos 16"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Table or view not found: showdb.badtable; line 1 pos 16"},"queryContext":[]}


 -- !query
@@ -109,7 +109,7 @@ SHOW COLUMNS IN showdb.showcolumn1 FROM baddb
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["SHOW COLUMNS with conflicting databases: 'baddb' != 'showdb'"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"SHOW COLUMNS with conflicting databases: 'baddb' != 'showdb'"},"queryContext":[]}


 -- !query
@@ -127,7 +127,7 @@ SHOW COLUMNS IN showdb.showcolumn3
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Table or view not found: showdb.showcolumn3; line 1 pos 16"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Table or view not found: showdb.showcolumn3; line 1 pos 16"},"queryContext":[]}


 -- !query
@@ -136,7 +136,7 @@ SHOW COLUMNS IN showcolumn3 FROM showdb
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Table or view not found: showdb.showcolumn3; line 1 pos 16"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Table or view not found: showdb.showcolumn3; line 1 pos 16"},"queryContext":[]}


 -- !query
@@ -145,7 +145,7 @@ SHOW COLUMNS IN showcolumn4
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Table or view not found: showcolumn4; line 1 pos 16"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Table or view not found: showcolumn4; line 1 pos 16"},"queryContext":[]}


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/sql-compatibility-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/sql-compatibility-functions.sql.out
index 6508db2918e94..523a521ecb4af 100644
--- a/sql/core/src/test/resources/sql-tests/results/sql-compatibility-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/sql-compatibility-functions.sql.out
@@ -93,7 +93,7 @@ SELECT string(1, 2)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Function string accepts only one argument; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Function string accepts only one argument; line 1 pos 7"},"queryContext":[]}


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out
index b7c079d9cb123..8133b61042f89 100644
--- a/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out
@@ -5,7 +5,7 @@ select concat_ws()
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["requirement failed: concat_ws requires at least one argument.; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"requirement failed: concat_ws requires at least one argument.; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -14,7 +14,7 @@ select format_string()
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["requirement failed: format_string() should take at least 1 argument; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"requirement failed: format_string() should take at least 1 argument; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -167,7 +167,7 @@ SELECT split_part('11.12.13', '.', 0)
 struct<>
 -- !query output
 org.apache.spark.SparkRuntimeException
-{"errorClass":"ELEMENT_AT_BY_INDEX_ZERO","messageParameters":[],"queryContext":[]}
+{"errorClass":"ELEMENT_AT_BY_INDEX_ZERO","messageParameters":{},"queryContext":[]}


 -- !query
@@ -648,7 +648,7 @@ select decode()
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Invalid number of arguments for function decode. Expected: 2; Found: 0; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Invalid number of arguments for function decode. Expected: 2; Found: 0; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -657,7 +657,7 @@ select decode(encode('abc', 'utf-8'))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Invalid number of arguments for function decode. Expected: 2; Found: 1; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Invalid number of arguments for function decode. Expected: 2; Found: 1; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -1122,7 +1122,7 @@ select to_binary(null, cast(null as int))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["The 'format' parameter of function 'to_binary' needs to be a string literal.; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"The 'format' parameter of function 'to_binary' needs to be a string literal.; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -1131,7 +1131,7 @@ select to_binary('abc', 1)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["The 'format' parameter of function 'to_binary' needs to be a string literal.; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"The 'format' parameter of function 'to_binary' needs to be a string literal.; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -1140,7 +1140,7 @@ select to_binary('abc', 'invalidFormat')
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Invalid value for the 'format' parameter of function 'to_binary': invalidformat. The value has to be a case-insensitive string literal of 'hex', 'utf-8', or 'base64'."],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Invalid value for the 'format' parameter of function 'to_binary': invalidformat. The value has to be a case-insensitive string literal of 'hex', 'utf-8', or 'base64'."},"queryContext":[]}


 -- !query
@@ -1149,4 +1149,4 @@ select to_binary('a!', 'base64')
 struct<>
 -- !query output
 java.lang.IllegalArgumentException
-{"errorClass":"legacy","messageParameters":["Last unit does not have enough valid bits"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Last unit does not have enough valid bits"},"queryContext":[]}
diff --git a/sql/core/src/test/resources/sql-tests/results/subquery/in-subquery/in-basic.sql.out b/sql/core/src/test/resources/sql-tests/results/subquery/in-subquery/in-basic.sql.out
index 8024899ad273d..f16f365fe8150 100644
--- a/sql/core/src/test/resources/sql-tests/results/subquery/in-subquery/in-basic.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/subquery/in-subquery/in-basic.sql.out
@@ -38,7 +38,7 @@ select 1 from tab_a where (a1, b1) not in (select (a2, b2) from tab_b)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(named_struct('a1', tab_a.a1, 'b1', tab_a.b1) IN (listquery()))' due to data type mismatch: \nThe number of columns in the left hand side of an IN subquery does not match the\nnumber of columns in the output of subquery.\n#columns in left hand side: 2.\n#columns in right hand side: 1.\nLeft side columns:\n[tab_a.a1, tab_a.b1].\nRight side columns:\n[`named_struct(a2, a2, b2, b2)`].; line 1 pos 35"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(named_struct('a1', tab_a.a1, 'b1', tab_a.b1) IN (listquery()))' due to data type mismatch: \nThe number of columns in the left hand side of an IN subquery does not match the\nnumber of columns in the output of subquery.\n#columns in left hand side: 2.\n#columns in right hand side: 1.\nLeft side columns:\n[tab_a.a1, tab_a.b1].\nRight side columns:\n[`named_struct(a2, a2, b2, b2)`].; line 1 pos 35"},"queryContext":[]}


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/subquery/negative-cases/invalid-correlation.sql.out b/sql/core/src/test/resources/sql-tests/results/subquery/negative-cases/invalid-correlation.sql.out
index a0872812ed417..5be9f62fb052b 100644
--- a/sql/core/src/test/resources/sql-tests/results/subquery/negative-cases/invalid-correlation.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/subquery/negative-cases/invalid-correlation.sql.out
@@ -43,7 +43,7 @@ AND t2b = (SELECT max(avg)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["grouping expressions sequence is empty, and 't2.t2b' is not an aggregate function. Wrap '(avg(t2.t2b) AS avg)' in windowing function(s) or wrap 't2.t2b' in first() (or first_value) if you don't care which value you get."],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"grouping expressions sequence is empty, and 't2.t2b' is not an aggregate function. Wrap '(avg(t2.t2b) AS avg)' in windowing function(s) or wrap 't2.t2b' in first() (or first_value) if you don't care which value you get."},"queryContext":[]}


 -- !query
@@ -60,7 +60,7 @@ WHERE t1a IN (SELECT min(t2a)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Resolved attribute(s) t2b#x missing from min(t2a)#x,t2c#x in operator !Filter t2c#x IN (list#x [t2b#x])."],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Resolved attribute(s) t2b#x missing from min(t2a)#x,t2c#x in operator !Filter t2c#x IN (list#x [t2b#x])."},"queryContext":[]}


 -- !query
@@ -75,7 +75,7 @@ HAVING EXISTS (SELECT t2a
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Found an aggregate function in a correlated predicate that has both outer and local references, which is not supported: min((outer(t1.t1a) + t2.t2a))"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Found an aggregate function in a correlated predicate that has both outer and local references, which is not supported: min((outer(t1.t1a) + t2.t2a))"},"queryContext":[]}


 -- !query
@@ -91,7 +91,7 @@ WHERE t1a IN (SELECT t2a
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Found an aggregate function in a correlated predicate that has both outer and local references, which is not supported: min((outer(t2.t2a) + t3.t3a))"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Found an aggregate function in a correlated predicate that has both outer and local references, which is not supported: min((outer(t2.t2a) + t3.t3a))"},"queryContext":[]}


 -- !query
@@ -105,7 +105,7 @@ WHERE t1a IN (SELECT t2a
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Expressions referencing the outer query are not supported outside of WHERE/HAVING clauses:\nAggregate [min(outer(t2a#x)) AS min(outer(t2.t2a))#x]\n+- SubqueryAlias t3\n +- View (`t3`, [t3a#x,t3b#x,t3c#x])\n +- Project [cast(t3a#x as int) AS t3a#x, cast(t3b#x as int) AS t3b#x, cast(t3c#x as int) AS t3c#x]\n +- Project [t3a#x, t3b#x, t3c#x]\n +- SubqueryAlias t3\n +- LocalRelation [t3a#x, t3b#x, t3c#x]\n"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Expressions referencing the outer query are not supported outside of WHERE/HAVING clauses:\nAggregate [min(outer(t2a#x)) AS min(outer(t2.t2a))#x]\n+- SubqueryAlias t3\n +- View (`t3`, [t3a#x,t3b#x,t3c#x])\n +- Project [cast(t3a#x as int) AS t3a#x, cast(t3b#x as int) AS t3b#x, cast(t3c#x as int) AS t3c#x]\n +- Project [t3a#x, t3b#x, t3c#x]\n +- SubqueryAlias t3\n +- LocalRelation [t3a#x, t3b#x, t3c#x]\n"},"queryContext":[]}


 -- !query
@@ -127,4 +127,4 @@ ON EXISTS (SELECT 1 FROM t2 WHERE t2a > t1a)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`t1a`","`t2`.`t2a`, `t2`.`t2b`, `t2`.`t2c`"],"queryContext":[]}
+{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`t1a`","objectList":"`t2`.`t2a`, `t2`.`t2b`, `t2`.`t2c`"},"queryContext":[]}
diff --git a/sql/core/src/test/resources/sql-tests/results/subquery/negative-cases/subq-input-typecheck.sql.out b/sql/core/src/test/resources/sql-tests/results/subquery/negative-cases/subq-input-typecheck.sql.out
index 5ad574ad8e470..35573cc11fc56 100644
--- a/sql/core/src/test/resources/sql-tests/results/subquery/negative-cases/subq-input-typecheck.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/subquery/negative-cases/subq-input-typecheck.sql.out
@@ -61,7 +61,7 @@ FROM t1
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Scalar subquery must return only one column, but got 2"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Scalar subquery must return only one column, but got 2"},"queryContext":[]}


 -- !query
@@ -76,7 +76,7 @@ FROM t1
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Scalar subquery must return only one column, but got 2"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Scalar subquery must return only one column, but got 2"},"queryContext":[]}


 -- !query
@@ -89,7 +89,7 @@ t1a IN (SELECT t2a, t2b
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(t1.t1a IN (listquery(t1.t1a)))' due to data type mismatch: \nThe number of columns in the left hand side of an IN subquery does not match the\nnumber of columns in the output of subquery.\n#columns in left hand side: 1.\n#columns in right hand side: 2.\nLeft side columns:\n[t1.t1a].\nRight side columns:\n[t2.t2a, t2.t2b].; line 3 pos 4"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(t1.t1a IN (listquery(t1.t1a)))' due to data type mismatch: \nThe number of columns in the left hand side of an IN subquery does not match the\nnumber of columns in the output of subquery.\n#columns in left hand side: 1.\n#columns in right hand side: 2.\nLeft side columns:\n[t1.t1a].\nRight side columns:\n[t2.t2a, t2.t2b].; line 3 pos 4"},"queryContext":[]}


 -- !query
@@ -102,7 +102,7 @@ WHERE
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(named_struct('t1a', t1.t1a, 't1b', t1.t1b) IN (listquery(t1.t1a)))' due to data type mismatch: \nThe number of columns in the left hand side of an IN subquery does not match the\nnumber of columns in the output of subquery.\n#columns in left hand side: 2.\n#columns in right hand side: 1.\nLeft side columns:\n[t1.t1a, t1.t1b].\nRight side columns:\n[t2.t2a].; line 3 pos 11"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(named_struct('t1a', t1.t1a, 't1b', t1.t1b) IN (listquery(t1.t1a)))' due to data type mismatch: \nThe number of columns in the left hand side of an IN subquery does not match the\nnumber of columns in the output of subquery.\n#columns in left hand side: 2.\n#columns in right hand side: 1.\nLeft side columns:\n[t1.t1a, t1.t1b].\nRight side columns:\n[t2.t2a].; line 3 pos 11"},"queryContext":[]}


 -- !query
@@ -116,4 +116,4 @@ WHERE
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(named_struct('t4a', t4.t4a, 't4b', t4.t4b, 't4c', t4.t4c) IN (listquery()))' due to data type mismatch: \nThe data type of one or more elements in the left hand side of an IN subquery\nis not compatible with the data type of the output of the subquery\nMismatched columns:\n[(t4.t4a:double, t5.t5a:timestamp), (t4.t4c:string, t5.t5c:bigint)]\nLeft side:\n[double, string, string].\nRight side:\n[timestamp, string, bigint].; line 3 pos 16"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(named_struct('t4a', t4.t4a, 't4b', t4.t4b, 't4c', t4.t4c) IN (listquery()))' due to data type mismatch: \nThe data type of one or more elements in the left hand side of an IN subquery\nis not compatible with the data type of the output of the subquery\nMismatched columns:\n[(t4.t4a:double, t5.t5a:timestamp), (t4.t4c:string, t5.t5c:bigint)]\nLeft side:\n[double, string, string].\nRight side:\n[timestamp, string, bigint].; line 3 pos 16"},"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/table-aliases.sql.out b/sql/core/src/test/resources/sql-tests/results/table-aliases.sql.out index 628309410aa80..9f99ada71b077 100644 --- a/sql/core/src/test/resources/sql-tests/results/table-aliases.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/table-aliases.sql.out @@ -39,7 +39,7 @@ SELECT * FROM testData AS t(col1, col2, col3) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Number of column aliases does not match number of columns. Number of column aliases: 3; number of columns: 2.; line 1 pos 14"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Number of column aliases does not match number of columns. Number of column aliases: 3; number of columns: 2.; line 1 pos 14"},"queryContext":[]} -- !query @@ -48,7 +48,7 @@ SELECT * FROM testData AS t(col1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Number of column aliases does not match number of columns. Number of column aliases: 1; number of columns: 2.; line 1 pos 14"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Number of column aliases does not match number of columns. 
Number of column aliases: 1; number of columns: 2.; line 1 pos 14"},"queryContext":[]} -- !query @@ -57,7 +57,7 @@ SELECT a AS col1, b AS col2 FROM testData AS t(c, d) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`a`","`t`.`c`, `t`.`d`"],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`a`","objectList":"`t`.`c`, `t`.`d`"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/table-valued-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/table-valued-functions.sql.out index e4b07e3b4caef..47c243ebf6b01 100644 --- a/sql/core/src/test/resources/sql-tests/results/table-valued-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/table-valued-functions.sql.out @@ -5,7 +5,7 @@ select * from dummy(3) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["could not resolve `dummy` to a table-valued function; line 1 pos 14"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"could not resolve `dummy` to a table-valued function; line 1 pos 14"},"queryContext":[]} -- !query @@ -67,7 +67,7 @@ select * from range(1, 1, 1, 1, 1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Table-valued function range with alternatives: \n range(start: long, end: long, step: long, numSlices: integer)\n range(start: long, end: long, step: long)\n range(start: long, end: long)\n range(end: long)\ncannot be applied to (integer, integer, integer, integer, integer): Invalid number of arguments for function range. Expected: one of 1, 2, 3 and 4; Found: 5; line 1 pos 14"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Table-valued function range with alternatives: \n range(start: long, end: long, step: long, numSlices: integer)\n range(start: long, end: long, step: long)\n range(start: long, end: long)\n range(end: long)\ncannot be applied to (integer, integer, integer, integer, integer): Invalid number of arguments for function range. Expected: one of 1, 2, 3 and 4; Found: 5; line 1 pos 14"},"queryContext":[]} -- !query @@ -76,7 +76,7 @@ select * from range(1, null) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Table-valued function range with alternatives: \n range(start: long, end: long, step: long, numSlices: integer)\n range(start: long, end: long, step: long)\n range(start: long, end: long)\n range(end: long)\ncannot be applied to (integer, void): Incompatible input data type. Expected: long; Found: void; line 1 pos 14"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Table-valued function range with alternatives: \n range(start: long, end: long, step: long, numSlices: integer)\n range(start: long, end: long, step: long)\n range(start: long, end: long)\n range(end: long)\ncannot be applied to (integer, void): Incompatible input data type. 
Expected: long; Found: void; line 1 pos 14"},"queryContext":[]} -- !query @@ -85,7 +85,7 @@ select * from range(array(1, 2, 3)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Table-valued function range with alternatives: \n range(start: long, end: long, step: long, numSlices: integer)\n range(start: long, end: long, step: long)\n range(start: long, end: long)\n range(end: long)\ncannot be applied to (array): Incompatible input data type. Expected: long; Found: array; line 1 pos 14"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Table-valued function range with alternatives: \n range(start: long, end: long, step: long, numSlices: integer)\n range(start: long, end: long, step: long)\n range(start: long, end: long)\n range(end: long)\ncannot be applied to (array): Incompatible input data type. Expected: long; Found: array; line 1 pos 14"},"queryContext":[]} -- !query @@ -94,7 +94,7 @@ select * from range(0, 5, 0) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Table-valued function range with alternatives: \n range(start: long, end: long, step: long, numSlices: integer)\n range(start: long, end: long, step: long)\n range(start: long, end: long)\n range(end: long)\ncannot be applied to (integer, integer, integer): requirement failed: step (0) cannot be 0; line 1 pos 14"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Table-valued function range with alternatives: \n range(start: long, end: long, step: long, numSlices: integer)\n range(start: long, end: long, step: long)\n range(start: long, end: long)\n range(end: long)\ncannot be applied to (integer, integer, integer): requirement failed: step (0) cannot be 0; line 1 pos 14"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/tablesample-negative.sql.out b/sql/core/src/test/resources/sql-tests/results/tablesample-negative.sql.out index 85e84ef2e0231..247effde581c3 100644 --- a/sql/core/src/test/resources/sql-tests/results/tablesample-negative.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/tablesample-negative.sql.out @@ -29,7 +29,7 @@ SELECT mydb1.t1 FROM t1 TABLESAMPLE (-1 PERCENT) struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nSampling fraction (-0.01) must be on interval [0, 1](line 1, pos 24)\n\n== SQL ==\nSELECT mydb1.t1 FROM t1 TABLESAMPLE (-1 PERCENT)\n------------------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nSampling fraction (-0.01) must be on interval [0, 1](line 1, pos 24)\n\n== SQL ==\nSELECT mydb1.t1 FROM t1 TABLESAMPLE (-1 PERCENT)\n------------------------^^^\n"},"queryContext":[]} -- !query @@ -38,7 +38,7 @@ SELECT mydb1.t1 FROM t1 TABLESAMPLE (101 PERCENT) struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nSampling fraction (1.01) must be on interval [0, 1](line 1, pos 24)\n\n== SQL ==\nSELECT mydb1.t1 FROM t1 TABLESAMPLE (101 PERCENT)\n------------------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nSampling fraction (1.01) must be on interval [0, 1](line 1, pos 24)\n\n== SQL ==\nSELECT mydb1.t1 FROM t1 TABLESAMPLE (101 PERCENT)\n------------------------^^^\n"},"queryContext":[]} -- !query diff --git 
a/sql/core/src/test/resources/sql-tests/results/timestamp-ntz.sql.out b/sql/core/src/test/resources/sql-tests/results/timestamp-ntz.sql.out index 473d2d94755d6..8cc767937db34 100644 --- a/sql/core/src/test/resources/sql-tests/results/timestamp-ntz.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/timestamp-ntz.sql.out @@ -45,7 +45,7 @@ SELECT make_timestamp_ntz(2021, 07, 11, 6, 30, 45.678, 'CET') struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Invalid number of arguments for function make_timestamp_ntz. Expected: 6; Found: 7; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Invalid number of arguments for function make_timestamp_ntz. Expected: 6; Found: 7; line 1 pos 7"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out index 0eb6f010611a2..06178beb5fb03 100644 --- a/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out @@ -13,7 +13,7 @@ select timestamp '2019-01-01中文' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the TIMESTAMP value: 2019-01-01中文(line 1, pos 7)\n\n== SQL ==\nselect timestamp '2019-01-01中文'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the TIMESTAMP value: 2019-01-01中文(line 1, pos 7)\n\n== SQL ==\nselect timestamp '2019-01-01中文'\n-------^^^\n"},"queryContext":[]} -- !query @@ -22,7 +22,7 @@ select timestamp'4294967297' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the TIMESTAMP value: 4294967297(line 1, pos 7)\n\n== SQL ==\nselect timestamp'4294967297'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the TIMESTAMP value: 4294967297(line 1, pos 7)\n\n== SQL ==\nselect timestamp'4294967297'\n-------^^^\n"},"queryContext":[]} -- !query @@ -31,7 +31,7 @@ select timestamp'2021-01-01T12:30:4294967297.123456' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the TIMESTAMP value: 2021-01-01T12:30:4294967297.123456(line 1, pos 7)\n\n== SQL ==\nselect timestamp'2021-01-01T12:30:4294967297.123456'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the TIMESTAMP value: 2021-01-01T12:30:4294967297.123456(line 1, pos 7)\n\n== SQL ==\nselect timestamp'2021-01-01T12:30:4294967297.123456'\n-------^^^\n"},"queryContext":[]} -- !query @@ -176,7 +176,7 @@ select TIMESTAMP_SECONDS(1230219000123123) struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"long overflow"},"queryContext":[]} -- !query @@ -185,7 +185,7 @@ select TIMESTAMP_SECONDS(-1230219000123123) struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"long overflow"},"queryContext":[]} -- !query @@ -194,7 +194,7 @@ select TIMESTAMP_MILLIS(92233720368547758) struct<> -- !query output java.lang.ArithmeticException 
-{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"long overflow"},"queryContext":[]} -- !query @@ -203,7 +203,7 @@ select TIMESTAMP_MILLIS(-92233720368547758) struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"long overflow"},"queryContext":[]} -- !query @@ -212,7 +212,7 @@ select TIMESTAMP_SECONDS(0.1234567) struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["Rounding necessary"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Rounding necessary"},"queryContext":[]} -- !query @@ -590,7 +590,7 @@ select timestamp'2011-11-11 11:11:11' - '2011-11-11 11:11:10' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' - '2011-11-11 11:11:10')' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, ''2011-11-11 11:11:10'' is of string type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' - '2011-11-11 11:11:10')' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, ''2011-11-11 11:11:10'' is of string type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -599,7 +599,7 @@ select '2011-11-11 11:11:11' - timestamp'2011-11-11 11:11:10' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '('2011-11-11 11:11:11' - TIMESTAMP '2011-11-11 11:11:10')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, ''2011-11-11 11:11:11'' is of string type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '('2011-11-11 11:11:11' - TIMESTAMP '2011-11-11 11:11:10')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, ''2011-11-11 11:11:11'' is of string type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -632,7 +632,7 @@ select str - timestamp'2011-11-11 11:11:11' from ts_view struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(ts_view.str - TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'ts_view.str' is of string type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(ts_view.str - TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'ts_view.str' is of string type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -641,7 +641,7 @@ select timestamp'2011-11-11 11:11:11' - str from ts_view struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' - ts_view.str)' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'ts_view.str' is of string type.; line 1 pos 7"],"queryContext":[]} 
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' - ts_view.str)' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'ts_view.str' is of string type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -650,7 +650,7 @@ select timestamp'2011-11-11 11:11:11' + '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' + CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(TIMESTAMP '2011-11-11 11:11:11' + CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' + CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(TIMESTAMP '2011-11-11 11:11:11' + CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7"},"queryContext":[]} -- !query @@ -659,7 +659,7 @@ select '1' + timestamp'2011-11-11 11:11:11' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) + TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) + TIMESTAMP '2011-11-11 11:11:11')' (double and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS DOUBLE) + TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) + TIMESTAMP '2011-11-11 11:11:11')' (double and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -668,7 +668,7 @@ select timestamp'2011-11-11 11:11:11' + null struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' + NULL)' due to data type mismatch: differing types in '(TIMESTAMP '2011-11-11 11:11:11' + NULL)' (timestamp and void).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' + NULL)' due to data type mismatch: differing types in '(TIMESTAMP '2011-11-11 11:11:11' + NULL)' (timestamp and void).; line 1 pos 7"},"queryContext":[]} -- !query @@ -677,7 +677,7 @@ select null + timestamp'2011-11-11 11:11:11' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(NULL + TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(NULL + TIMESTAMP '2011-11-11 11:11:11')' (void and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(NULL + TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(NULL + TIMESTAMP '2011-11-11 11:11:11')' (void and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -709,7 +709,7 @@ select to_timestamp('2019-10-06 A', 'yyyy-MM-dd GGGGG') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'yyyy-MM-dd GGGGG'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'yyyy-MM-dd 
GGGGG'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -718,7 +718,7 @@ select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEEE') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'dd MM yyyy EEEEEE'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'dd MM yyyy EEEEEE'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -727,7 +727,7 @@ select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'dd MM yyyy EEEEE'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'dd MM yyyy EEEEE'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -736,7 +736,7 @@ select unix_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'dd MM yyyy EEEEE'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'dd MM yyyy EEEEE'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -745,7 +745,7 @@ select from_json('{"t":"26/October/2015"}', 't Timestamp', map('timestampFormat' struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'dd/MMMMM/yyyy'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'dd/MMMMM/yyyy'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query @@ -754,7 +754,7 @@ select from_csv('26/October/2015', 't Timestamp', map('timestampFormat', 'dd/MMM struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'dd/MMMMM/yyyy'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'dd/MMMMM/yyyy'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out index 04535ca2f1836..70de313430f9e 100644 --- a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out @@ -13,7 +13,7 @@ select timestamp '2019-01-01中文' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException 
-{"errorClass":"legacy","messageParameters":["\nCannot parse the TIMESTAMP value: 2019-01-01中文(line 1, pos 7)\n\n== SQL ==\nselect timestamp '2019-01-01中文'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the TIMESTAMP value: 2019-01-01中文(line 1, pos 7)\n\n== SQL ==\nselect timestamp '2019-01-01中文'\n-------^^^\n"},"queryContext":[]} -- !query @@ -22,7 +22,7 @@ select timestamp'4294967297' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the TIMESTAMP value: 4294967297(line 1, pos 7)\n\n== SQL ==\nselect timestamp'4294967297'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the TIMESTAMP value: 4294967297(line 1, pos 7)\n\n== SQL ==\nselect timestamp'4294967297'\n-------^^^\n"},"queryContext":[]} -- !query @@ -31,7 +31,7 @@ select timestamp'2021-01-01T12:30:4294967297.123456' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the TIMESTAMP value: 2021-01-01T12:30:4294967297.123456(line 1, pos 7)\n\n== SQL ==\nselect timestamp'2021-01-01T12:30:4294967297.123456'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the TIMESTAMP value: 2021-01-01T12:30:4294967297.123456(line 1, pos 7)\n\n== SQL ==\nselect timestamp'2021-01-01T12:30:4294967297.123456'\n-------^^^\n"},"queryContext":[]} -- !query @@ -80,7 +80,7 @@ SELECT make_timestamp(2021, 07, 11, 6, 30, 60.007) struct<> -- !query output org.apache.spark.SparkDateTimeException -{"errorClass":"INVALID_FRACTION_OF_SECOND","sqlState":"22023","messageParameters":["\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"INVALID_FRACTION_OF_SECOND","sqlState":"22023","messageParameters":{"ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query @@ -105,7 +105,7 @@ SELECT make_timestamp(1, 1, 1, 1, 1, 61) struct<> -- !query output java.time.DateTimeException -{"errorClass":"legacy","messageParameters":["Invalid value for SecondOfMinute (valid values 0 - 59): 61. If necessary set spark.sql.ansi.enabled to false to bypass this error."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Invalid value for SecondOfMinute (valid values 0 - 59): 61. If necessary set spark.sql.ansi.enabled to false to bypass this error."},"queryContext":[]} -- !query @@ -130,7 +130,7 @@ SELECT make_timestamp(1, 1, 1, 1, 1, 99.999999) struct<> -- !query output java.time.DateTimeException -{"errorClass":"legacy","messageParameters":["Invalid value for SecondOfMinute (valid values 0 - 59): 99. If necessary set spark.sql.ansi.enabled to false to bypass this error."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Invalid value for SecondOfMinute (valid values 0 - 59): 99. If necessary set spark.sql.ansi.enabled to false to bypass this error."},"queryContext":[]} -- !query @@ -139,7 +139,7 @@ SELECT make_timestamp(1, 1, 1, 1, 1, 999.999999) struct<> -- !query output java.time.DateTimeException -{"errorClass":"legacy","messageParameters":["Invalid value for SecondOfMinute (valid values 0 - 59): 999. If necessary set spark.sql.ansi.enabled to false to bypass this error."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Invalid value for SecondOfMinute (valid values 0 - 59): 999. 
If necessary set spark.sql.ansi.enabled to false to bypass this error."},"queryContext":[]} -- !query @@ -180,7 +180,7 @@ select TIMESTAMP_SECONDS(1230219000123123) struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"long overflow"},"queryContext":[]} -- !query @@ -189,7 +189,7 @@ select TIMESTAMP_SECONDS(-1230219000123123) struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"long overflow"},"queryContext":[]} -- !query @@ -198,7 +198,7 @@ select TIMESTAMP_MILLIS(92233720368547758) struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"long overflow"},"queryContext":[]} -- !query @@ -207,7 +207,7 @@ select TIMESTAMP_MILLIS(-92233720368547758) struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"long overflow"},"queryContext":[]} -- !query @@ -216,7 +216,7 @@ select TIMESTAMP_SECONDS(0.1234567) struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["Rounding necessary"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Rounding necessary"},"queryContext":[]} -- !query @@ -314,7 +314,7 @@ select to_timestamp(1) struct<> -- !query output org.apache.spark.SparkDateTimeException -{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":["'1'","\"STRING\"","\"TIMESTAMP_NTZ\"","\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":-1,"stopIndex":-1,"fragment":""}]} +{"errorClass":"CAST_INVALID_INPUT","sqlState":"42000","messageParameters":{"expression":"'1'","sourceType":"\"STRING\"","targetType":"\"TIMESTAMP_NTZ\"","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":-1,"stopIndex":-1,"fragment":""}]} -- !query @@ -323,7 +323,7 @@ select to_timestamp('2019-10-06 10:11:12.', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]') struct<> -- !query output org.apache.spark.SparkDateTimeException -{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text '2019-10-06 10:11:12.' could not be parsed at index 20","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":{"message":"Text '2019-10-06 10:11:12.' 
could not be parsed at index 20","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query @@ -388,7 +388,7 @@ select to_timestamp('2019-10-06 10:11:12.1234567PST', 'yyyy-MM-dd HH:mm:ss.SSSSS struct<> -- !query output org.apache.spark.SparkDateTimeException -{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text '2019-10-06 10:11:12.1234567PST' could not be parsed, unparsed text found at index 26","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":{"message":"Text '2019-10-06 10:11:12.1234567PST' could not be parsed, unparsed text found at index 26","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query @@ -405,7 +405,7 @@ select to_timestamp('223456 2019-10-06 10:11:12.123456PST', 'SSSSSS yyyy-MM-dd H struct<> -- !query output org.apache.spark.SparkDateTimeException -{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text '223456 2019-10-06 10:11:12.123456PST' could not be parsed at index 27","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":{"message":"Text '223456 2019-10-06 10:11:12.123456PST' could not be parsed at index 27","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query @@ -470,7 +470,7 @@ select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyyyy-MM-dd'S'HH:mm") struct<> -- !query output org.apache.spark.SparkDateTimeException -{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text '12.1232019-10-06S10:11' could not be parsed at index 7","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":{"message":"Text '12.1232019-10-06S10:11' could not be parsed at index 7","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query @@ -479,7 +479,7 @@ select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyy-MM-dd'S'HH:mm") struct<> -- !query output org.apache.spark.SparkDateTimeException -{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Text '12.1232019-10-06S10:11' could not be parsed at index 9","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":{"message":"Text '12.1232019-10-06S10:11' could not be parsed at index 9","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query @@ -552,7 +552,7 @@ select to_timestamp("02-29", "MM-dd") struct<> -- !query output org.apache.spark.SparkDateTimeException -{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":["Invalid date 'February 29' as '1970' is not a leap year","\"spark.sql.ansi.enabled\""],"queryContext":[]} +{"errorClass":"CANNOT_PARSE_TIMESTAMP","sqlState":"42000","messageParameters":{"message":"Invalid date 'February 29' as '1970' is not a leap year","ansiConfig":"\"spark.sql.ansi.enabled\""},"queryContext":[]} -- !query @@ -657,7 +657,7 @@ select timestamp'2011-11-11 11:11:11' + '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + CAST('1' AS TIMESTAMP_NTZ))' due to data type mismatch: '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + CAST('1' AS TIMESTAMP_NTZ))' requires (numeric or interval day to second or interval year to month or interval) type, not timestamp_ntz; line 1 pos 7"],"queryContext":[]} 
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + CAST('1' AS TIMESTAMP_NTZ))' due to data type mismatch: '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + CAST('1' AS TIMESTAMP_NTZ))' requires (numeric or interval day to second or interval year to month or interval) type, not timestamp_ntz; line 1 pos 7"},"queryContext":[]} -- !query @@ -666,7 +666,7 @@ select '1' + timestamp'2011-11-11 11:11:11' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS TIMESTAMP_NTZ) + TIMESTAMP_NTZ '2011-11-11 11:11:11')' due to data type mismatch: '(CAST('1' AS TIMESTAMP_NTZ) + TIMESTAMP_NTZ '2011-11-11 11:11:11')' requires (numeric or interval day to second or interval year to month or interval) type, not timestamp_ntz; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS TIMESTAMP_NTZ) + TIMESTAMP_NTZ '2011-11-11 11:11:11')' due to data type mismatch: '(CAST('1' AS TIMESTAMP_NTZ) + TIMESTAMP_NTZ '2011-11-11 11:11:11')' requires (numeric or interval day to second or interval year to month or interval) type, not timestamp_ntz; line 1 pos 7"},"queryContext":[]} -- !query @@ -675,7 +675,7 @@ select timestamp'2011-11-11 11:11:11' + null struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + NULL)' due to data type mismatch: differing types in '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + NULL)' (timestamp_ntz and void).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + NULL)' due to data type mismatch: differing types in '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + NULL)' (timestamp_ntz and void).; line 1 pos 7"},"queryContext":[]} -- !query @@ -684,7 +684,7 @@ select null + timestamp'2011-11-11 11:11:11' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(NULL + TIMESTAMP_NTZ '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(NULL + TIMESTAMP_NTZ '2011-11-11 11:11:11')' (void and timestamp_ntz).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(NULL + TIMESTAMP_NTZ '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(NULL + TIMESTAMP_NTZ '2011-11-11 11:11:11')' (void and timestamp_ntz).; line 1 pos 7"},"queryContext":[]} -- !query @@ -716,7 +716,7 @@ select to_timestamp('2019-10-06 A', 'yyyy-MM-dd GGGGG') struct<> -- !query output java.lang.RuntimeException -{"errorClass":"legacy","messageParameters":["Fail to recognize 'yyyy-MM-dd GGGGG' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Fail to recognize 'yyyy-MM-dd GGGGG' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html"},"queryContext":[]} -- !query @@ -725,7 +725,7 @@ select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEEE') struct<> -- !query output java.lang.RuntimeException -{"errorClass":"legacy","messageParameters":["Fail to recognize 'dd MM yyyy EEEEEE' pattern in the DateTimeFormatter. 
You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Fail to recognize 'dd MM yyyy EEEEEE' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html"},"queryContext":[]} -- !query @@ -734,7 +734,7 @@ select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE') struct<> -- !query output java.lang.RuntimeException -{"errorClass":"legacy","messageParameters":["Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html"},"queryContext":[]} -- !query @@ -743,7 +743,7 @@ select unix_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'dd MM yyyy EEEEE'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'dd MM yyyy EEEEE'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out index da67285241e83..2e984c513e224 100644 --- a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out @@ -13,7 +13,7 @@ select timestamp '2019-01-01中文' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the TIMESTAMP value: 2019-01-01中文(line 1, pos 7)\n\n== SQL ==\nselect timestamp '2019-01-01中文'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the TIMESTAMP value: 2019-01-01中文(line 1, pos 7)\n\n== SQL ==\nselect timestamp '2019-01-01中文'\n-------^^^\n"},"queryContext":[]} -- !query @@ -22,7 +22,7 @@ select timestamp'4294967297' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the TIMESTAMP value: 4294967297(line 1, pos 7)\n\n== SQL ==\nselect timestamp'4294967297'\n-------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the TIMESTAMP value: 4294967297(line 1, pos 7)\n\n== SQL ==\nselect timestamp'4294967297'\n-------^^^\n"},"queryContext":[]} -- !query @@ -31,7 +31,7 @@ select timestamp'2021-01-01T12:30:4294967297.123456' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nCannot parse the TIMESTAMP value: 2021-01-01T12:30:4294967297.123456(line 1, pos 7)\n\n== SQL ==\nselect timestamp'2021-01-01T12:30:4294967297.123456'\n-------^^^\n"],"queryContext":[]} 
+{"errorClass":"legacy","messageParameters":{"message":"\nCannot parse the TIMESTAMP value: 2021-01-01T12:30:4294967297.123456(line 1, pos 7)\n\n== SQL ==\nselect timestamp'2021-01-01T12:30:4294967297.123456'\n-------^^^\n"},"queryContext":[]} -- !query @@ -176,7 +176,7 @@ select TIMESTAMP_SECONDS(1230219000123123) struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"long overflow"},"queryContext":[]} -- !query @@ -185,7 +185,7 @@ select TIMESTAMP_SECONDS(-1230219000123123) struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"long overflow"},"queryContext":[]} -- !query @@ -194,7 +194,7 @@ select TIMESTAMP_MILLIS(92233720368547758) struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"long overflow"},"queryContext":[]} -- !query @@ -203,7 +203,7 @@ select TIMESTAMP_MILLIS(-92233720368547758) struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["long overflow"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"long overflow"},"queryContext":[]} -- !query @@ -212,7 +212,7 @@ select TIMESTAMP_SECONDS(0.1234567) struct<> -- !query output java.lang.ArithmeticException -{"errorClass":"legacy","messageParameters":["Rounding necessary"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Rounding necessary"},"queryContext":[]} -- !query @@ -590,7 +590,7 @@ select timestamp'2011-11-11 11:11:11' - '2011-11-11 11:11:10' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(TIMESTAMP_NTZ '2011-11-11 11:11:11' - '2011-11-11 11:11:10')' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, ''2011-11-11 11:11:10'' is of string type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(TIMESTAMP_NTZ '2011-11-11 11:11:11' - '2011-11-11 11:11:10')' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, ''2011-11-11 11:11:10'' is of string type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -599,7 +599,7 @@ select '2011-11-11 11:11:11' - timestamp'2011-11-11 11:11:10' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '('2011-11-11 11:11:11' - TIMESTAMP_NTZ '2011-11-11 11:11:10')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, ''2011-11-11 11:11:11'' is of string type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '('2011-11-11 11:11:11' - TIMESTAMP_NTZ '2011-11-11 11:11:10')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, ''2011-11-11 11:11:11'' is of string type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -632,7 +632,7 @@ select str - timestamp'2011-11-11 11:11:11' from ts_view struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(ts_view.str - 
TIMESTAMP_NTZ '2011-11-11 11:11:11')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'ts_view.str' is of string type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(ts_view.str - TIMESTAMP_NTZ '2011-11-11 11:11:11')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'ts_view.str' is of string type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -641,7 +641,7 @@ select timestamp'2011-11-11 11:11:11' - str from ts_view struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(TIMESTAMP_NTZ '2011-11-11 11:11:11' - ts_view.str)' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'ts_view.str' is of string type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(TIMESTAMP_NTZ '2011-11-11 11:11:11' - ts_view.str)' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'ts_view.str' is of string type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -650,7 +650,7 @@ select timestamp'2011-11-11 11:11:11' + '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + CAST('1' AS DOUBLE))' (timestamp_ntz and double).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + CAST('1' AS DOUBLE))' (timestamp_ntz and double).; line 1 pos 7"},"queryContext":[]} -- !query @@ -659,7 +659,7 @@ select '1' + timestamp'2011-11-11 11:11:11' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) + TIMESTAMP_NTZ '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) + TIMESTAMP_NTZ '2011-11-11 11:11:11')' (double and timestamp_ntz).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS DOUBLE) + TIMESTAMP_NTZ '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) + TIMESTAMP_NTZ '2011-11-11 11:11:11')' (double and timestamp_ntz).; line 1 pos 7"},"queryContext":[]} -- !query @@ -668,7 +668,7 @@ select timestamp'2011-11-11 11:11:11' + null struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + NULL)' due to data type mismatch: differing types in '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + NULL)' (timestamp_ntz and void).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + NULL)' due to data type mismatch: differing types in '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + NULL)' (timestamp_ntz and void).; line 1 pos 7"},"queryContext":[]} -- !query @@ -677,7 +677,7 @@ select null + timestamp'2011-11-11 11:11:11' struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":"legacy","messageParameters":["cannot resolve '(NULL + TIMESTAMP_NTZ '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(NULL + TIMESTAMP_NTZ '2011-11-11 11:11:11')' (void and timestamp_ntz).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(NULL + TIMESTAMP_NTZ '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(NULL + TIMESTAMP_NTZ '2011-11-11 11:11:11')' (void and timestamp_ntz).; line 1 pos 7"},"queryContext":[]} -- !query @@ -709,7 +709,7 @@ select to_timestamp('2019-10-06 A', 'yyyy-MM-dd GGGGG') struct<> -- !query output java.lang.RuntimeException -{"errorClass":"legacy","messageParameters":["Fail to recognize 'yyyy-MM-dd GGGGG' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Fail to recognize 'yyyy-MM-dd GGGGG' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html"},"queryContext":[]} -- !query @@ -718,7 +718,7 @@ select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEEE') struct<> -- !query output java.lang.RuntimeException -{"errorClass":"legacy","messageParameters":["Fail to recognize 'dd MM yyyy EEEEEE' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Fail to recognize 'dd MM yyyy EEEEEE' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html"},"queryContext":[]} -- !query @@ -727,7 +727,7 @@ select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE') struct<> -- !query output java.lang.RuntimeException -{"errorClass":"legacy","messageParameters":["Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 
You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html"},"queryContext":[]} -- !query @@ -736,7 +736,7 @@ select unix_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'dd MM yyyy EEEEE'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'dd MM yyyy EEEEE'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/timezone.sql.out b/sql/core/src/test/resources/sql-tests/results/timezone.sql.out index dd8d7407a2a07..22bba0b922195 100644 --- a/sql/core/src/test/resources/sql-tests/results/timezone.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/timezone.sql.out @@ -45,7 +45,7 @@ SET TIME ZONE struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nInvalid time zone displacement value(line 1, pos 0)\n\n== SQL ==\nSET TIME ZONE\n^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nInvalid time zone displacement value(line 1, pos 0)\n\n== SQL ==\nSET TIME ZONE\n^^^\n"},"queryContext":[]} -- !query @@ -54,7 +54,7 @@ SET TIME ZONE 'invalid/zone' struct<> -- !query output java.lang.IllegalArgumentException -{"errorClass":"legacy","messageParameters":["'invalid/zone' in spark.sql.session.timeZone is invalid. Cannot resolve the given timezone with ZoneId.of(_, ZoneId.SHORT_IDS)"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"'invalid/zone' in spark.sql.session.timeZone is invalid. 
Cannot resolve the given timezone with ZoneId.of(_, ZoneId.SHORT_IDS)"},"queryContext":[]} -- !query @@ -63,7 +63,7 @@ SET TIME ZONE INTERVAL 3 DAYS struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nThe interval value must be in the range of [-18, +18] hours with second precision(line 1, pos 14)\n\n== SQL ==\nSET TIME ZONE INTERVAL 3 DAYS\n--------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nThe interval value must be in the range of [-18, +18] hours with second precision(line 1, pos 14)\n\n== SQL ==\nSET TIME ZONE INTERVAL 3 DAYS\n--------------^^^\n"},"queryContext":[]} -- !query @@ -72,7 +72,7 @@ SET TIME ZONE INTERVAL 24 HOURS struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nThe interval value must be in the range of [-18, +18] hours with second precision(line 1, pos 14)\n\n== SQL ==\nSET TIME ZONE INTERVAL 24 HOURS\n--------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nThe interval value must be in the range of [-18, +18] hours with second precision(line 1, pos 14)\n\n== SQL ==\nSET TIME ZONE INTERVAL 24 HOURS\n--------------^^^\n"},"queryContext":[]} -- !query @@ -81,7 +81,7 @@ SET TIME ZONE INTERVAL '19:40:32' HOUR TO SECOND struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nThe interval value must be in the range of [-18, +18] hours with second precision(line 1, pos 14)\n\n== SQL ==\nSET TIME ZONE INTERVAL '19:40:32' HOUR TO SECOND\n--------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nThe interval value must be in the range of [-18, +18] hours with second precision(line 1, pos 14)\n\n== SQL ==\nSET TIME ZONE INTERVAL '19:40:32' HOUR TO SECOND\n--------------^^^\n"},"queryContext":[]} -- !query @@ -90,7 +90,7 @@ SET TIME ZONE INTERVAL 10 HOURS 'GMT+1' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nInvalid time zone displacement value(line 1, pos 0)\n\n== SQL ==\nSET TIME ZONE INTERVAL 10 HOURS 'GMT+1'\n^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nInvalid time zone displacement value(line 1, pos 0)\n\n== SQL ==\nSET TIME ZONE INTERVAL 10 HOURS 'GMT+1'\n^^^\n"},"queryContext":[]} -- !query @@ -99,4 +99,4 @@ SET TIME ZONE INTERVAL 10 HOURS 1 MILLISECOND struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nThe interval value must be in the range of [-18, +18] hours with second precision(line 1, pos 14)\n\n== SQL ==\nSET TIME ZONE INTERVAL 10 HOURS 1 MILLISECOND\n--------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nThe interval value must be in the range of [-18, +18] hours with second precision(line 1, pos 14)\n\n== SQL ==\nSET TIME ZONE INTERVAL 10 HOURS 1 MILLISECOND\n--------------^^^\n"},"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/transform.sql.out b/sql/core/src/test/resources/sql-tests/results/transform.sql.out index 6ee99acaeab18..4fdcb50a26d42 100644 --- a/sql/core/src/test/resources/sql-tests/results/transform.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/transform.sql.out @@ -400,7 +400,7 @@ SELECT a, b, decode(c, 'UTF-8'), d, e, f, g, h, 
i, j, k, l FROM ( struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nLINES TERMINATED BY only supports newline '\\n' right now: @(line 3, pos 4)\n\n== SQL ==\nSELECT a, b, decode(c, 'UTF-8'), d, e, f, g, h, i, j, k, l FROM (\n SELECT TRANSFORM(a, b, c, d, e, f, g, h, i, j, k, l)\n ROW FORMAT DELIMITED\n----^^^\n FIELDS TERMINATED BY ','\n LINES TERMINATED BY '@'\n NULL DEFINED AS 'NULL'\n USING 'cat' AS (\n a string,\n b string,\n c string,\n d string,\n e string,\n f string,\n g string,\n h string,\n i string,\n j string,\n k string,\n l string)\n ROW FORMAT DELIMITED\n FIELDS TERMINATED BY ','\n LINES TERMINATED BY '@'\n NULL DEFINED AS 'NULL'\n FROM t\n) tmp\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nLINES TERMINATED BY only supports newline '\\n' right now: @(line 3, pos 4)\n\n== SQL ==\nSELECT a, b, decode(c, 'UTF-8'), d, e, f, g, h, i, j, k, l FROM (\n SELECT TRANSFORM(a, b, c, d, e, f, g, h, i, j, k, l)\n ROW FORMAT DELIMITED\n----^^^\n FIELDS TERMINATED BY ','\n LINES TERMINATED BY '@'\n NULL DEFINED AS 'NULL'\n USING 'cat' AS (\n a string,\n b string,\n c string,\n d string,\n e string,\n f string,\n g string,\n h string,\n i string,\n j string,\n k string,\n l string)\n ROW FORMAT DELIMITED\n FIELDS TERMINATED BY ','\n LINES TERMINATED BY '@'\n NULL DEFINED AS 'NULL'\n FROM t\n) tmp\n"},"queryContext":[]} -- !query @@ -686,7 +686,7 @@ WHERE a <= 4 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"UNSUPPORTED_FEATURE","errorSubClass":"TRANSFORM_DISTINCT_ALL","sqlState":"0A000","messageParameters":[],"queryContext":[]} +{"errorClass":"UNSUPPORTED_FEATURE","errorSubClass":"TRANSFORM_DISTINCT_ALL","sqlState":"0A000","messageParameters":{},"queryContext":[]} -- !query @@ -698,7 +698,7 @@ WHERE a <= 4 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"UNSUPPORTED_FEATURE","errorSubClass":"TRANSFORM_DISTINCT_ALL","sqlState":"0A000","messageParameters":[],"queryContext":[]} +{"errorClass":"UNSUPPORTED_FEATURE","errorSubClass":"TRANSFORM_DISTINCT_ALL","sqlState":"0A000","messageParameters":{},"queryContext":[]} -- !query @@ -711,7 +711,7 @@ GROUP BY b struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'AS'",""],"queryContext":[]} +{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":{"error":"'AS'","hint":""},"queryContext":[]} -- !query @@ -724,7 +724,7 @@ GROUP BY b struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'b_1'",""],"queryContext":[]} +{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":{"error":"'b_1'","hint":""},"queryContext":[]} -- !query @@ -737,7 +737,7 @@ GROUP BY b struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":["'AS'",""],"queryContext":[]} +{"errorClass":"PARSE_SYNTAX_ERROR","sqlState":"42000","messageParameters":{"error":"'AS'","hint":""},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/try_arithmetic.sql.out b/sql/core/src/test/resources/sql-tests/results/try_arithmetic.sql.out index 758351f01b2e4..5c299396ace64 100644 --- 
a/sql/core/src/test/resources/sql-tests/results/try_arithmetic.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/try_arithmetic.sql.out @@ -141,7 +141,7 @@ SELECT try_add(interval 2 year, interval 2 second) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'INTERVAL '2' YEAR + INTERVAL '02' SECOND' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2' YEAR' is of interval year type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'INTERVAL '2' YEAR + INTERVAL '02' SECOND' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2' YEAR' is of interval year type.; line 1 pos 7"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/try_datetime_functions.sql.out b/sql/core/src/test/resources/sql-tests/results/try_datetime_functions.sql.out index 2a7e37edccaf8..c9de17947a8aa 100644 --- a/sql/core/src/test/resources/sql-tests/results/try_datetime_functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/try_datetime_functions.sql.out @@ -45,4 +45,4 @@ select try_to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEEE') struct<> -- !query output org.apache.spark.SparkUpgradeException -{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'dd MM yyyy EEEEEE'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]} +{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'dd MM yyyy EEEEEE'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/try_element_at.sql.out b/sql/core/src/test/resources/sql-tests/results/try_element_at.sql.out index 21b1f73b85f6c..a2e3fe1eaa783 100644 --- a/sql/core/src/test/resources/sql-tests/results/try_element_at.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/try_element_at.sql.out @@ -5,7 +5,7 @@ SELECT try_element_at(array(1, 2, 3), 0) struct<> -- !query output org.apache.spark.SparkRuntimeException -{"errorClass":"ELEMENT_AT_BY_INDEX_ZERO","messageParameters":[],"queryContext":[]} +{"errorClass":"ELEMENT_AT_BY_INDEX_ZERO","messageParameters":{},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/booleanEquality.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/booleanEquality.sql.out index a4bdad588ad3e..070ef1d432b40 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/booleanEquality.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/booleanEquality.sql.out @@ -77,7 +77,7 @@ SELECT true = cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(true = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(true = CAST('1' AS BINARY))' (boolean and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(true = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(true = CAST('1' AS BINARY))' (boolean and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -94,7 +94,7 @@ SELECT true = cast('2017-12-11 
09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(true = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(true = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (boolean and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(true = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(true = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (boolean and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -103,7 +103,7 @@ SELECT true = cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(true = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(true = CAST('2017-12-11 09:30:00' AS DATE))' (boolean and date).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(true = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(true = CAST('2017-12-11 09:30:00' AS DATE))' (boolean and date).; line 1 pos 7"},"queryContext":[]} -- !query @@ -176,7 +176,7 @@ SELECT true <=> cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(true <=> CAST('1' AS BINARY))' due to data type mismatch: differing types in '(true <=> CAST('1' AS BINARY))' (boolean and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(true <=> CAST('1' AS BINARY))' due to data type mismatch: differing types in '(true <=> CAST('1' AS BINARY))' (boolean and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -193,7 +193,7 @@ SELECT true <=> cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(true <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(true <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (boolean and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(true <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(true <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (boolean and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -202,7 +202,7 @@ SELECT true <=> cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(true <=> CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(true <=> CAST('2017-12-11 09:30:00' AS DATE))' (boolean and date).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(true <=> CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(true <=> CAST('2017-12-11 09:30:00' AS DATE))' (boolean and date).; line 1 pos 7"},"queryContext":[]} -- !query @@ -275,7 +275,7 @@ SELECT cast('1' as binary) = true FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 
'(CAST('1' AS BINARY) = true)' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = true)' (binary and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) = true)' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = true)' (binary and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -292,7 +292,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) = true FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = true)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = true)' (timestamp and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = true)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = true)' (timestamp and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -301,7 +301,7 @@ SELECT cast('2017-12-11 09:30:00' as date) = true FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = true)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = true)' (date and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = true)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = true)' (date and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -374,7 +374,7 @@ SELECT cast('1' as binary) <=> true FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) <=> true)' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <=> true)' (binary and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) <=> true)' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <=> true)' (binary and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -391,7 +391,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <=> true FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> true)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> true)' (timestamp and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> true)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> true)' (timestamp and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -400,7 +400,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <=> true FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <=> true)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <=> true)' (date and boolean).; line 1 pos 7"],"queryContext":[]} 
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <=> true)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <=> true)' (date and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -473,7 +473,7 @@ SELECT false = cast('0' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(false = CAST('0' AS BINARY))' due to data type mismatch: differing types in '(false = CAST('0' AS BINARY))' (boolean and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(false = CAST('0' AS BINARY))' due to data type mismatch: differing types in '(false = CAST('0' AS BINARY))' (boolean and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -490,7 +490,7 @@ SELECT false = cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(false = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(false = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (boolean and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(false = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(false = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (boolean and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -499,7 +499,7 @@ SELECT false = cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(false = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(false = CAST('2017-12-11 09:30:00' AS DATE))' (boolean and date).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(false = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(false = CAST('2017-12-11 09:30:00' AS DATE))' (boolean and date).; line 1 pos 7"},"queryContext":[]} -- !query @@ -572,7 +572,7 @@ SELECT false <=> cast('0' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(false <=> CAST('0' AS BINARY))' due to data type mismatch: differing types in '(false <=> CAST('0' AS BINARY))' (boolean and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(false <=> CAST('0' AS BINARY))' due to data type mismatch: differing types in '(false <=> CAST('0' AS BINARY))' (boolean and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -589,7 +589,7 @@ SELECT false <=> cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(false <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(false <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (boolean and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(false <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(false <=> CAST('2017-12-11 09:30:00.0' AS 
TIMESTAMP))' (boolean and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -598,7 +598,7 @@ SELECT false <=> cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(false <=> CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(false <=> CAST('2017-12-11 09:30:00' AS DATE))' (boolean and date).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(false <=> CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(false <=> CAST('2017-12-11 09:30:00' AS DATE))' (boolean and date).; line 1 pos 7"},"queryContext":[]} -- !query @@ -671,7 +671,7 @@ SELECT cast('0' as binary) = false FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('0' AS BINARY) = false)' due to data type mismatch: differing types in '(CAST('0' AS BINARY) = false)' (binary and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('0' AS BINARY) = false)' due to data type mismatch: differing types in '(CAST('0' AS BINARY) = false)' (binary and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -688,7 +688,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) = false FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = false)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = false)' (timestamp and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = false)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = false)' (timestamp and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -697,7 +697,7 @@ SELECT cast('2017-12-11 09:30:00' as date) = false FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = false)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = false)' (date and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = false)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = false)' (date and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -770,7 +770,7 @@ SELECT cast('0' as binary) <=> false FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('0' AS BINARY) <=> false)' due to data type mismatch: differing types in '(CAST('0' AS BINARY) <=> false)' (binary and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('0' AS BINARY) <=> false)' due to data type mismatch: differing types in '(CAST('0' AS BINARY) <=> false)' (binary and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -787,7 +787,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <=> false FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> false)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> false)' (timestamp and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> false)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> false)' (timestamp and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -796,4 +796,4 @@ SELECT cast('2017-12-11 09:30:00' as date) <=> false FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <=> false)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <=> false)' (date and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <=> false)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <=> false)' (date and boolean).; line 1 pos 7"},"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/caseWhenCoercion.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/caseWhenCoercion.sql.out index 0c246d3ed8700..5a8350b45a6f6 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/caseWhenCoercion.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/caseWhenCoercion.sql.out @@ -77,7 +77,7 @@ SELECT CASE WHEN true THEN cast(1 as tinyint) ELSE cast('2' as binary) END FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS TINYINT) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN tinyint ELSE binary END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST(1 AS TINYINT) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN tinyint ELSE binary END; line 1 pos 7"},"queryContext":[]} -- !query @@ -86,7 +86,7 @@ SELECT CASE WHEN true THEN cast(1 as tinyint) ELSE cast(2 as boolean) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS TINYINT) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN tinyint ELSE boolean END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST(1 AS TINYINT) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN tinyint ELSE boolean END; line 1 pos 7"},"queryContext":[]} -- !query @@ -95,7 +95,7 @@ SELECT CASE WHEN true THEN cast(1 as tinyint) ELSE cast('2017-12-11 09:30:00.0' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS TINYINT) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN tinyint ELSE timestamp END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST(1 AS TINYINT) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN tinyint ELSE timestamp END; line 1 pos 7"},"queryContext":[]} -- !query @@ -104,7 +104,7 @@ SELECT CASE WHEN true THEN cast(1 as tinyint) ELSE cast('2017-12-11 09:30:00' as struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS TINYINT) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN tinyint ELSE date END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST(1 AS TINYINT) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN tinyint ELSE date END; line 1 pos 7"},"queryContext":[]} -- !query @@ -177,7 +177,7 @@ SELECT CASE WHEN true THEN cast(1 as smallint) ELSE cast('2' as binary) END FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS SMALLINT) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN smallint ELSE binary END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST(1 AS SMALLINT) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN smallint ELSE binary END; line 1 pos 7"},"queryContext":[]} -- !query @@ -186,7 +186,7 @@ SELECT CASE WHEN true THEN cast(1 as smallint) ELSE cast(2 as boolean) END FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS SMALLINT) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN smallint ELSE boolean END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST(1 AS SMALLINT) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN smallint ELSE boolean END; line 1 pos 7"},"queryContext":[]} -- !query @@ -195,7 +195,7 @@ SELECT CASE WHEN true THEN cast(1 as smallint) ELSE cast('2017-12-11 09:30:00.0' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS SMALLINT) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN smallint ELSE timestamp END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST(1 AS SMALLINT) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN smallint ELSE timestamp END; line 1 pos 7"},"queryContext":[]} -- !query @@ -204,7 +204,7 @@ SELECT CASE WHEN true THEN cast(1 as smallint) ELSE cast('2017-12-11 09:30:00' a struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS SMALLINT) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN smallint ELSE date END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST(1 AS SMALLINT) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN smallint ELSE date END; line 1 pos 7"},"queryContext":[]} -- !query @@ -277,7 +277,7 @@ SELECT CASE WHEN true THEN cast(1 as int) ELSE cast('2' as binary) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS INT) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN int ELSE binary END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST(1 AS INT) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN int ELSE binary END; line 1 pos 7"},"queryContext":[]} -- !query @@ -286,7 +286,7 @@ SELECT CASE WHEN true THEN cast(1 as int) ELSE cast(2 as boolean) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS INT) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN int ELSE boolean END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST(1 AS INT) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN int ELSE boolean END; line 1 pos 7"},"queryContext":[]} -- !query @@ -295,7 +295,7 @@ SELECT CASE WHEN true THEN cast(1 as int) ELSE cast('2017-12-11 09:30:00.0' as t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS INT) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN int ELSE timestamp END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST(1 AS INT) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN int ELSE timestamp END; line 1 pos 7"},"queryContext":[]} -- !query @@ -304,7 +304,7 @@ SELECT CASE WHEN true THEN cast(1 as int) ELSE cast('2017-12-11 09:30:00' as dat struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS INT) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN int ELSE date END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST(1 AS INT) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN int ELSE date END; line 1 pos 7"},"queryContext":[]} -- !query @@ -377,7 +377,7 @@ SELECT CASE WHEN true THEN cast(1 as bigint) ELSE cast('2' as binary) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS BIGINT) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN bigint ELSE binary END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST(1 AS BIGINT) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN bigint ELSE binary END; line 1 pos 7"},"queryContext":[]} -- !query @@ -386,7 +386,7 @@ SELECT CASE WHEN true THEN cast(1 as bigint) ELSE cast(2 as boolean) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS BIGINT) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN bigint ELSE boolean END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST(1 AS BIGINT) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN bigint ELSE boolean END; line 1 pos 7"},"queryContext":[]} -- !query @@ -395,7 +395,7 @@ SELECT CASE WHEN true THEN cast(1 as bigint) ELSE cast('2017-12-11 09:30:00.0' a struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS BIGINT) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN bigint ELSE timestamp END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST(1 AS BIGINT) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN bigint ELSE timestamp END; line 1 pos 7"},"queryContext":[]} -- !query @@ -404,7 +404,7 @@ SELECT CASE WHEN true THEN cast(1 as bigint) ELSE cast('2017-12-11 09:30:00' as struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS BIGINT) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN bigint ELSE date END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST(1 AS BIGINT) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN bigint ELSE date END; line 1 pos 7"},"queryContext":[]} -- !query @@ -477,7 +477,7 @@ SELECT CASE WHEN true THEN cast(1 as float) ELSE cast('2' as binary) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS FLOAT) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN float ELSE binary END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST(1 AS FLOAT) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN float ELSE binary END; line 1 pos 7"},"queryContext":[]} -- !query @@ -486,7 +486,7 @@ SELECT CASE WHEN true THEN cast(1 as float) ELSE cast(2 as boolean) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS FLOAT) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN float ELSE boolean END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST(1 AS FLOAT) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN float ELSE boolean END; line 1 pos 7"},"queryContext":[]} -- !query @@ -495,7 +495,7 @@ SELECT CASE WHEN true THEN cast(1 as float) ELSE cast('2017-12-11 09:30:00.0' as struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS FLOAT) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN float ELSE timestamp END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST(1 AS FLOAT) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN float ELSE timestamp END; line 1 pos 7"},"queryContext":[]} -- !query @@ -504,7 +504,7 @@ SELECT CASE WHEN true THEN cast(1 as float) ELSE cast('2017-12-11 09:30:00' as d struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS FLOAT) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN float ELSE date END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST(1 AS FLOAT) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN float ELSE date END; line 1 pos 7"},"queryContext":[]} -- !query @@ -577,7 +577,7 @@ SELECT CASE WHEN true THEN cast(1 as double) ELSE cast('2' as binary) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS DOUBLE) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN double ELSE binary END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST(1 AS DOUBLE) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN double ELSE binary END; line 1 pos 7"},"queryContext":[]} -- !query @@ -586,7 +586,7 @@ SELECT CASE WHEN true THEN cast(1 as double) ELSE cast(2 as boolean) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS DOUBLE) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN double ELSE boolean END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST(1 AS DOUBLE) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN double ELSE boolean END; line 1 pos 7"},"queryContext":[]} -- !query @@ -595,7 +595,7 @@ SELECT CASE WHEN true THEN cast(1 as double) ELSE cast('2017-12-11 09:30:00.0' a struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS DOUBLE) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN double ELSE timestamp END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST(1 AS DOUBLE) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN double ELSE timestamp END; line 1 pos 7"},"queryContext":[]} -- !query @@ -604,7 +604,7 @@ SELECT CASE WHEN true THEN cast(1 as double) ELSE cast('2017-12-11 09:30:00' as struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS DOUBLE) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN double ELSE date END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST(1 AS DOUBLE) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN double ELSE date END; line 1 pos 7"},"queryContext":[]} -- !query @@ -677,7 +677,7 @@ SELECT CASE WHEN true THEN cast(1 as decimal(10, 0)) ELSE cast('2' as binary) EN struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS DECIMAL(10,0)) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN decimal(10,0) ELSE binary END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST(1 AS DECIMAL(10,0)) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN decimal(10,0) ELSE binary END; line 1 pos 7"},"queryContext":[]} -- !query @@ -686,7 +686,7 @@ SELECT CASE WHEN true THEN cast(1 as decimal(10, 0)) ELSE cast(2 as boolean) END struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS DECIMAL(10,0)) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN decimal(10,0) ELSE boolean END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST(1 AS DECIMAL(10,0)) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN decimal(10,0) ELSE boolean END; line 1 pos 7"},"queryContext":[]} -- !query @@ -695,7 +695,7 @@ SELECT CASE WHEN true THEN cast(1 as decimal(10, 0)) ELSE cast('2017-12-11 09:30 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS DECIMAL(10,0)) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN decimal(10,0) ELSE timestamp END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST(1 AS DECIMAL(10,0)) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN decimal(10,0) ELSE timestamp END; line 1 pos 7"},"queryContext":[]} -- !query @@ -704,7 +704,7 @@ SELECT CASE WHEN true THEN cast(1 as decimal(10, 0)) ELSE cast('2017-12-11 09:30 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS DECIMAL(10,0)) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN decimal(10,0) ELSE date END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST(1 AS DECIMAL(10,0)) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN decimal(10,0) ELSE date END; line 1 pos 7"},"queryContext":[]} -- !query @@ -777,7 +777,7 @@ SELECT CASE WHEN true THEN cast(1 as string) ELSE cast('2' as binary) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS STRING) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN string ELSE binary END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST(1 AS STRING) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN string ELSE binary END; line 1 pos 7"},"queryContext":[]} -- !query @@ -786,7 +786,7 @@ SELECT CASE WHEN true THEN cast(1 as string) ELSE cast(2 as boolean) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS STRING) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN string ELSE boolean END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST(1 AS STRING) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN string ELSE boolean END; line 1 pos 7"},"queryContext":[]} -- !query @@ -811,7 +811,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast(2 as tinyint) END FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS TINYINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE tinyint END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS TINYINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE tinyint END; line 1 pos 7"},"queryContext":[]} -- !query @@ -820,7 +820,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast(2 as smallint) END FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS SMALLINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE smallint END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS SMALLINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE smallint END; line 1 pos 7"},"queryContext":[]} -- !query @@ -829,7 +829,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast(2 as int) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS INT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE int END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS INT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE int END; line 1 pos 7"},"queryContext":[]} -- !query @@ -838,7 +838,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast(2 as bigint) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS BIGINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE bigint END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS BIGINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN binary ELSE bigint END; line 1 pos 7"},"queryContext":[]} -- !query @@ -847,7 +847,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast(2 as float) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS FLOAT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE float END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS FLOAT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE float END; line 1 pos 7"},"queryContext":[]} -- !query @@ -856,7 +856,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast(2 as double) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS DOUBLE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE double END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS DOUBLE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE double END; line 1 pos 7"},"queryContext":[]} -- !query @@ -865,7 +865,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast(2 as decimal(10, 0)) EN struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS DECIMAL(10,0)) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE decimal(10,0) END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS DECIMAL(10,0)) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE decimal(10,0) END; line 1 pos 7"},"queryContext":[]} -- !query @@ -874,7 +874,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast(2 as string) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS STRING) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE string END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS STRING) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN binary ELSE string END; line 1 pos 7"},"queryContext":[]} -- !query @@ -891,7 +891,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast(2 as boolean) END FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE boolean END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE boolean END; line 1 pos 7"},"queryContext":[]} -- !query @@ -900,7 +900,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast('2017-12-11 09:30:00.0' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE timestamp END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE timestamp END; line 1 pos 7"},"queryContext":[]} -- !query @@ -909,7 +909,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast('2017-12-11 09:30:00' a struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE date END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE date END; line 1 pos 7"},"queryContext":[]} -- !query @@ -918,7 +918,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast(2 as tinyint) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS TINYINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE tinyint END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS TINYINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN boolean ELSE tinyint END; line 1 pos 7"},"queryContext":[]} -- !query @@ -927,7 +927,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast(2 as smallint) END FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS SMALLINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE smallint END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS SMALLINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE smallint END; line 1 pos 7"},"queryContext":[]} -- !query @@ -936,7 +936,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast(2 as int) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS INT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE int END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS INT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE int END; line 1 pos 7"},"queryContext":[]} -- !query @@ -945,7 +945,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast(2 as bigint) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS BIGINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE bigint END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS BIGINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE bigint END; line 1 pos 7"},"queryContext":[]} -- !query @@ -954,7 +954,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast(2 as float) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS FLOAT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE float END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS FLOAT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN boolean ELSE float END; line 1 pos 7"},"queryContext":[]} -- !query @@ -963,7 +963,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast(2 as double) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS DOUBLE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE double END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS DOUBLE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE double END; line 1 pos 7"},"queryContext":[]} -- !query @@ -972,7 +972,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast(2 as decimal(10, 0)) END struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS DECIMAL(10,0)) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE decimal(10,0) END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS DECIMAL(10,0)) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE decimal(10,0) END; line 1 pos 7"},"queryContext":[]} -- !query @@ -981,7 +981,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast(2 as string) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS STRING) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE string END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS STRING) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE string END; line 1 pos 7"},"queryContext":[]} -- !query @@ -990,7 +990,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast('2' as binary) END FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE binary END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN boolean ELSE binary END; line 1 pos 7"},"queryContext":[]} -- !query @@ -1007,7 +1007,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast('2017-12-11 09:30:00.0' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE timestamp END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE timestamp END; line 1 pos 7"},"queryContext":[]} -- !query @@ -1016,7 +1016,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast('2017-12-11 09:30:00' as struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE date END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE date END; line 1 pos 7"},"queryContext":[]} -- !query @@ -1025,7 +1025,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00.0' as timestamp) ELSE cast( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS TINYINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE tinyint END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS TINYINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE tinyint END; line 1 pos 7"},"queryContext":[]} -- !query @@ -1034,7 +1034,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00.0' as timestamp) ELSE cast( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS SMALLINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE smallint END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS SMALLINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN timestamp ELSE smallint END; line 1 pos 7"},"queryContext":[]} -- !query @@ -1043,7 +1043,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00.0' as timestamp) ELSE cast( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS INT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE int END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS INT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE int END; line 1 pos 7"},"queryContext":[]} -- !query @@ -1052,7 +1052,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00.0' as timestamp) ELSE cast( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS BIGINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE bigint END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS BIGINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE bigint END; line 1 pos 7"},"queryContext":[]} -- !query @@ -1061,7 +1061,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00.0' as timestamp) ELSE cast( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS FLOAT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE float END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS FLOAT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE float END; line 1 pos 7"},"queryContext":[]} -- !query @@ -1070,7 +1070,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00.0' as timestamp) ELSE cast( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS DOUBLE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE double END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS DOUBLE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN timestamp ELSE double END; line 1 pos 7"},"queryContext":[]} -- !query @@ -1079,7 +1079,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00.0' as timestamp) ELSE cast( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS DECIMAL(10,0)) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE decimal(10,0) END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS DECIMAL(10,0)) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE decimal(10,0) END; line 1 pos 7"},"queryContext":[]} -- !query @@ -1096,7 +1096,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00.0' as timestamp) ELSE cast( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE binary END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE binary END; line 1 pos 7"},"queryContext":[]} -- !query @@ -1105,7 +1105,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00.0' as timestamp) ELSE cast( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE boolean END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE boolean END; line 1 pos 7"},"queryContext":[]} -- !query @@ -1130,7 +1130,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00' as date) ELSE cast(2 as ti struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS TINYINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE tinyint END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS TINYINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN date ELSE tinyint END; line 1 pos 7"},"queryContext":[]} -- !query @@ -1139,7 +1139,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00' as date) ELSE cast(2 as sm struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS SMALLINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE smallint END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS SMALLINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE smallint END; line 1 pos 7"},"queryContext":[]} -- !query @@ -1148,7 +1148,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00' as date) ELSE cast(2 as in struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS INT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE int END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS INT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE int END; line 1 pos 7"},"queryContext":[]} -- !query @@ -1157,7 +1157,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00' as date) ELSE cast(2 as bi struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS BIGINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE bigint END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS BIGINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE bigint END; line 1 pos 7"},"queryContext":[]} -- !query @@ -1166,7 +1166,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00' as date) ELSE cast(2 as fl struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS FLOAT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE float END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS FLOAT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN date ELSE float END; line 1 pos 7"},"queryContext":[]} -- !query @@ -1175,7 +1175,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00' as date) ELSE cast(2 as do struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS DOUBLE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE double END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS DOUBLE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE double END; line 1 pos 7"},"queryContext":[]} -- !query @@ -1184,7 +1184,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00' as date) ELSE cast(2 as de struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS DECIMAL(10,0)) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE decimal(10,0) END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS DECIMAL(10,0)) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE decimal(10,0) END; line 1 pos 7"},"queryContext":[]} -- !query @@ -1201,7 +1201,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00' as date) ELSE cast('2' as struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE binary END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE binary END; line 1 pos 7"},"queryContext":[]} -- !query @@ -1210,7 +1210,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00' as date) ELSE cast(2 as bo struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE boolean END; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN date ELSE boolean END; line 1 pos 7"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/dateTimeOperations.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/dateTimeOperations.sql.out index f6908adc5dfbc..f1dbf5ca256b9 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/dateTimeOperations.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/dateTimeOperations.sql.out @@ -13,7 +13,7 @@ select cast(1 as tinyint) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS TINYINT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS TINYINT)' is of tinyint type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CAST(1 AS TINYINT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS TINYINT)' is of tinyint type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -22,7 +22,7 @@ select cast(1 as smallint) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS SMALLINT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS SMALLINT)' is of smallint type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CAST(1 AS SMALLINT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS SMALLINT)' is of smallint type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -31,7 +31,7 @@ select cast(1 as int) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS INT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS INT)' is of int type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CAST(1 AS INT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS INT)' is of int type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -40,7 +40,7 @@ select cast(1 as bigint) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS BIGINT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS BIGINT)' is of bigint type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CAST(1 AS BIGINT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS BIGINT)' is of bigint type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -49,7 +49,7 @@ select cast(1 as float) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS FLOAT) + INTERVAL '2' DAY' due to 
data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS FLOAT)' is of float type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CAST(1 AS FLOAT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS FLOAT)' is of float type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -58,7 +58,7 @@ select cast(1 as double) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS DOUBLE) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DOUBLE)' is of double type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CAST(1 AS DOUBLE) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DOUBLE)' is of double type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -67,7 +67,7 @@ select cast(1 as decimal(10, 0)) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS DECIMAL(10,0)) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CAST(1 AS DECIMAL(10,0)) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -92,7 +92,7 @@ select cast('1' as binary) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST('1' AS BINARY) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST('1' AS BINARY)' is of binary type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CAST('1' AS BINARY) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST('1' AS BINARY)' is of binary type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -101,7 +101,7 @@ select cast(1 as boolean) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS BOOLEAN) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS BOOLEAN)' is of boolean type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CAST(1 AS BOOLEAN) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS BOOLEAN)' is of boolean type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -126,7 +126,7 @@ select interval 2 day + cast(1 as tinyint) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS 
TINYINT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS TINYINT)' is of tinyint type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CAST(1 AS TINYINT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS TINYINT)' is of tinyint type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -135,7 +135,7 @@ select interval 2 day + cast(1 as smallint) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS SMALLINT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS SMALLINT)' is of smallint type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CAST(1 AS SMALLINT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS SMALLINT)' is of smallint type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -144,7 +144,7 @@ select interval 2 day + cast(1 as int) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS INT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS INT)' is of int type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CAST(1 AS INT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS INT)' is of int type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -153,7 +153,7 @@ select interval 2 day + cast(1 as bigint) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS BIGINT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS BIGINT)' is of bigint type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CAST(1 AS BIGINT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS BIGINT)' is of bigint type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -162,7 +162,7 @@ select interval 2 day + cast(1 as float) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS FLOAT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS FLOAT)' is of float type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CAST(1 AS FLOAT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS FLOAT)' is of float type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -171,7 +171,7 @@ select interval 2 day + cast(1 as double) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS DOUBLE) + INTERVAL '2' DAY' due to 
data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DOUBLE)' is of double type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CAST(1 AS DOUBLE) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DOUBLE)' is of double type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -180,7 +180,7 @@ select interval 2 day + cast(1 as decimal(10, 0)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS DECIMAL(10,0)) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CAST(1 AS DECIMAL(10,0)) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -205,7 +205,7 @@ select interval 2 day + cast('1' as binary) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST('1' AS BINARY) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST('1' AS BINARY)' is of binary type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CAST('1' AS BINARY) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST('1' AS BINARY)' is of binary type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -214,7 +214,7 @@ select interval 2 day + cast(1 as boolean) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS BOOLEAN) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS BOOLEAN)' is of boolean type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CAST(1 AS BOOLEAN) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS BOOLEAN)' is of boolean type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -239,7 +239,7 @@ select cast(1 as tinyint) - interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS TINYINT) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS TINYINT)' is of tinyint type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CAST(1 AS TINYINT) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS TINYINT)' is of tinyint type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -248,7 +248,7 @@ select cast(1 as smallint) - interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot 
resolve 'CAST(1 AS SMALLINT) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS SMALLINT)' is of smallint type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CAST(1 AS SMALLINT) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS SMALLINT)' is of smallint type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -257,7 +257,7 @@ select cast(1 as int) - interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS INT) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS INT)' is of int type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CAST(1 AS INT) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS INT)' is of int type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -266,7 +266,7 @@ select cast(1 as bigint) - interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS BIGINT) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS BIGINT)' is of bigint type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CAST(1 AS BIGINT) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS BIGINT)' is of bigint type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -275,7 +275,7 @@ select cast(1 as float) - interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS FLOAT) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS FLOAT)' is of float type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CAST(1 AS FLOAT) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS FLOAT)' is of float type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -284,7 +284,7 @@ select cast(1 as double) - interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS DOUBLE) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DOUBLE)' is of double type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CAST(1 AS DOUBLE) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DOUBLE)' is of double type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -293,7 +293,7 @@ select cast(1 as decimal(10, 0)) - interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS DECIMAL(10,0)) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CAST(1 AS DECIMAL(10,0)) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -318,7 +318,7 @@ select cast('1' as binary) - interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST('1' AS BINARY) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST('1' AS BINARY)' is of binary type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CAST('1' AS BINARY) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST('1' AS BINARY)' is of binary type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -327,7 +327,7 @@ select cast(1 as boolean) - interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'CAST(1 AS BOOLEAN) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS BOOLEAN)' is of boolean type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'CAST(1 AS BOOLEAN) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS BOOLEAN)' is of boolean type.; line 1 pos 7"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/decimalPrecision.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/decimalPrecision.sql.out index bfac6e87dac44..86d1cfd96ff8a 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/decimalPrecision.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/decimalPrecision.sql.out @@ -237,7 +237,7 @@ SELECT cast('1' as binary) + cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -246,7 +246,7 @@ SELECT cast('1' as binary) + cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) + CAST(1 AS 
DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -255,7 +255,7 @@ SELECT cast('1' as binary) + cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -264,7 +264,7 @@ SELECT cast('1' as binary) + cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -273,7 +273,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) + cast(1 as decimal(3, 0)) FRO struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -282,7 +282,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) + cast(1 as decimal(5, 0)) FRO struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -291,7 +291,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) + cast(1 as decimal(10, 0)) FR struct<> -- !query 
output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -300,7 +300,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) + cast(1 as decimal(20, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -309,7 +309,7 @@ SELECT cast('2017-12-11 09:30:00' as date) + cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -318,7 +318,7 @@ SELECT cast('2017-12-11 09:30:00' as date) + cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -327,7 +327,7 @@ SELECT cast('2017-12-11 09:30:00' as date) + cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS 
DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -336,7 +336,7 @@ SELECT cast('2017-12-11 09:30:00' as date) + cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -601,7 +601,7 @@ SELECT cast(1 as decimal(3, 0)) + cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) + CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) + CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) + CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) + CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -610,7 +610,7 @@ SELECT cast(1 as decimal(5, 0)) + cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) + CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) + CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) + CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) + CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -619,7 +619,7 @@ SELECT cast(1 as decimal(10, 0)) + cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) + CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) + CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) + CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) + CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -628,7 +628,7 @@ SELECT cast(1 as decimal(20, 0)) + cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 
'(CAST(1 AS DECIMAL(20,0)) + CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) + CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) + CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) + CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -637,7 +637,7 @@ SELECT cast(1 as decimal(3, 0)) + cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) + CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) + CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) + CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) + CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -646,7 +646,7 @@ SELECT cast(1 as decimal(5, 0)) + cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) + CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) + CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) + CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) + CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -655,7 +655,7 @@ SELECT cast(1 as decimal(10, 0)) + cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) + CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) + CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) + CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) + CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -664,7 +664,7 @@ SELECT cast(1 as decimal(20, 0)) + cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) + CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) + CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) + CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) + CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -673,7 +673,7 @@ SELECT cast(1 as decimal(3, 0)) + cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 
'(CAST(1 AS DECIMAL(3,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -682,7 +682,7 @@ SELECT cast(1 as decimal(5, 0)) + cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -691,7 +691,7 @@ SELECT cast(1 as decimal(10, 0)) + cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -700,7 +700,7 @@ SELECT cast(1 as decimal(20, 0)) + cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -709,7 +709,7 @@ SELECT cast(1 as decimal(3, 0)) + cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7"],"queryContext":[]} 
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -718,7 +718,7 @@ SELECT cast(1 as decimal(5, 0)) + cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -727,7 +727,7 @@ SELECT cast(1 as decimal(10, 0)) + cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -736,7 +736,7 @@ SELECT cast(1 as decimal(20, 0)) + cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -969,7 +969,7 @@ SELECT cast('1' as binary) - cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -978,7 +978,7 @@ SELECT cast('1' as binary) - cast(1 as decimal(5, 0)) FROM t struct<> -- 
!query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -987,7 +987,7 @@ SELECT cast('1' as binary) - cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -996,7 +996,7 @@ SELECT cast('1' as binary) - cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1005,7 +1005,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) - cast(1 as decimal(3, 0)) FRO struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -1014,7 +1014,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) - cast(1 as decimal(5, 0)) FRO struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: argument 2 requires 
(timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -1023,7 +1023,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) - cast(1 as decimal(10, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -1032,7 +1032,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) - cast(1 as decimal(20, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -1041,7 +1041,7 @@ SELECT cast('2017-12-11 09:30:00' as date) - cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_sub(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_sub(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -1050,7 +1050,7 @@ SELECT cast('2017-12-11 09:30:00' as date) - cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_sub(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_sub(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -1059,7 +1059,7 @@ SELECT cast('2017-12-11 09:30:00' as date) - cast(1 as decimal(10, 0)) FROM t 
struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_sub(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_sub(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -1068,7 +1068,7 @@ SELECT cast('2017-12-11 09:30:00' as date) - cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'date_sub(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_sub(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -1333,7 +1333,7 @@ SELECT cast(1 as decimal(3, 0)) - cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) - CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) - CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) - CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) - CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1342,7 +1342,7 @@ SELECT cast(1 as decimal(5, 0)) - cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) - CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) - CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) - CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) - CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1351,7 +1351,7 @@ SELECT cast(1 as decimal(10, 0)) - cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) - CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) - CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) - CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) 
- CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1360,7 +1360,7 @@ SELECT cast(1 as decimal(20, 0)) - cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) - CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) - CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) - CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) - CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1369,7 +1369,7 @@ SELECT cast(1 as decimal(3, 0)) - cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) - CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) - CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) - CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) - CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1378,7 +1378,7 @@ SELECT cast(1 as decimal(5, 0)) - cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) - CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) - CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) - CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) - CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1387,7 +1387,7 @@ SELECT cast(1 as decimal(10, 0)) - cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) - CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) - CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) - CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) - CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1396,7 +1396,7 @@ SELECT cast(1 as decimal(20, 0)) - cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) - CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) - CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) - CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) - CAST(1 AS 
BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1405,7 +1405,7 @@ SELECT cast(1 as decimal(3, 0)) - cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -1414,7 +1414,7 @@ SELECT cast(1 as decimal(5, 0)) - cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -1423,7 +1423,7 @@ SELECT cast(1 as decimal(10, 0)) - cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -1432,7 +1432,7 @@ SELECT cast(1 as decimal(20, 0)) - cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -1441,7 +1441,7 @@ SELECT cast(1 as decimal(3, 0)) - cast('2017-12-11 09:30:00' as date) FROM t struct<> 
-- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) - CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: argument 1 requires date type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) - CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: argument 1 requires date type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -1450,7 +1450,7 @@ SELECT cast(1 as decimal(5, 0)) - cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) - CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: argument 1 requires date type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) - CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: argument 1 requires date type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -1459,7 +1459,7 @@ SELECT cast(1 as decimal(10, 0)) - cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) - CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: argument 1 requires date type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) - CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: argument 1 requires date type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -1468,7 +1468,7 @@ SELECT cast(1 as decimal(20, 0)) - cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) - CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: argument 1 requires date type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) - CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: argument 1 requires date type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -1701,7 +1701,7 @@ SELECT cast('1' as binary) * cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 
7"},"queryContext":[]} -- !query @@ -1710,7 +1710,7 @@ SELECT cast('1' as binary) * cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1719,7 +1719,7 @@ SELECT cast('1' as binary) * cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1728,7 +1728,7 @@ SELECT cast('1' as binary) * cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1737,7 +1737,7 @@ SELECT cast('2017*12*11 09:30:00.0' as timestamp) * cast(1 as decimal(3, 0)) FRO struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1746,7 +1746,7 @@ SELECT cast('2017*12*11 09:30:00.0' as timestamp) * cast(1 as decimal(5, 0)) FRO struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 
'(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1755,7 +1755,7 @@ SELECT cast('2017*12*11 09:30:00.0' as timestamp) * cast(1 as decimal(10, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1764,7 +1764,7 @@ SELECT cast('2017*12*11 09:30:00.0' as timestamp) * cast(1 as decimal(20, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1773,7 +1773,7 @@ SELECT cast('2017*12*11 09:30:00' as date) * cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1782,7 +1782,7 @@ SELECT cast('2017*12*11 09:30:00' as date) * cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1791,7 +1791,7 @@ SELECT cast('2017*12*11 09:30:00' as 
date) * cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1800,7 +1800,7 @@ SELECT cast('2017*12*11 09:30:00' as date) * cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2065,7 +2065,7 @@ SELECT cast(1 as decimal(3, 0)) * cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) * CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) * CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) * CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) * CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2074,7 +2074,7 @@ SELECT cast(1 as decimal(5, 0)) * cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) * CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) * CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) * CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) * CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2083,7 +2083,7 @@ SELECT cast(1 as decimal(10, 0)) * cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) * CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) * CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) * CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) * 
CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2092,7 +2092,7 @@ SELECT cast(1 as decimal(20, 0)) * cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) * CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) * CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) * CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) * CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2101,7 +2101,7 @@ SELECT cast(1 as decimal(3, 0)) * cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) * CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) * CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) * CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) * CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2110,7 +2110,7 @@ SELECT cast(1 as decimal(5, 0)) * cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) * CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) * CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) * CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) * CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2119,7 +2119,7 @@ SELECT cast(1 as decimal(10, 0)) * cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) * CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) * CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) * CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) * CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2128,7 +2128,7 @@ SELECT cast(1 as decimal(20, 0)) * cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) * CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) * CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) * CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) * CAST(1 AS 
BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2137,7 +2137,7 @@ SELECT cast(1 as decimal(3, 0)) * cast('2017*12*11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2146,7 +2146,7 @@ SELECT cast(1 as decimal(5, 0)) * cast('2017*12*11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2155,7 +2155,7 @@ SELECT cast(1 as decimal(10, 0)) * cast('2017*12*11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2164,7 +2164,7 @@ SELECT cast(1 as decimal(20, 0)) * cast('2017*12*11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2173,7 +2173,7 @@ SELECT cast(1 as decimal(3, 0)) * cast('2017*12*11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) * CAST('2017*12*11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) * CAST('2017*12*11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) * CAST('2017*12*11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) * CAST('2017*12*11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2182,7 +2182,7 @@ SELECT cast(1 as decimal(5, 0)) * cast('2017*12*11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) * CAST('2017*12*11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) * CAST('2017*12*11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) * CAST('2017*12*11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) * CAST('2017*12*11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2191,7 +2191,7 @@ SELECT cast(1 as decimal(10, 0)) * cast('2017*12*11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) * CAST('2017*12*11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) * CAST('2017*12*11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) * CAST('2017*12*11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) * CAST('2017*12*11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2200,7 +2200,7 @@ SELECT cast(1 as decimal(20, 0)) * cast('2017*12*11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) * CAST('2017*12*11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) * CAST('2017*12*11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) * CAST('2017*12*11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) * CAST('2017*12*11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2433,7 +2433,7 @@ SELECT cast('1' as binary) / cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing 
types in '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2442,7 +2442,7 @@ SELECT cast('1' as binary) / cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2451,7 +2451,7 @@ SELECT cast('1' as binary) / cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2460,7 +2460,7 @@ SELECT cast('1' as binary) / cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2469,7 +2469,7 @@ SELECT cast('2017/12/11 09:30:00.0' as timestamp) / cast(1 as decimal(3, 0)) FRO struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2478,7 +2478,7 @@ SELECT cast('2017/12/11 09:30:00.0' as timestamp) / cast(1 as decimal(5, 0)) FRO struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 
7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2487,7 +2487,7 @@ SELECT cast('2017/12/11 09:30:00.0' as timestamp) / cast(1 as decimal(10, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2496,7 +2496,7 @@ SELECT cast('2017/12/11 09:30:00.0' as timestamp) / cast(1 as decimal(20, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2505,7 +2505,7 @@ SELECT cast('2017/12/11 09:30:00' as date) / cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2514,7 +2514,7 @@ SELECT cast('2017/12/11 09:30:00' as date) / cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 
7"},"queryContext":[]} -- !query @@ -2523,7 +2523,7 @@ SELECT cast('2017/12/11 09:30:00' as date) / cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2532,7 +2532,7 @@ SELECT cast('2017/12/11 09:30:00' as date) / cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2797,7 +2797,7 @@ SELECT cast(1 as decimal(3, 0)) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) / CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) / CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2806,7 +2806,7 @@ SELECT cast(1 as decimal(5, 0)) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) / CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) / CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2815,7 +2815,7 @@ SELECT cast(1 as decimal(10, 0)) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST('1' 
AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2824,7 +2824,7 @@ SELECT cast(1 as decimal(20, 0)) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) / CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) / CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2833,7 +2833,7 @@ SELECT cast(1 as decimal(3, 0)) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) / CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) / CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2842,7 +2842,7 @@ SELECT cast(1 as decimal(5, 0)) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) / CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) / CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2851,7 +2851,7 @@ SELECT cast(1 as decimal(10, 0)) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2860,7 +2860,7 @@ SELECT cast(1 as decimal(20, 0)) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) / CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) / CAST(1 AS BOOLEAN))' 
due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) / CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2869,7 +2869,7 @@ SELECT cast(1 as decimal(3, 0)) / cast('2017/12/11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2878,7 +2878,7 @@ SELECT cast(1 as decimal(5, 0)) / cast('2017/12/11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2887,7 +2887,7 @@ SELECT cast(1 as decimal(10, 0)) / cast('2017/12/11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2896,7 +2896,7 @@ SELECT cast(1 as decimal(20, 0)) / cast('2017/12/11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2905,7 +2905,7 @@ SELECT cast(1 as decimal(3, 0)) / cast('2017/12/11 
09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) / CAST('2017/12/11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) / CAST('2017/12/11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) / CAST('2017/12/11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) / CAST('2017/12/11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2914,7 +2914,7 @@ SELECT cast(1 as decimal(5, 0)) / cast('2017/12/11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) / CAST('2017/12/11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) / CAST('2017/12/11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) / CAST('2017/12/11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) / CAST('2017/12/11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2923,7 +2923,7 @@ SELECT cast(1 as decimal(10, 0)) / cast('2017/12/11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST('2017/12/11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST('2017/12/11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST('2017/12/11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST('2017/12/11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7"},"queryContext":[]} -- !query @@ -2932,7 +2932,7 @@ SELECT cast(1 as decimal(20, 0)) / cast('2017/12/11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) / CAST('2017/12/11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) / CAST('2017/12/11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) / CAST('2017/12/11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) / CAST('2017/12/11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7"},"queryContext":[]} -- !query @@ -3165,7 +3165,7 @@ SELECT cast('1' as binary) % cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot 
resolve '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -3174,7 +3174,7 @@ SELECT cast('1' as binary) % cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -3183,7 +3183,7 @@ SELECT cast('1' as binary) % cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -3192,7 +3192,7 @@ SELECT cast('1' as binary) % cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -3201,7 +3201,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) % cast(1 as decimal(3, 0)) FRO struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -3210,7 +3210,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) % cast(1 as decimal(5, 0)) FRO struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 
09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -3219,7 +3219,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) % cast(1 as decimal(10, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -3228,7 +3228,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) % cast(1 as decimal(20, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -3237,7 +3237,7 @@ SELECT cast('2017-12-11 09:30:00' as date) % cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -3246,7 +3246,7 @@ SELECT cast('2017-12-11 09:30:00' as date) % cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in 
'(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -3255,7 +3255,7 @@ SELECT cast('2017-12-11 09:30:00' as date) % cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -3264,7 +3264,7 @@ SELECT cast('2017-12-11 09:30:00' as date) % cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -3529,7 +3529,7 @@ SELECT cast(1 as decimal(3, 0)) % cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) % CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) % CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) % CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) % CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -3538,7 +3538,7 @@ SELECT cast(1 as decimal(5, 0)) % cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) % CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) % CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) % CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) % CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -3547,7 +3547,7 @@ SELECT cast(1 as decimal(10, 0)) % cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) % CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) % CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"],"queryContext":[]} 
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) % CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) % CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -3556,7 +3556,7 @@ SELECT cast(1 as decimal(20, 0)) % cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) % CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) % CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) % CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) % CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -3565,7 +3565,7 @@ SELECT cast(1 as decimal(3, 0)) % cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) % CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) % CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) % CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) % CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -3574,7 +3574,7 @@ SELECT cast(1 as decimal(5, 0)) % cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) % CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) % CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) % CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) % CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -3583,7 +3583,7 @@ SELECT cast(1 as decimal(10, 0)) % cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) % CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) % CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) % CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) % CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -3592,7 +3592,7 @@ SELECT cast(1 as decimal(20, 0)) % cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) % CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) % CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7"],"queryContext":[]} 
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) % CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) % CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -3601,7 +3601,7 @@ SELECT cast(1 as decimal(3, 0)) % cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -3610,7 +3610,7 @@ SELECT cast(1 as decimal(5, 0)) % cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -3619,7 +3619,7 @@ SELECT cast(1 as decimal(10, 0)) % cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -3628,7 +3628,7 @@ SELECT cast(1 as decimal(20, 0)) % cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; 
line 1 pos 7"},"queryContext":[]} -- !query @@ -3637,7 +3637,7 @@ SELECT cast(1 as decimal(3, 0)) % cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) % CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) % CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) % CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) % CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7"},"queryContext":[]} -- !query @@ -3646,7 +3646,7 @@ SELECT cast(1 as decimal(5, 0)) % cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) % CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) % CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) % CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) % CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7"},"queryContext":[]} -- !query @@ -3655,7 +3655,7 @@ SELECT cast(1 as decimal(10, 0)) % cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) % CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) % CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) % CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) % CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7"},"queryContext":[]} -- !query @@ -3664,7 +3664,7 @@ SELECT cast(1 as decimal(20, 0)) % cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) % CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) % CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) % CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) % CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7"},"queryContext":[]} -- !query @@ -3897,7 +3897,7 @@ SELECT pmod(cast('1' as binary), cast(1 as decimal(3, 0))) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(3,0)))' 
(binary and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -3906,7 +3906,7 @@ SELECT pmod(cast('1' as binary), cast(1 as decimal(5, 0))) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -3915,7 +3915,7 @@ SELECT pmod(cast('1' as binary), cast(1 as decimal(10, 0))) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -3924,7 +3924,7 @@ SELECT pmod(cast('1' as binary), cast(1 as decimal(20, 0))) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -3933,7 +3933,7 @@ SELECT pmod(cast('2017-12-11 09:30:00.0' as timestamp), cast(1 as decimal(3, 0)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -3942,7 +3942,7 @@ SELECT pmod(cast('2017-12-11 09:30:00.0' as timestamp), cast(1 as decimal(5, 0)) struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -3951,7 +3951,7 @@ SELECT pmod(cast('2017-12-11 09:30:00.0' as timestamp), cast(1 as decimal(10, 0) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -3960,7 +3960,7 @@ SELECT pmod(cast('2017-12-11 09:30:00.0' as timestamp), cast(1 as decimal(20, 0) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -3969,7 +3969,7 @@ SELECT pmod(cast('2017-12-11 09:30:00' as date), cast(1 as decimal(3, 0))) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -3978,7 +3978,7 @@ SELECT pmod(cast('2017-12-11 09:30:00' as date), cast(1 as decimal(5, 0))) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS 
DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -3987,7 +3987,7 @@ SELECT pmod(cast('2017-12-11 09:30:00' as date), cast(1 as decimal(10, 0))) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -3996,7 +3996,7 @@ SELECT pmod(cast('2017-12-11 09:30:00' as date), cast(1 as decimal(20, 0))) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -4261,7 +4261,7 @@ SELECT pmod(cast(1 as decimal(3, 0)) , cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST(1 AS DECIMAL(3,0)), CAST('1' AS BINARY))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(3,0)), CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'pmod(CAST(1 AS DECIMAL(3,0)), CAST('1' AS BINARY))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(3,0)), CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -4270,7 +4270,7 @@ SELECT pmod(cast(1 as decimal(5, 0)) , cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST(1 AS DECIMAL(5,0)), CAST('1' AS BINARY))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(5,0)), CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'pmod(CAST(1 AS DECIMAL(5,0)), CAST('1' AS BINARY))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(5,0)), CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -4279,7 +4279,7 @@ SELECT pmod(cast(1 as decimal(10, 0)), cast('1' as binary)) FROM t struct<> 
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST(1 AS DECIMAL(10,0)), CAST('1' AS BINARY))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(10,0)), CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'pmod(CAST(1 AS DECIMAL(10,0)), CAST('1' AS BINARY))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(10,0)), CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -4288,7 +4288,7 @@ SELECT pmod(cast(1 as decimal(20, 0)), cast('1' as binary)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST(1 AS DECIMAL(20,0)), CAST('1' AS BINARY))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(20,0)), CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'pmod(CAST(1 AS DECIMAL(20,0)), CAST('1' AS BINARY))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(20,0)), CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -4297,7 +4297,7 @@ SELECT pmod(cast(1 as decimal(3, 0)) , cast(1 as boolean)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST(1 AS DECIMAL(3,0)), CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(3,0)), CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'pmod(CAST(1 AS DECIMAL(3,0)), CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(3,0)), CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -4306,7 +4306,7 @@ SELECT pmod(cast(1 as decimal(5, 0)) , cast(1 as boolean)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST(1 AS DECIMAL(5,0)), CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(5,0)), CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'pmod(CAST(1 AS DECIMAL(5,0)), CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(5,0)), CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -4315,7 +4315,7 @@ SELECT pmod(cast(1 as decimal(10, 0)), cast(1 as boolean)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST(1 AS DECIMAL(10,0)), CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(10,0)), CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'pmod(CAST(1 AS DECIMAL(10,0)), CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(10,0)), CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -4324,7 +4324,7 @@ SELECT pmod(cast(1 as decimal(20, 0)), cast(1 as boolean)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST(1 AS DECIMAL(20,0)), CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(20,0)), CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'pmod(CAST(1 AS DECIMAL(20,0)), CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(20,0)), CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -4333,7 +4333,7 @@ SELECT pmod(cast(1 as decimal(3, 0)) , cast('2017-12-11 09:30:00.0' as timestamp
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST(1 AS DECIMAL(3,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(3,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'pmod(CAST(1 AS DECIMAL(3,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(3,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -4342,7 +4342,7 @@ SELECT pmod(cast(1 as decimal(5, 0)) , cast('2017-12-11 09:30:00.0' as timestamp
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST(1 AS DECIMAL(5,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(5,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'pmod(CAST(1 AS DECIMAL(5,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(5,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -4351,7 +4351,7 @@ SELECT pmod(cast(1 as decimal(10, 0)), cast('2017-12-11 09:30:00.0' as timestamp
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'pmod(CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -4360,7 +4360,7 @@ SELECT pmod(cast(1 as decimal(20, 0)), cast('2017-12-11 09:30:00.0' as timestamp
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST(1 AS DECIMAL(20,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(20,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'pmod(CAST(1 AS DECIMAL(20,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(20,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -4369,7 +4369,7 @@ SELECT pmod(cast(1 as decimal(3, 0)) , cast('2017-12-11 09:30:00' as date)) FROM
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST(1 AS DECIMAL(3,0)), CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(3,0)), CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'pmod(CAST(1 AS DECIMAL(3,0)), CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(3,0)), CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -4378,7 +4378,7 @@ SELECT pmod(cast(1 as decimal(5, 0)) , cast('2017-12-11 09:30:00' as date)) FROM
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST(1 AS DECIMAL(5,0)), CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(5,0)), CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'pmod(CAST(1 AS DECIMAL(5,0)), CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(5,0)), CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -4387,7 +4387,7 @@ SELECT pmod(cast(1 as decimal(10, 0)), cast('2017-12-11 09:30:00' as date)) FROM
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'pmod(CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -4396,7 +4396,7 @@ SELECT pmod(cast(1 as decimal(20, 0)), cast('2017-12-11 09:30:00' as date)) FROM
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST(1 AS DECIMAL(20,0)), CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(20,0)), CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'pmod(CAST(1 AS DECIMAL(20,0)), CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(20,0)), CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -4629,7 +4629,7 @@ SELECT cast('1' as binary) = cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -4638,7 +4638,7 @@ SELECT cast('1' as binary) = cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -4647,7 +4647,7 @@ SELECT cast('1' as binary) = cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -4656,7 +4656,7 @@ SELECT cast('1' as binary) = cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -4665,7 +4665,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) = cast(1 as decimal(3, 0)) FRO
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 
'(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -4674,7 +4674,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) = cast(1 as decimal(5, 0)) FRO struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -4683,7 +4683,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) = cast(1 as decimal(10, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -4692,7 +4692,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) = cast(1 as decimal(20, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -4701,7 +4701,7 @@ SELECT cast('2017-12-11 09:30:00' as date) = cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -4710,7 +4710,7 @@ 
SELECT cast('2017-12-11 09:30:00' as date) = cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -4719,7 +4719,7 @@ SELECT cast('2017-12-11 09:30:00' as date) = cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -4728,7 +4728,7 @@ SELECT cast('2017-12-11 09:30:00' as date) = cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -4993,7 +4993,7 @@ SELECT cast(1 as decimal(3, 0)) = cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) = CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) = CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -5002,7 +5002,7 @@ SELECT cast(1 as decimal(5, 0)) = cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) = CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS 
DECIMAL(5,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) = CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -5011,7 +5011,7 @@ SELECT cast(1 as decimal(10, 0)) = cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) = CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) = CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -5020,7 +5020,7 @@ SELECT cast(1 as decimal(20, 0)) = cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) = CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) = CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -5061,7 +5061,7 @@ SELECT cast(1 as decimal(3, 0)) = cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -5070,7 +5070,7 @@ SELECT cast(1 as decimal(5, 0)) = cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -5079,7 +5079,7 @@ SELECT cast(1 as decimal(10, 0)) = cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 
09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -5088,7 +5088,7 @@ SELECT cast(1 as decimal(20, 0)) = cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -5097,7 +5097,7 @@ SELECT cast(1 as decimal(3, 0)) = cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7"},"queryContext":[]} -- !query @@ -5106,7 +5106,7 @@ SELECT cast(1 as decimal(5, 0)) = cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7"},"queryContext":[]} -- !query @@ -5115,7 +5115,7 @@ SELECT cast(1 as decimal(10, 0)) = cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00' AS DATE))' 
due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7"},"queryContext":[]} -- !query @@ -5124,7 +5124,7 @@ SELECT cast(1 as decimal(20, 0)) = cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7"},"queryContext":[]} -- !query @@ -5357,7 +5357,7 @@ SELECT cast('1' as binary) <=> cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -5366,7 +5366,7 @@ SELECT cast('1' as binary) <=> cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -5375,7 +5375,7 @@ SELECT cast('1' as binary) <=> cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -5384,7 +5384,7 @@ SELECT cast('1' as binary) <=> cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} 
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -5393,7 +5393,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <=> cast(1 as decimal(3, 0)) F struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -5402,7 +5402,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <=> cast(1 as decimal(5, 0)) F struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -5411,7 +5411,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <=> cast(1 as decimal(10, 0)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -5420,7 +5420,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <=> cast(1 as decimal(20, 0)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS 
DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -5429,7 +5429,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <=> cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -5438,7 +5438,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <=> cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -5447,7 +5447,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <=> cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -5456,7 +5456,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <=> cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -5721,7 +5721,7 @@ SELECT cast(1 as decimal(3, 0)) <=> cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) <=> CAST('1' AS BINARY))' due to data type 
mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) <=> CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) <=> CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) <=> CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -5730,7 +5730,7 @@ SELECT cast(1 as decimal(5, 0)) <=> cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) <=> CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <=> CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) <=> CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <=> CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -5739,7 +5739,7 @@ SELECT cast(1 as decimal(10, 0)) <=> cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) <=> CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) <=> CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) <=> CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) <=> CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -5748,7 +5748,7 @@ SELECT cast(1 as decimal(20, 0)) <=> cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) <=> CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) <=> CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) <=> CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) <=> CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -5789,7 +5789,7 @@ SELECT cast(1 as decimal(3, 0)) <=> cast('2017-12-11 09:30:00.0' as timestamp) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -5798,7 +5798,7 @@ SELECT cast(1 as decimal(5, 0)) <=> cast('2017-12-11 09:30:00.0' as timestamp) struct<> -- !query output 
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -5807,7 +5807,7 @@ SELECT cast(1 as decimal(10, 0)) <=> cast('2017-12-11 09:30:00.0' as timestamp)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -5816,7 +5816,7 @@ SELECT cast(1 as decimal(20, 0)) <=> cast('2017-12-11 09:30:00.0' as timestamp)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -5825,7 +5825,7 @@ SELECT cast(1 as decimal(3, 0)) <=> cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -5834,7 +5834,7 @@ SELECT cast(1 as decimal(5, 0)) <=> cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -5843,7 +5843,7 @@ SELECT cast(1 as decimal(10, 0)) <=> cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -5852,7 +5852,7 @@ SELECT cast(1 as decimal(20, 0)) <=> cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -6085,7 +6085,7 @@ SELECT cast('1' as binary) < cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -6094,7 +6094,7 @@ SELECT cast('1' as binary) < cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -6103,7 +6103,7 @@ SELECT cast('1' as binary) < cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -6112,7 +6112,7 @@ SELECT cast('1' as binary) < cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -6121,7 +6121,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) < cast(1 as decimal(3, 0)) FRO
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -6130,7 +6130,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) < cast(1 as decimal(5, 0)) FRO
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -6139,7 +6139,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) < cast(1 as decimal(10, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -6148,7 +6148,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) < cast(1 as decimal(20, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -6157,7 +6157,7 @@ SELECT cast('2017-12-11 09:30:00' as date) < cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -6166,7 +6166,7 @@ SELECT cast('2017-12-11 09:30:00' as date) < cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -6175,7 +6175,7 @@ SELECT cast('2017-12-11 09:30:00' as date) < cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -6184,7 +6184,7 @@ SELECT cast('2017-12-11 09:30:00' as date) < cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -6449,7 +6449,7 @@ SELECT cast(1 as decimal(3, 0)) < cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) < CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) < CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) < CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) < CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -6458,7 +6458,7 @@ SELECT cast(1 as decimal(5, 0)) < cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) < CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) < CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) < CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) < CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -6467,7 +6467,7 @@ SELECT cast(1 as decimal(10, 0)) < cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) < CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) < CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) < CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) < CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -6476,7 +6476,7 @@ SELECT cast(1 as decimal(20, 0)) < cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) < CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) < CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) < CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) < CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -6485,7 +6485,7 @@ SELECT cast(1 as decimal(3, 0)) < cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) < CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) < CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) < CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) < CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -6494,7 +6494,7 @@ SELECT cast(1 as decimal(5, 0)) < cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) < CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) < CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) < CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) < CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -6503,7 +6503,7 @@ SELECT cast(1 as decimal(10, 0)) < cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) < CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) < CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) < CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) < CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -6512,7 +6512,7 @@ SELECT cast(1 as decimal(20, 0)) < cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) < CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) < CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) < CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) < CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -6521,7 +6521,7 @@ SELECT cast(1 as decimal(3, 0)) < cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -6530,7 +6530,7 @@ SELECT cast(1 as decimal(5, 0)) < cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -6539,7 +6539,7 @@ SELECT cast(1 as decimal(10, 0)) < cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -6548,7 +6548,7 @@ SELECT cast(1 as decimal(20, 0)) < cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -6557,7 +6557,7 @@ SELECT cast(1 as decimal(3, 0)) < cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) < CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) < CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) < CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) < CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -6566,7 +6566,7 @@ SELECT cast(1 as decimal(5, 0)) < cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) < CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) < CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) < CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) < CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -6575,7 +6575,7 @@ SELECT cast(1 as decimal(10, 0)) < cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) < CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) < CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) < CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) < CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -6584,7 +6584,7 @@ SELECT cast(1 as decimal(20, 0)) < cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) < CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) < CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) < CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) < CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -6817,7 +6817,7 @@ SELECT cast('1' as binary) <= cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -6826,7 +6826,7 @@ SELECT cast('1' as binary) <= cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -6835,7 +6835,7 @@ SELECT cast('1' as binary) <= cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -6844,7 +6844,7 @@ SELECT cast('1' as binary) <= cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -6853,7 +6853,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <= cast(1 as decimal(3, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -6862,7 +6862,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <= cast(1 as decimal(5, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -6871,7 +6871,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <= cast(1 as decimal(10, 0)) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 
pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -6880,7 +6880,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <= cast(1 as decimal(20, 0)) F struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -6889,7 +6889,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <= cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -6898,7 +6898,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <= cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -6907,7 +6907,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <= cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 
7"},"queryContext":[]} -- !query @@ -6916,7 +6916,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <= cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -7181,7 +7181,7 @@ SELECT cast(1 as decimal(3, 0)) <= cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) <= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) <= CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) <= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) <= CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -7190,7 +7190,7 @@ SELECT cast(1 as decimal(5, 0)) <= cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) <= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <= CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) <= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <= CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -7199,7 +7199,7 @@ SELECT cast(1 as decimal(10, 0)) <= cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) <= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) <= CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) <= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) <= CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -7208,7 +7208,7 @@ SELECT cast(1 as decimal(20, 0)) <= cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) <= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) <= CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) <= CAST('1' AS BINARY))' due to data type mismatch: differing 
types in '(CAST(1 AS DECIMAL(20,0)) <= CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -7217,7 +7217,7 @@ SELECT cast(1 as decimal(3, 0)) <= cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) <= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) <= CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) <= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) <= CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -7226,7 +7226,7 @@ SELECT cast(1 as decimal(5, 0)) <= cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) <= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <= CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) <= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <= CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -7235,7 +7235,7 @@ SELECT cast(1 as decimal(10, 0)) <= cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) <= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) <= CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) <= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) <= CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -7244,7 +7244,7 @@ SELECT cast(1 as decimal(20, 0)) <= cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) <= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) <= CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) <= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) <= CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -7253,7 +7253,7 @@ SELECT cast(1 as decimal(3, 0)) <= cast('2017-12-11 09:30:00.0' as timestamp) F struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS 
DECIMAL(3,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -7262,7 +7262,7 @@ SELECT cast(1 as decimal(5, 0)) <= cast('2017-12-11 09:30:00.0' as timestamp) F struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -7271,7 +7271,7 @@ SELECT cast(1 as decimal(10, 0)) <= cast('2017-12-11 09:30:00.0' as timestamp) F struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -7280,7 +7280,7 @@ SELECT cast(1 as decimal(20, 0)) <= cast('2017-12-11 09:30:00.0' as timestamp) F struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -7289,7 +7289,7 @@ SELECT cast(1 as decimal(3, 0)) <= cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7"},"queryContext":[]} -- !query @@ -7298,7 
+7298,7 @@ SELECT cast(1 as decimal(5, 0)) <= cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7"},"queryContext":[]} -- !query @@ -7307,7 +7307,7 @@ SELECT cast(1 as decimal(10, 0)) <= cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7"},"queryContext":[]} -- !query @@ -7316,7 +7316,7 @@ SELECT cast(1 as decimal(20, 0)) <= cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7"},"queryContext":[]} -- !query @@ -7549,7 +7549,7 @@ SELECT cast('1' as binary) > cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -7558,7 +7558,7 @@ SELECT cast('1' as binary) > cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot 
resolve '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -7567,7 +7567,7 @@ SELECT cast('1' as binary) > cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -7576,7 +7576,7 @@ SELECT cast('1' as binary) > cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -7585,7 +7585,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) > cast(1 as decimal(3, 0)) FRO struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -7594,7 +7594,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) > cast(1 as decimal(5, 0)) FRO struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -7603,7 +7603,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) > cast(1 as decimal(10, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 
09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -7612,7 +7612,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) > cast(1 as decimal(20, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -7621,7 +7621,7 @@ SELECT cast('2017-12-11 09:30:00' as date) > cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -7630,7 +7630,7 @@ SELECT cast('2017-12-11 09:30:00' as date) > cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -7639,7 +7639,7 @@ SELECT cast('2017-12-11 09:30:00' as date) > cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) > 
CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -7648,7 +7648,7 @@ SELECT cast('2017-12-11 09:30:00' as date) > cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -7913,7 +7913,7 @@ SELECT cast(1 as decimal(3, 0)) > cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) > CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) > CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) > CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) > CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -7922,7 +7922,7 @@ SELECT cast(1 as decimal(5, 0)) > cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) > CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) > CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) > CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) > CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -7931,7 +7931,7 @@ SELECT cast(1 as decimal(10, 0)) > cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) > CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) > CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) > CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) > CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -7940,7 +7940,7 @@ SELECT cast(1 as decimal(20, 0)) > cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) > CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) > CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"],"queryContext":[]} 
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) > CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) > CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -7949,7 +7949,7 @@ SELECT cast(1 as decimal(3, 0)) > cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) > CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) > CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) > CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) > CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -7958,7 +7958,7 @@ SELECT cast(1 as decimal(5, 0)) > cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) > CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) > CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) > CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) > CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -7967,7 +7967,7 @@ SELECT cast(1 as decimal(10, 0)) > cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) > CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) > CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) > CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) > CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -7976,7 +7976,7 @@ SELECT cast(1 as decimal(20, 0)) > cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) > CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) > CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) > CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) > CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -7985,7 +7985,7 @@ SELECT cast(1 as decimal(3, 0)) > cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) 
and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -7994,7 +7994,7 @@ SELECT cast(1 as decimal(5, 0)) > cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -8003,7 +8003,7 @@ SELECT cast(1 as decimal(10, 0)) > cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -8012,7 +8012,7 @@ SELECT cast(1 as decimal(20, 0)) > cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -8021,7 +8021,7 @@ SELECT cast(1 as decimal(3, 0)) > cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) > CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) > CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) > CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) > 
CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7"},"queryContext":[]} -- !query @@ -8030,7 +8030,7 @@ SELECT cast(1 as decimal(5, 0)) > cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) > CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) > CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) > CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) > CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7"},"queryContext":[]} -- !query @@ -8039,7 +8039,7 @@ SELECT cast(1 as decimal(10, 0)) > cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) > CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) > CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) > CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) > CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7"},"queryContext":[]} -- !query @@ -8048,7 +8048,7 @@ SELECT cast(1 as decimal(20, 0)) > cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) > CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) > CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) > CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) > CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7"},"queryContext":[]} -- !query @@ -8281,7 +8281,7 @@ SELECT cast('1' as binary) >= cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -8290,7 +8290,7 @@ SELECT cast('1' as binary) >= cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(5,0)))' (binary and 
decimal(5,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -8299,7 +8299,7 @@ SELECT cast('1' as binary) >= cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -8308,7 +8308,7 @@ SELECT cast('1' as binary) >= cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -8317,7 +8317,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) >= cast(1 as decimal(3, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -8326,7 +8326,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) >= cast(1 as decimal(5, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -8335,7 +8335,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) >= cast(1 as decimal(10, 0)) F struct<> -- !query 
output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -8344,7 +8344,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) >= cast(1 as decimal(20, 0)) F struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -8353,7 +8353,7 @@ SELECT cast('2017-12-11 09:30:00' as date) >= cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -8362,7 +8362,7 @@ SELECT cast('2017-12-11 09:30:00' as date) >= cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -8371,7 +8371,7 @@ SELECT cast('2017-12-11 09:30:00' as date) >= cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(10,0)))' (date and 
decimal(10,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -8380,7 +8380,7 @@ SELECT cast('2017-12-11 09:30:00' as date) >= cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -8645,7 +8645,7 @@ SELECT cast(1 as decimal(3, 0)) >= cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) >= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) >= CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) >= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) >= CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -8654,7 +8654,7 @@ SELECT cast(1 as decimal(5, 0)) >= cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) >= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) >= CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) >= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) >= CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -8663,7 +8663,7 @@ SELECT cast(1 as decimal(10, 0)) >= cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) >= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) >= CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) >= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) >= CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -8672,7 +8672,7 @@ SELECT cast(1 as decimal(20, 0)) >= cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) >= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) >= CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) >= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) >= CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -8681,7 +8681,7 @@ SELECT cast(1 as decimal(3, 0)) >= cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) >= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) >= CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) >= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) >= CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -8690,7 +8690,7 @@ SELECT cast(1 as decimal(5, 0)) >= cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) >= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) >= CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) >= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) >= CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -8699,7 +8699,7 @@ SELECT cast(1 as decimal(10, 0)) >= cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) >= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) >= CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) >= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) >= CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -8708,7 +8708,7 @@ SELECT cast(1 as decimal(20, 0)) >= cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) >= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) >= CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) >= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) >= CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -8717,7 +8717,7 @@ SELECT cast(1 as decimal(3, 0)) >= cast('2017-12-11 09:30:00.0' as timestamp) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -8726,7 +8726,7 @@ SELECT cast(1 as decimal(5, 0)) >= cast('2017-12-11 09:30:00.0' as timestamp) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -8735,7 +8735,7 @@ SELECT cast(1 as decimal(10, 0)) >= cast('2017-12-11 09:30:00.0' as timestamp) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -8744,7 +8744,7 @@ SELECT cast(1 as decimal(20, 0)) >= cast('2017-12-11 09:30:00.0' as timestamp) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -8753,7 +8753,7 @@ SELECT cast(1 as decimal(3, 0)) >= cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -8762,7 +8762,7 @@ SELECT cast(1 as decimal(5, 0)) >= cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -8771,7 +8771,7 @@ SELECT cast(1 as decimal(10, 0)) >= cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -8780,7 +8780,7 @@ SELECT cast(1 as decimal(20, 0)) >= cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -9013,7 +9013,7 @@ SELECT cast('1' as binary) <> cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -9022,7 +9022,7 @@ SELECT cast('1' as binary) <> cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -9031,7 +9031,7 @@ SELECT cast('1' as binary) <> cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -9040,7 +9040,7 @@ SELECT cast('1' as binary) <> cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -9049,7 +9049,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <> cast(1 as decimal(3, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -9058,7 +9058,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <> cast(1 as decimal(5, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -9067,7 +9067,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <> cast(1 as decimal(10, 0)) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -9076,7 +9076,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <> cast(1 as decimal(20, 0)) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -9085,7 +9085,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <> cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -9094,7 +9094,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <> cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -9103,7 +9103,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <> cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -9112,7 +9112,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <> cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -9377,7 +9377,7 @@ SELECT cast(1 as decimal(3, 0)) <> cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) = CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) = CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -9386,7 +9386,7 @@ SELECT cast(1 as decimal(5, 0)) <> cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) = CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) = CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -9395,7 +9395,7 @@ SELECT cast(1 as decimal(10, 0)) <> cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) = CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) = CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -9404,7 +9404,7 @@ SELECT cast(1 as decimal(20, 0)) <> cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) = CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) = CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -9445,7 +9445,7 @@ SELECT cast(1 as decimal(3, 0)) <> cast('2017-12-11 09:30:00.0' as timestamp) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -9454,7 +9454,7 @@ SELECT cast(1 as decimal(5, 0)) <> cast('2017-12-11 09:30:00.0' as timestamp) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -9463,7 +9463,7 @@ SELECT cast(1 as decimal(10, 0)) <> cast('2017-12-11 09:30:00.0' as timestamp) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -9472,7 +9472,7 @@ SELECT cast(1 as decimal(20, 0)) <> cast('2017-12-11 09:30:00.0' as timestamp) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -9481,7 +9481,7 @@ SELECT cast(1 as decimal(3, 0)) <> cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -9490,7 +9490,7 @@ SELECT cast(1 as decimal(5, 0)) <> cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -9499,7 +9499,7 @@ SELECT cast(1 as decimal(10, 0)) <> cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -9508,4 +9508,4 @@ SELECT cast(1 as decimal(20, 0)) <> cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7"},"queryContext":[]}
diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/division.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/division.sql.out
index d66765a66d9c3..f09dc13f3eb6a 100644
--- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/division.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/division.sql.out
@@ -77,7 +77,7 @@ SELECT cast(1 as tinyint) / cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS TINYINT) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS TINYINT) / CAST('1' AS BINARY))' (tinyint and binary).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS TINYINT) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS TINYINT) / CAST('1' AS BINARY))' (tinyint and binary).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -86,7 +86,7 @@ SELECT cast(1 as tinyint) / cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS TINYINT) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS TINYINT) / CAST(1 AS BOOLEAN))' (tinyint and boolean).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS TINYINT) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS TINYINT) / CAST(1 AS BOOLEAN))' (tinyint and boolean).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -95,7 +95,7 @@ SELECT cast(1 as tinyint) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS TINYINT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS TINYINT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (tinyint and timestamp).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS TINYINT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS TINYINT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (tinyint and timestamp).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -104,7 +104,7 @@ SELECT cast(1 as tinyint) / cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS TINYINT) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS TINYINT) / CAST('2017-12-11 09:30:00' AS DATE))' (tinyint and date).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS TINYINT) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS TINYINT) / CAST('2017-12-11 09:30:00' AS DATE))' (tinyint and date).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -177,7 +177,7 @@ SELECT cast(1 as smallint) / cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS SMALLINT) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS SMALLINT) / CAST('1' AS BINARY))' (smallint and binary).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS SMALLINT) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS SMALLINT) / CAST('1' AS BINARY))' (smallint and binary).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -186,7 +186,7 @@ SELECT cast(1 as smallint) / cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS SMALLINT) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS SMALLINT) / CAST(1 AS BOOLEAN))' (smallint and boolean).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS SMALLINT) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS SMALLINT) / CAST(1 AS BOOLEAN))' (smallint and boolean).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -195,7 +195,7 @@ SELECT cast(1 as smallint) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS SMALLINT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS SMALLINT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (smallint and timestamp).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS SMALLINT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS SMALLINT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (smallint and timestamp).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -204,7 +204,7 @@ SELECT cast(1 as smallint) / cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS SMALLINT) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS SMALLINT) / CAST('2017-12-11 09:30:00' AS DATE))' (smallint and date).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS SMALLINT) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS SMALLINT) / CAST('2017-12-11 09:30:00' AS DATE))' (smallint and date).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -277,7 +277,7 @@ SELECT cast(1 as int) / cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS INT) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS INT) / CAST('1' AS BINARY))' (int and binary).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS INT) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS INT) / CAST('1' AS BINARY))' (int and binary).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -286,7 +286,7 @@ SELECT cast(1 as int) / cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS INT) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS INT) / CAST(1 AS BOOLEAN))' (int and boolean).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS INT) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS INT) / CAST(1 AS BOOLEAN))' (int and boolean).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -295,7 +295,7 @@ SELECT cast(1 as int) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS INT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS INT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (int and timestamp).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS INT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS INT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (int and timestamp).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -304,7 +304,7 @@ SELECT cast(1 as int) / cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS INT) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS INT) / CAST('2017-12-11 09:30:00' AS DATE))' (int and date).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS INT) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS INT) / CAST('2017-12-11 09:30:00' AS DATE))' (int and date).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -377,7 +377,7 @@ SELECT cast(1 as bigint) / cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BIGINT) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS BIGINT) / CAST('1' AS BINARY))' (bigint and binary).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS BIGINT) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS BIGINT) / CAST('1' AS BINARY))' (bigint and binary).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -386,7 +386,7 @@ SELECT cast(1 as bigint) / cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BIGINT) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS BIGINT) / CAST(1 AS BOOLEAN))' (bigint and boolean).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS BIGINT) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS BIGINT) / CAST(1 AS BOOLEAN))' (bigint and boolean).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -395,7 +395,7 @@ SELECT cast(1 as bigint) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BIGINT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS BIGINT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (bigint and timestamp).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS BIGINT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS BIGINT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (bigint and timestamp).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -404,7 +404,7 @@ SELECT cast(1 as bigint) / cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BIGINT) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS BIGINT) / CAST('2017-12-11 09:30:00' AS DATE))' (bigint and date).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS BIGINT) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS BIGINT) / CAST('2017-12-11 09:30:00' AS DATE))' (bigint and date).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -477,7 +477,7 @@ SELECT cast(1 as float) / cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS FLOAT) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS FLOAT) / CAST('1' AS BINARY))' (float and binary).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS FLOAT) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS FLOAT) / CAST('1' AS BINARY))' (float and binary).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -486,7 +486,7 @@ SELECT cast(1 as float) / cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS FLOAT) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS FLOAT) / CAST(1 AS BOOLEAN))' (float and boolean).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS FLOAT) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS FLOAT) / CAST(1 AS BOOLEAN))' (float and boolean).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -495,7 +495,7 @@ SELECT cast(1 as float) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS FLOAT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS FLOAT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (float and timestamp).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS FLOAT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS FLOAT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (float and timestamp).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -504,7 +504,7 @@ SELECT cast(1 as float) / cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS FLOAT) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS FLOAT) / CAST('2017-12-11 09:30:00' AS DATE))' (float and date).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS FLOAT) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS FLOAT) / CAST('2017-12-11 09:30:00' AS DATE))' (float and date).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -577,7 +577,7 @@ SELECT cast(1 as double) / cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DOUBLE) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DOUBLE) / CAST('1' AS BINARY))' (double and binary).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DOUBLE) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DOUBLE) / CAST('1' AS BINARY))' (double and binary).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -586,7 +586,7 @@ SELECT cast(1 as double) / cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DOUBLE) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DOUBLE) / CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DOUBLE) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DOUBLE) / CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -595,7 +595,7 @@ SELECT cast(1 as double) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DOUBLE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DOUBLE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DOUBLE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DOUBLE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -604,7 +604,7 @@ SELECT cast(1 as double) / cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DOUBLE) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DOUBLE) / CAST('2017-12-11 09:30:00' AS DATE))' (double and date).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DOUBLE) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DOUBLE) / CAST('2017-12-11 09:30:00' AS DATE))' (double and date).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -677,7 +677,7 @@ SELECT cast(1 as decimal(10, 0)) / cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -686,7 +686,7 @@ SELECT cast(1 as decimal(10, 0)) / cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -695,7 +695,7 @@ SELECT cast(1 as decimal(10, 0)) / cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -704,7 +704,7 @@ SELECT cast(1 as decimal(10, 0)) / cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -777,7 +777,7 @@ SELECT cast(1 as string) / cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST('1' AS BINARY))' (double and binary).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST('1' AS BINARY))' (double and binary).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -786,7 +786,7 @@ SELECT cast(1 as string) / cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -795,7 +795,7 @@ SELECT cast(1 as string) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -804,7 +804,7 @@ SELECT cast(1 as string) / cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST('2017-12-11 09:30:00' AS DATE))' (double and date).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST('2017-12-11 09:30:00' AS DATE))' (double and date).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -813,7 +813,7 @@ SELECT cast('1' as binary) / cast(1 as tinyint) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS TINYINT))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS TINYINT))' (binary and tinyint).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS TINYINT))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS TINYINT))' (binary and tinyint).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -822,7 +822,7 @@ SELECT cast('1' as binary) / cast(1 as smallint) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS SMALLINT))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS SMALLINT))' (binary and smallint).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS SMALLINT))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS SMALLINT))' (binary and smallint).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -831,7 +831,7 @@ SELECT cast('1' as binary) / cast(1 as int) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS INT))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS INT))' (binary and int).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS INT))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS INT))' (binary and int).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -840,7 +840,7 @@ SELECT cast('1' as binary) / cast(1 as bigint) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS BIGINT))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS BIGINT))' (binary and bigint).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS BIGINT))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS BIGINT))' (binary and bigint).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -849,7 +849,7 @@ SELECT cast('1' as binary) / cast(1 as float) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS FLOAT))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS FLOAT))' (binary and float).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS FLOAT))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS FLOAT))' (binary and float).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -858,7 +858,7 @@ SELECT cast('1' as binary) / cast(1 as double) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS DOUBLE))' (binary and double).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS DOUBLE))' (binary and double).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -867,7 +867,7 @@ SELECT cast('1' as binary) / cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -876,7 +876,7 @@ SELECT cast('1' as binary) / cast(1 as string) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) / CAST(CAST(1 AS STRING) AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(CAST(1 AS STRING) AS DOUBLE))' (binary and double).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) / CAST(CAST(1 AS STRING) AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(CAST(1 AS STRING) AS DOUBLE))' (binary and double).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -885,7 +885,7 @@ SELECT cast('1' as binary) / cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) / CAST('1' AS BINARY))' due to data type mismatch: '(CAST('1' AS BINARY) / CAST('1' AS BINARY))' requires (double or decimal) type, not binary; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) / CAST('1' AS BINARY))' due to data type mismatch: '(CAST('1' AS BINARY) / CAST('1' AS BINARY))' requires (double or decimal) type, not binary; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -894,7 +894,7 @@ SELECT cast('1' as binary) / cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS BOOLEAN))' (binary and boolean).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS BOOLEAN))' (binary and boolean).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -903,7 +903,7 @@ SELECT cast('1' as binary) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (binary and timestamp).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (binary and timestamp).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -912,7 +912,7 @@ SELECT cast('1' as binary) / cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST('2017-12-11 09:30:00' AS DATE))' (binary and date).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST('2017-12-11 09:30:00' AS DATE))' (binary and date).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -921,7 +921,7 @@ SELECT cast(1 as boolean) / cast(1 as tinyint) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS TINYINT))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS TINYINT))' (boolean and tinyint).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS TINYINT))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS TINYINT))' (boolean and tinyint).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -930,7 +930,7 @@ SELECT cast(1 as boolean) / cast(1 as smallint) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS SMALLINT))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS SMALLINT))' (boolean and smallint).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS SMALLINT))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS SMALLINT))' (boolean and smallint).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -939,7 +939,7 @@ SELECT cast(1 as boolean) / cast(1 as int) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS INT))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS INT))' (boolean and int).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS INT))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS INT))' (boolean and int).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -948,7 +948,7 @@ SELECT cast(1 as boolean) / cast(1 as bigint) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS BIGINT))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS BIGINT))' (boolean and bigint).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS BIGINT))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS BIGINT))' (boolean and bigint).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -957,7 +957,7 @@ SELECT cast(1 as boolean) / cast(1 as float) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS FLOAT))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS FLOAT))' (boolean and float).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS FLOAT))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS FLOAT))' (boolean and float).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -966,7 +966,7 @@ SELECT cast(1 as boolean) / cast(1 as double) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS DOUBLE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS DOUBLE))' (boolean and double).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS DOUBLE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS DOUBLE))' (boolean and double).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -975,7 +975,7 @@ SELECT cast(1 as boolean) / cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS DECIMAL(10,0)))' (boolean and decimal(10,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS DECIMAL(10,0)))' (boolean and decimal(10,0)).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -984,7 +984,7 @@ SELECT cast(1 as boolean) / cast(1 as string) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BOOLEAN) / CAST(CAST(1 AS STRING) AS DOUBLE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(CAST(1 AS STRING) AS DOUBLE))' (boolean and double).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS BOOLEAN) / CAST(CAST(1 AS STRING) AS DOUBLE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(CAST(1 AS STRING) AS DOUBLE))' (boolean and double).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -993,7 +993,7 @@ SELECT cast(1 as boolean) / cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BOOLEAN) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST('1' AS BINARY))' (boolean and binary).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS BOOLEAN) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST('1' AS BINARY))' (boolean and binary).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -1002,7 +1002,7 @@ SELECT cast(1 as boolean) / cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS BOOLEAN))' due to data type mismatch: '(CAST(1 AS BOOLEAN) / CAST(1 AS BOOLEAN))' requires (double or decimal) type, not boolean; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS BOOLEAN))' due to data type mismatch: '(CAST(1 AS BOOLEAN) / CAST(1 AS BOOLEAN))' requires (double or decimal) type, not boolean; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -1011,7 +1011,7 @@ SELECT cast(1 as boolean) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BOOLEAN) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (boolean and timestamp).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS BOOLEAN) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (boolean and timestamp).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -1020,7 +1020,7 @@ SELECT cast(1 as boolean) / cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BOOLEAN) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST('2017-12-11 09:30:00' AS DATE))' (boolean and date).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS BOOLEAN) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST('2017-12-11 09:30:00' AS DATE))' (boolean and date).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -1029,7 +1029,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as tinyint) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS TINYINT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS TINYINT))' (timestamp and tinyint).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS TINYINT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS TINYINT))' (timestamp and tinyint).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -1038,7 +1038,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as smallint) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS SMALLINT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS SMALLINT))' (timestamp and smallint).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS SMALLINT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS SMALLINT))' (timestamp and smallint).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -1047,7 +1047,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as int) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS INT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS INT))' (timestamp and int).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS INT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS INT))' (timestamp and int).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -1056,7 +1056,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as bigint) FROM t
 struct<>
 --
!query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS BIGINT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS BIGINT))' (timestamp and bigint).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS BIGINT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS BIGINT))' (timestamp and bigint).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1065,7 +1065,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as float) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS FLOAT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS FLOAT))' (timestamp and float).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS FLOAT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS FLOAT))' (timestamp and float).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1074,7 +1074,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as double) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DOUBLE))' (timestamp and double).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DOUBLE))' (timestamp and double).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1083,7 +1083,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as decimal(10, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1092,7 +1092,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as string) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(CAST(1 AS STRING) AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(CAST(1 AS STRING) AS DOUBLE))' (timestamp and double).; line 1 pos 
7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(CAST(1 AS STRING) AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(CAST(1 AS STRING) AS DOUBLE))' (timestamp and double).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1101,7 +1101,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('1' AS BINARY))' (timestamp and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('1' AS BINARY))' (timestamp and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1110,7 +1110,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS BOOLEAN))' (timestamp and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS BOOLEAN))' (timestamp and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1119,7 +1119,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast('2017-12-11 09:30:00.0' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' requires (double or decimal) type, not timestamp; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' requires (double or decimal) type, not timestamp; line 1 pos 7"},"queryContext":[]} -- !query @@ -1128,7 +1128,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast('2017-12-11 09:30:00' a struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('2017-12-11 09:30:00' AS DATE))' (timestamp and date).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing 
types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('2017-12-11 09:30:00' AS DATE))' (timestamp and date).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1137,7 +1137,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as tinyint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS TINYINT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS TINYINT))' (date and tinyint).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS TINYINT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS TINYINT))' (date and tinyint).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1146,7 +1146,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as smallint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS SMALLINT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS SMALLINT))' (date and smallint).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS SMALLINT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS SMALLINT))' (date and smallint).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1155,7 +1155,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as int) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS INT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS INT))' (date and int).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS INT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS INT))' (date and int).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1164,7 +1164,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as bigint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS BIGINT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS BIGINT))' (date and bigint).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS BIGINT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS BIGINT))' (date and bigint).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1173,7 +1173,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as float) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS FLOAT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS FLOAT))' (date and float).; line 1 pos 7"],"queryContext":[]} 
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS FLOAT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS FLOAT))' (date and float).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1182,7 +1182,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as double) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS DOUBLE))' (date and double).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS DOUBLE))' (date and double).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1191,7 +1191,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1200,7 +1200,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as string) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(CAST(1 AS STRING) AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(CAST(1 AS STRING) AS DOUBLE))' (date and double).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(CAST(1 AS STRING) AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(CAST(1 AS STRING) AS DOUBLE))' (date and double).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1209,7 +1209,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('1' AS BINARY))' (date and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('1' AS BINARY))' (date and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1218,7 +1218,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS BOOLEAN))' (date and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS BOOLEAN))' (date and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1227,7 +1227,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast('2017-12-11 09:30:00.0' as tim struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (date and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (date and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -1236,4 +1236,4 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast('2017-12-11 09:30:00' as date) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('2017-12-11 09:30:00' AS DATE))' requires (double or decimal) type, not date; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('2017-12-11 09:30:00' AS DATE))' requires (double or decimal) type, not date; line 1 pos 7"},"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/ifCoercion.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/ifCoercion.sql.out index 167b4d0182496..36d050be55676 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/ifCoercion.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/ifCoercion.sql.out @@ -77,7 +77,7 @@ SELECT IF(true, cast(1 as tinyint), cast('2' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS TINYINT), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS TINYINT), CAST('2' AS BINARY)))' (tinyint and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST(1 AS TINYINT), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS TINYINT), CAST('2' AS BINARY)))' (tinyint and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -86,7 +86,7 @@ SELECT IF(true, cast(1 as tinyint), cast(2 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS TINYINT), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS TINYINT), CAST(2 AS BOOLEAN)))' (tinyint and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST(1 AS TINYINT), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS TINYINT), CAST(2 AS BOOLEAN)))' (tinyint and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -95,7 +95,7 @@ SELECT IF(true, cast(1 as tinyint), cast('2017-12-11 09:30:00.0' as timestamp)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS TINYINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS TINYINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (tinyint and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST(1 AS TINYINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS TINYINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (tinyint and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -104,7 +104,7 @@ SELECT IF(true, cast(1 as tinyint), cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS TINYINT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS TINYINT), CAST('2017-12-11 09:30:00' AS DATE)))' (tinyint and date).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST(1 AS TINYINT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS TINYINT), CAST('2017-12-11 09:30:00' AS DATE)))' (tinyint and date).; line 1 pos 7"},"queryContext":[]} -- !query @@ -177,7 +177,7 @@ SELECT IF(true, cast(1 as smallint), cast('2' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS SMALLINT), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS SMALLINT), CAST('2' AS BINARY)))' (smallint and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST(1 AS SMALLINT), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS SMALLINT), CAST('2' AS BINARY)))' (smallint and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -186,7 +186,7 @@ SELECT IF(true, cast(1 as smallint), cast(2 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS SMALLINT), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS SMALLINT), CAST(2 AS BOOLEAN)))' (smallint and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST(1 AS SMALLINT), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS SMALLINT), CAST(2 AS BOOLEAN)))' (smallint 
and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -195,7 +195,7 @@ SELECT IF(true, cast(1 as smallint), cast('2017-12-11 09:30:00.0' as timestamp)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS SMALLINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS SMALLINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (smallint and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST(1 AS SMALLINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS SMALLINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (smallint and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -204,7 +204,7 @@ SELECT IF(true, cast(1 as smallint), cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS SMALLINT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS SMALLINT), CAST('2017-12-11 09:30:00' AS DATE)))' (smallint and date).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST(1 AS SMALLINT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS SMALLINT), CAST('2017-12-11 09:30:00' AS DATE)))' (smallint and date).; line 1 pos 7"},"queryContext":[]} -- !query @@ -277,7 +277,7 @@ SELECT IF(true, cast(1 as int), cast('2' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS INT), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS INT), CAST('2' AS BINARY)))' (int and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST(1 AS INT), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS INT), CAST('2' AS BINARY)))' (int and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -286,7 +286,7 @@ SELECT IF(true, cast(1 as int), cast(2 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS INT), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS INT), CAST(2 AS BOOLEAN)))' (int and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST(1 AS INT), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS INT), CAST(2 AS BOOLEAN)))' (int and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -295,7 +295,7 @@ SELECT IF(true, cast(1 as int), cast('2017-12-11 09:30:00.0' as timestamp)) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS INT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS INT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (int and timestamp).; line 1 pos 7"],"queryContext":[]} 
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST(1 AS INT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS INT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (int and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -304,7 +304,7 @@ SELECT IF(true, cast(1 as int), cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS INT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS INT), CAST('2017-12-11 09:30:00' AS DATE)))' (int and date).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST(1 AS INT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS INT), CAST('2017-12-11 09:30:00' AS DATE)))' (int and date).; line 1 pos 7"},"queryContext":[]} -- !query @@ -377,7 +377,7 @@ SELECT IF(true, cast(1 as bigint), cast('2' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS BIGINT), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BIGINT), CAST('2' AS BINARY)))' (bigint and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST(1 AS BIGINT), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BIGINT), CAST('2' AS BINARY)))' (bigint and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -386,7 +386,7 @@ SELECT IF(true, cast(1 as bigint), cast(2 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS BIGINT), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BIGINT), CAST(2 AS BOOLEAN)))' (bigint and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST(1 AS BIGINT), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BIGINT), CAST(2 AS BOOLEAN)))' (bigint and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -395,7 +395,7 @@ SELECT IF(true, cast(1 as bigint), cast('2017-12-11 09:30:00.0' as timestamp)) F struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS BIGINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BIGINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (bigint and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST(1 AS BIGINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BIGINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (bigint and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -404,7 +404,7 @@ SELECT IF(true, cast(1 as bigint), cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 
'(IF(true, CAST(1 AS BIGINT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BIGINT), CAST('2017-12-11 09:30:00' AS DATE)))' (bigint and date).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST(1 AS BIGINT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BIGINT), CAST('2017-12-11 09:30:00' AS DATE)))' (bigint and date).; line 1 pos 7"},"queryContext":[]} -- !query @@ -477,7 +477,7 @@ SELECT IF(true, cast(1 as float), cast('2' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS FLOAT), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS FLOAT), CAST('2' AS BINARY)))' (float and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST(1 AS FLOAT), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS FLOAT), CAST('2' AS BINARY)))' (float and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -486,7 +486,7 @@ SELECT IF(true, cast(1 as float), cast(2 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS FLOAT), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS FLOAT), CAST(2 AS BOOLEAN)))' (float and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST(1 AS FLOAT), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS FLOAT), CAST(2 AS BOOLEAN)))' (float and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -495,7 +495,7 @@ SELECT IF(true, cast(1 as float), cast('2017-12-11 09:30:00.0' as timestamp)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS FLOAT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS FLOAT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (float and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST(1 AS FLOAT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS FLOAT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (float and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -504,7 +504,7 @@ SELECT IF(true, cast(1 as float), cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS FLOAT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS FLOAT), CAST('2017-12-11 09:30:00' AS DATE)))' (float and date).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST(1 AS FLOAT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS FLOAT), CAST('2017-12-11 09:30:00' AS DATE)))' (float and date).; line 1 pos 7"},"queryContext":[]} -- !query @@ -577,7 
+577,7 @@ SELECT IF(true, cast(1 as double), cast('2' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS DOUBLE), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DOUBLE), CAST('2' AS BINARY)))' (double and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST(1 AS DOUBLE), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DOUBLE), CAST('2' AS BINARY)))' (double and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -586,7 +586,7 @@ SELECT IF(true, cast(1 as double), cast(2 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS DOUBLE), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DOUBLE), CAST(2 AS BOOLEAN)))' (double and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST(1 AS DOUBLE), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DOUBLE), CAST(2 AS BOOLEAN)))' (double and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -595,7 +595,7 @@ SELECT IF(true, cast(1 as double), cast('2017-12-11 09:30:00.0' as timestamp)) F struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS DOUBLE), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DOUBLE), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (double and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST(1 AS DOUBLE), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DOUBLE), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (double and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -604,7 +604,7 @@ SELECT IF(true, cast(1 as double), cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS DOUBLE), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DOUBLE), CAST('2017-12-11 09:30:00' AS DATE)))' (double and date).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST(1 AS DOUBLE), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DOUBLE), CAST('2017-12-11 09:30:00' AS DATE)))' (double and date).; line 1 pos 7"},"queryContext":[]} -- !query @@ -677,7 +677,7 @@ SELECT IF(true, cast(1 as decimal(10, 0)), cast('2' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST('2' AS BINARY)))' (decimal(10,0) and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST(1 AS 
DECIMAL(10,0)), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST('2' AS BINARY)))' (decimal(10,0) and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -686,7 +686,7 @@ SELECT IF(true, cast(1 as decimal(10, 0)), cast(2 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST(2 AS BOOLEAN)))' (decimal(10,0) and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST(2 AS BOOLEAN)))' (decimal(10,0) and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -695,7 +695,7 @@ SELECT IF(true, cast(1 as decimal(10, 0)), cast('2017-12-11 09:30:00.0' as times struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (decimal(10,0) and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (decimal(10,0) and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -704,7 +704,7 @@ SELECT IF(true, cast(1 as decimal(10, 0)), cast('2017-12-11 09:30:00' as date)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00' AS DATE)))' (decimal(10,0) and date).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00' AS DATE)))' (decimal(10,0) and date).; line 1 pos 7"},"queryContext":[]} -- !query @@ -777,7 +777,7 @@ SELECT IF(true, cast(1 as string), cast('2' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS STRING), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS STRING), CAST('2' AS BINARY)))' (string and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST(1 AS STRING), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS STRING), CAST('2' AS BINARY)))' (string and binary).; line 1 pos 7"},"queryContext":[]} -- !query @@ -786,7 +786,7 @@ SELECT IF(true, cast(1 as string), cast(2 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS STRING), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS STRING), CAST(2 AS BOOLEAN)))' (string and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST(1 AS STRING), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS STRING), CAST(2 AS BOOLEAN)))' (string and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -811,7 +811,7 @@ SELECT IF(true, cast('1' as binary), cast(2 as tinyint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS TINYINT)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS TINYINT)))' (binary and tinyint).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS TINYINT)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS TINYINT)))' (binary and tinyint).; line 1 pos 7"},"queryContext":[]} -- !query @@ -820,7 +820,7 @@ SELECT IF(true, cast('1' as binary), cast(2 as smallint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS SMALLINT)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS SMALLINT)))' (binary and smallint).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS SMALLINT)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS SMALLINT)))' (binary and smallint).; line 1 pos 7"},"queryContext":[]} -- !query @@ -829,7 +829,7 @@ SELECT IF(true, cast('1' as binary), cast(2 as int)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS INT)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS INT)))' (binary and int).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS INT)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS INT)))' (binary and int).; line 1 pos 7"},"queryContext":[]} -- !query @@ -838,7 +838,7 @@ SELECT IF(true, cast('1' as binary), cast(2 as bigint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS BIGINT)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS BIGINT)))' (binary and bigint).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS BIGINT)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS BIGINT)))' (binary and bigint).; line 1 pos 7"},"queryContext":[]} -- !query @@ -847,7 +847,7 @@ SELECT IF(true, cast('1' as binary), cast(2 as float)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS FLOAT)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS FLOAT)))' (binary and float).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS FLOAT)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS FLOAT)))' (binary and float).; line 1 pos 7"},"queryContext":[]} -- !query @@ -856,7 +856,7 @@ SELECT IF(true, cast('1' as binary), cast(2 as double)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS DOUBLE)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS DOUBLE)))' (binary and double).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS DOUBLE)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS DOUBLE)))' (binary and double).; line 1 pos 7"},"queryContext":[]} -- !query @@ -865,7 +865,7 @@ SELECT IF(true, cast('1' as binary), cast(2 as decimal(10, 0))) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS DECIMAL(10,0))))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS DECIMAL(10,0))))' (binary and decimal(10,0)).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS DECIMAL(10,0))))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS DECIMAL(10,0))))' (binary and decimal(10,0)).; line 1 pos 7"},"queryContext":[]} -- !query @@ -874,7 +874,7 @@ SELECT IF(true, cast('1' as binary), cast(2 as string)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS STRING)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS STRING)))' (binary and string).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS STRING)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS STRING)))' (binary and string).; line 1 pos 7"},"queryContext":[]} -- !query @@ -891,7 +891,7 @@ SELECT IF(true, cast('1' as binary), cast(2 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS BOOLEAN)))' (binary and boolean).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS BOOLEAN)))' (binary and boolean).; line 1 pos 7"},"queryContext":[]} -- !query @@ -900,7 +900,7 @@ SELECT IF(true, cast('1' as binary), cast('2017-12-11 09:30:00.0' as timestamp)) struct<> -- !query 
output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('1' AS BINARY), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (binary and timestamp).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST('1' AS BINARY), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (binary and timestamp).; line 1 pos 7"},"queryContext":[]} -- !query @@ -909,7 +909,7 @@ SELECT IF(true, cast('1' as binary), cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('1' AS BINARY), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST('2017-12-11 09:30:00' AS DATE)))' (binary and date).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST('1' AS BINARY), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST('2017-12-11 09:30:00' AS DATE)))' (binary and date).; line 1 pos 7"},"queryContext":[]} -- !query @@ -918,7 +918,7 @@ SELECT IF(true, cast(1 as boolean), cast(2 as tinyint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS TINYINT)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS TINYINT)))' (boolean and tinyint).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS TINYINT)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS TINYINT)))' (boolean and tinyint).; line 1 pos 7"},"queryContext":[]} -- !query @@ -927,7 +927,7 @@ SELECT IF(true, cast(1 as boolean), cast(2 as smallint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS SMALLINT)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS SMALLINT)))' (boolean and smallint).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS SMALLINT)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS SMALLINT)))' (boolean and smallint).; line 1 pos 7"},"queryContext":[]} -- !query @@ -936,7 +936,7 @@ SELECT IF(true, cast(1 as boolean), cast(2 as int)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS INT)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS INT)))' (boolean and int).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS INT)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS INT)))' 
(boolean and int).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -945,7 +945,7 @@ SELECT IF(true, cast(1 as boolean), cast(2 as bigint)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS BIGINT)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS BIGINT)))' (boolean and bigint).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS BIGINT)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS BIGINT)))' (boolean and bigint).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -954,7 +954,7 @@ SELECT IF(true, cast(1 as boolean), cast(2 as float)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS FLOAT)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS FLOAT)))' (boolean and float).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS FLOAT)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS FLOAT)))' (boolean and float).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -963,7 +963,7 @@ SELECT IF(true, cast(1 as boolean), cast(2 as double)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS DOUBLE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS DOUBLE)))' (boolean and double).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS DOUBLE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS DOUBLE)))' (boolean and double).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -972,7 +972,7 @@ SELECT IF(true, cast(1 as boolean), cast(2 as decimal(10, 0))) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS DECIMAL(10,0))))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS DECIMAL(10,0))))' (boolean and decimal(10,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS DECIMAL(10,0))))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS DECIMAL(10,0))))' (boolean and decimal(10,0)).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -981,7 +981,7 @@ SELECT IF(true, cast(1 as boolean), cast(2 as string)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS STRING)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS STRING)))' (boolean and string).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS STRING)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS STRING)))' (boolean and string).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -990,7 +990,7 @@ SELECT IF(true, cast(1 as boolean), cast('2' as binary)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST('2' AS BINARY)))' (boolean and binary).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST('2' AS BINARY)))' (boolean and binary).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -1007,7 +1007,7 @@ SELECT IF(true, cast(1 as boolean), cast('2017-12-11 09:30:00.0' as timestamp))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (boolean and timestamp).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (boolean and timestamp).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -1016,7 +1016,7 @@ SELECT IF(true, cast(1 as boolean), cast('2017-12-11 09:30:00' as date)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST('2017-12-11 09:30:00' AS DATE)))' (boolean and date).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST('2017-12-11 09:30:00' AS DATE)))' (boolean and date).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -1025,7 +1025,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00.0' as timestamp), cast(2 as tinyint))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS TINYINT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS TINYINT)))' (timestamp and tinyint).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS TINYINT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS TINYINT)))' (timestamp and tinyint).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -1034,7 +1034,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00.0' as timestamp), cast(2 as smallint))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS SMALLINT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS SMALLINT)))' (timestamp and smallint).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS SMALLINT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS SMALLINT)))' (timestamp and smallint).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -1043,7 +1043,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00.0' as timestamp), cast(2 as int)) FROM
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS INT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS INT)))' (timestamp and int).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS INT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS INT)))' (timestamp and int).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -1052,7 +1052,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00.0' as timestamp), cast(2 as bigint)) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS BIGINT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS BIGINT)))' (timestamp and bigint).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS BIGINT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS BIGINT)))' (timestamp and bigint).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -1061,7 +1061,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00.0' as timestamp), cast(2 as float)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS FLOAT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS FLOAT)))' (timestamp and float).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS FLOAT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS FLOAT)))' (timestamp and float).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -1070,7 +1070,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00.0' as timestamp), cast(2 as double)) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS DOUBLE)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS DOUBLE)))' (timestamp and double).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS DOUBLE)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS DOUBLE)))' (timestamp and double).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -1079,7 +1079,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00.0' as timestamp), cast(2 as decimal(10
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS DECIMAL(10,0))))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS DECIMAL(10,0))))' (timestamp and decimal(10,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS DECIMAL(10,0))))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS DECIMAL(10,0))))' (timestamp and decimal(10,0)).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -1096,7 +1096,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00.0' as timestamp), cast('2' as binary))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST('2' AS BINARY)))' (timestamp and binary).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST('2' AS BINARY)))' (timestamp and binary).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -1105,7 +1105,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00.0' as timestamp), cast(2 as boolean))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS BOOLEAN)))' (timestamp and boolean).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS BOOLEAN)))' (timestamp and boolean).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -1130,7 +1130,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00' as date), cast(2 as tinyint)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS TINYINT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS TINYINT)))' (date and tinyint).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS TINYINT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS TINYINT)))' (date and tinyint).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -1139,7 +1139,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00' as date), cast(2 as smallint)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS SMALLINT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS SMALLINT)))' (date and smallint).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS SMALLINT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS SMALLINT)))' (date and smallint).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -1148,7 +1148,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00' as date), cast(2 as int)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS INT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS INT)))' (date and int).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS INT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS INT)))' (date and int).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -1157,7 +1157,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00' as date), cast(2 as bigint)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS BIGINT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS BIGINT)))' (date and bigint).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS BIGINT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS BIGINT)))' (date and bigint).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -1166,7 +1166,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00' as date), cast(2 as float)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS FLOAT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS FLOAT)))' (date and float).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS FLOAT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS FLOAT)))' (date and float).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -1175,7 +1175,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00' as date), cast(2 as double)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS DOUBLE)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS DOUBLE)))' (date and double).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS DOUBLE)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS DOUBLE)))' (date and double).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -1184,7 +1184,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00' as date), cast(2 as decimal(10, 0)))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS DECIMAL(10,0))))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS DECIMAL(10,0))))' (date and decimal(10,0)).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS DECIMAL(10,0))))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS DECIMAL(10,0))))' (date and decimal(10,0)).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -1201,7 +1201,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00' as date), cast('2' as binary)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST('2' AS BINARY)))' (date and binary).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST('2' AS BINARY)))' (date and binary).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
@@ -1210,7 +1210,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00' as date), cast(2 as boolean)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS BOOLEAN)))' (date and boolean).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS BOOLEAN)))' (date and boolean).; line 1 pos 7"},"queryContext":[]}
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/inConversion.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/inConversion.sql.out
index 7d6eccd61d262..ab66efa8d782b 100644
--- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/inConversion.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/inConversion.sql.out
@@ -77,7 +77,7 @@ SELECT cast(1 as tinyint) in (cast('1' as binary)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS TINYINT) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: tinyint != binary; line 1 pos 26"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS TINYINT) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: tinyint != binary; line 1 pos 26"},"queryContext":[]}
 
 
 -- !query
@@ -86,7 +86,7 @@ SELECT cast(1 as tinyint) in (cast(1 as boolean)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS TINYINT) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: tinyint != boolean; line 1 pos 26"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS TINYINT) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: tinyint != boolean; line 1 pos 26"},"queryContext":[]}
 
 
 -- !query
@@ -95,7 +95,7 @@ SELECT cast(1 as tinyint) in (cast('2017-12-11 09:30:00.0' as timestamp)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS TINYINT) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: tinyint != timestamp; line 1 pos 26"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS TINYINT) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: tinyint != timestamp; line 1 pos 26"},"queryContext":[]}
 
 
 -- !query
@@ -104,7 +104,7 @@ SELECT cast(1 as tinyint) in (cast('2017-12-11 09:30:00' as date)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS TINYINT) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: tinyint != date; line 1 pos 26"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS TINYINT) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: tinyint != date; line 1 pos 26"},"queryContext":[]}
 
 
 -- !query
@@ -177,7 +177,7 @@ SELECT cast(1 as smallint) in (cast('1' as binary)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS SMALLINT) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: smallint != binary; line 1 pos 27"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS SMALLINT) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: smallint != binary; line 1 pos 27"},"queryContext":[]}
 
 
 -- !query
@@ -186,7 +186,7 @@ SELECT cast(1 as smallint) in (cast(1 as boolean)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS SMALLINT) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: smallint != boolean; line 1 pos 27"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS SMALLINT) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: smallint != boolean; line 1 pos 27"},"queryContext":[]}
 
 
 -- !query
@@ -195,7 +195,7 @@ SELECT cast(1 as smallint) in (cast('2017-12-11 09:30:00.0' as timestamp)) FROM
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS SMALLINT) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: smallint != timestamp; line 1 pos 27"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS SMALLINT) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: smallint != timestamp; line 1 pos 27"},"queryContext":[]}
 
 
 -- !query
@@ -204,7 +204,7 @@ SELECT cast(1 as smallint) in (cast('2017-12-11 09:30:00' as date)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS SMALLINT) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: smallint != date; line 1 pos 27"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS SMALLINT) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: smallint != date; line 1 pos 27"},"queryContext":[]}
 
 
 -- !query
@@ -277,7 +277,7 @@ SELECT cast(1 as int) in (cast('1' as binary)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS INT) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: int != binary; line 1 pos 22"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS INT) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: int != binary; line 1 pos 22"},"queryContext":[]}
 
 
 -- !query
@@ -286,7 +286,7 @@ SELECT cast(1 as int) in (cast(1 as boolean)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS INT) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: int != boolean; line 1 pos 22"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS INT) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: int != boolean; line 1 pos 22"},"queryContext":[]}
 
 
 -- !query
@@ -295,7 +295,7 @@ SELECT cast(1 as int) in (cast('2017-12-11 09:30:00.0' as timestamp)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS INT) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: int != timestamp; line 1 pos 22"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS INT) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: int != timestamp; line 1 pos 22"},"queryContext":[]}
 
 
 -- !query
@@ -304,7 +304,7 @@ SELECT cast(1 as int) in (cast('2017-12-11 09:30:00' as date)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS INT) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: int != date; line 1 pos 22"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS INT) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: int != date; line 1 pos 22"},"queryContext":[]}
 
 
 -- !query
@@ -377,7 +377,7 @@ SELECT cast(1 as bigint) in (cast('1' as binary)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BIGINT) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: bigint != binary; line 1 pos 25"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS BIGINT) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: bigint != binary; line 1 pos 25"},"queryContext":[]}
 
 
 -- !query
@@ -386,7 +386,7 @@ SELECT cast(1 as bigint) in (cast(1 as boolean)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BIGINT) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: bigint != boolean; line 1 pos 25"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS BIGINT) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: bigint != boolean; line 1 pos 25"},"queryContext":[]}
 
 
 -- !query
@@ -395,7 +395,7 @@ SELECT cast(1 as bigint) in (cast('2017-12-11 09:30:00.0' as timestamp)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BIGINT) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: bigint != timestamp; line 1 pos 25"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS BIGINT) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: bigint != timestamp; line 1 pos 25"},"queryContext":[]}
 
 
 -- !query
@@ -404,7 +404,7 @@ SELECT cast(1 as bigint) in (cast('2017-12-11 09:30:00' as date)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BIGINT) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: bigint != date; line 1 pos 25"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS BIGINT) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: bigint != date; line 1 pos 25"},"queryContext":[]}
 
 
 -- !query
@@ -477,7 +477,7 @@ SELECT cast(1 as float) in (cast('1' as binary)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS FLOAT) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: float != binary; line 1 pos 24"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS FLOAT) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: float != binary; line 1 pos 24"},"queryContext":[]}
 
 
 -- !query
@@ -486,7 +486,7 @@ SELECT cast(1 as float) in (cast(1 as boolean)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS FLOAT) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: float != boolean; line 1 pos 24"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS FLOAT) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: float != boolean; line 1 pos 24"},"queryContext":[]}
 
 
 -- !query
@@ -495,7 +495,7 @@ SELECT cast(1 as float) in (cast('2017-12-11 09:30:00.0' as timestamp)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS FLOAT) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: float != timestamp; line 1 pos 24"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS FLOAT) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: float != timestamp; line 1 pos 24"},"queryContext":[]}
 
 
 -- !query
@@ -504,7 +504,7 @@ SELECT cast(1 as float) in (cast('2017-12-11 09:30:00' as date)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS FLOAT) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: float != date; line 1 pos 24"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS FLOAT) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: float != date; line 1 pos 24"},"queryContext":[]}
 
 
 -- !query
@@ -577,7 +577,7 @@ SELECT cast(1 as double) in (cast('1' as binary)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DOUBLE) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: double != binary; line 1 pos 25"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DOUBLE) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: double != binary; line 1 pos 25"},"queryContext":[]}
 
 
 -- !query
@@ -586,7 +586,7 @@ SELECT cast(1 as double) in (cast(1 as boolean)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DOUBLE) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: double != boolean; line 1 pos 25"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DOUBLE) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: double != boolean; line 1 pos 25"},"queryContext":[]}
 
 
 -- !query
@@ -595,7 +595,7 @@ SELECT cast(1 as double) in (cast('2017-12-11 09:30:00.0' as timestamp)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DOUBLE) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: double != timestamp; line 1 pos 25"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DOUBLE) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: double != timestamp; line 1 pos 25"},"queryContext":[]} -- !query @@ -604,7 +604,7 @@ SELECT cast(1 as double) in (cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DOUBLE) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: double != date; line 1 pos 25"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DOUBLE) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: double != date; line 1 pos 25"},"queryContext":[]} -- !query @@ -677,7 +677,7 @@ SELECT cast(1 as decimal(10, 0)) in (cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != binary; line 1 pos 33"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != binary; line 1 pos 33"},"queryContext":[]} -- !query @@ -686,7 +686,7 @@ SELECT cast(1 as decimal(10, 0)) in (cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != boolean; line 1 pos 33"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != boolean; line 1 pos 33"},"queryContext":[]} -- !query @@ -695,7 +695,7 @@ SELECT cast(1 as decimal(10, 0)) in (cast('2017-12-11 09:30:00.0' as timestamp)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != timestamp; line 1 pos 33"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != timestamp; line 1 pos 33"},"queryContext":[]} -- !query @@ -704,7 +704,7 @@ SELECT cast(1 as decimal(10, 0)) in (cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != date; line 1 pos 33"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be 
same type but were: decimal(10,0) != date; line 1 pos 33"},"queryContext":[]} -- !query @@ -777,7 +777,7 @@ SELECT cast(1 as string) in (cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS STRING) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: string != binary; line 1 pos 25"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS STRING) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: string != binary; line 1 pos 25"},"queryContext":[]} -- !query @@ -786,7 +786,7 @@ SELECT cast(1 as string) in (cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS STRING) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: string != boolean; line 1 pos 25"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS STRING) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: string != boolean; line 1 pos 25"},"queryContext":[]} -- !query @@ -811,7 +811,7 @@ SELECT cast('1' as binary) in (cast(1 as tinyint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: binary != tinyint; line 1 pos 27"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: binary != tinyint; line 1 pos 27"},"queryContext":[]} -- !query @@ -820,7 +820,7 @@ SELECT cast('1' as binary) in (cast(1 as smallint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: binary != smallint; line 1 pos 27"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: binary != smallint; line 1 pos 27"},"queryContext":[]} -- !query @@ -829,7 +829,7 @@ SELECT cast('1' as binary) in (cast(1 as int)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS INT)))' due to data type mismatch: Arguments must be same type but were: binary != int; line 1 pos 27"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS INT)))' due to data type mismatch: Arguments must be same type but were: binary != int; line 1 pos 27"},"queryContext":[]} -- !query @@ -838,7 +838,7 @@ SELECT cast('1' as binary) in (cast(1 as bigint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: binary != bigint; line 1 pos 27"],"queryContext":[]} 
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: binary != bigint; line 1 pos 27"},"queryContext":[]} -- !query @@ -847,7 +847,7 @@ SELECT cast('1' as binary) in (cast(1 as float)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: binary != float; line 1 pos 27"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: binary != float; line 1 pos 27"},"queryContext":[]} -- !query @@ -856,7 +856,7 @@ SELECT cast('1' as binary) in (cast(1 as double)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: binary != double; line 1 pos 27"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: binary != double; line 1 pos 27"},"queryContext":[]} -- !query @@ -865,7 +865,7 @@ SELECT cast('1' as binary) in (cast(1 as decimal(10, 0))) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: binary != decimal(10,0); line 1 pos 27"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: binary != decimal(10,0); line 1 pos 27"},"queryContext":[]} -- !query @@ -874,7 +874,7 @@ SELECT cast('1' as binary) in (cast(1 as string)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS STRING)))' due to data type mismatch: Arguments must be same type but were: binary != string; line 1 pos 27"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS STRING)))' due to data type mismatch: Arguments must be same type but were: binary != string; line 1 pos 27"},"queryContext":[]} -- !query @@ -891,7 +891,7 @@ SELECT cast('1' as binary) in (cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: binary != boolean; line 1 pos 27"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: binary != boolean; line 1 pos 27"},"queryContext":[]} -- !query @@ -900,7 +900,7 @@ SELECT cast('1' as binary) in (cast('2017-12-11 09:30:00.0' as timestamp)) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: binary != timestamp; line 1 pos 27"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: binary != timestamp; line 1 pos 27"},"queryContext":[]} -- !query @@ -909,7 +909,7 @@ SELECT cast('1' as binary) in (cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: binary != date; line 1 pos 27"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: binary != date; line 1 pos 27"},"queryContext":[]} -- !query @@ -918,7 +918,7 @@ SELECT true in (cast(1 as tinyint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(true IN (CAST(1 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: boolean != tinyint; line 1 pos 12"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(true IN (CAST(1 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: boolean != tinyint; line 1 pos 12"},"queryContext":[]} -- !query @@ -927,7 +927,7 @@ SELECT true in (cast(1 as smallint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(true IN (CAST(1 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: boolean != smallint; line 1 pos 12"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(true IN (CAST(1 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: boolean != smallint; line 1 pos 12"},"queryContext":[]} -- !query @@ -936,7 +936,7 @@ SELECT true in (cast(1 as int)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(true IN (CAST(1 AS INT)))' due to data type mismatch: Arguments must be same type but were: boolean != int; line 1 pos 12"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(true IN (CAST(1 AS INT)))' due to data type mismatch: Arguments must be same type but were: boolean != int; line 1 pos 12"},"queryContext":[]} -- !query @@ -945,7 +945,7 @@ SELECT true in (cast(1 as bigint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(true IN (CAST(1 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: boolean != bigint; line 1 pos 12"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(true IN (CAST(1 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: boolean != bigint; line 1 pos 12"},"queryContext":[]} -- !query @@ -954,7 +954,7 @@ SELECT true in (cast(1 as float)) FROM t struct<> -- 
!query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(true IN (CAST(1 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: boolean != float; line 1 pos 12"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(true IN (CAST(1 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: boolean != float; line 1 pos 12"},"queryContext":[]} -- !query @@ -963,7 +963,7 @@ SELECT true in (cast(1 as double)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(true IN (CAST(1 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: boolean != double; line 1 pos 12"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(true IN (CAST(1 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: boolean != double; line 1 pos 12"},"queryContext":[]} -- !query @@ -972,7 +972,7 @@ SELECT true in (cast(1 as decimal(10, 0))) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(true IN (CAST(1 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: boolean != decimal(10,0); line 1 pos 12"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(true IN (CAST(1 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: boolean != decimal(10,0); line 1 pos 12"},"queryContext":[]} -- !query @@ -981,7 +981,7 @@ SELECT true in (cast(1 as string)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(true IN (CAST(1 AS STRING)))' due to data type mismatch: Arguments must be same type but were: boolean != string; line 1 pos 12"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(true IN (CAST(1 AS STRING)))' due to data type mismatch: Arguments must be same type but were: boolean != string; line 1 pos 12"},"queryContext":[]} -- !query @@ -990,7 +990,7 @@ SELECT true in (cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(true IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: boolean != binary; line 1 pos 12"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(true IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: boolean != binary; line 1 pos 12"},"queryContext":[]} -- !query @@ -1007,7 +1007,7 @@ SELECT true in (cast('2017-12-11 09:30:00.0' as timestamp)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(true IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: boolean != timestamp; line 1 pos 12"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(true IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: boolean != timestamp; line 1 pos 12"},"queryContext":[]} -- !query @@ -1016,7 +1016,7 @@ SELECT true in (cast('2017-12-11 09:30:00' as date)) FROM t 
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(true IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: boolean != date; line 1 pos 12"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(true IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: boolean != date; line 1 pos 12"},"queryContext":[]}
 
 
 -- !query
@@ -1025,7 +1025,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast(2 as tinyint)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: timestamp != tinyint; line 1 pos 50"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: timestamp != tinyint; line 1 pos 50"},"queryContext":[]}
 
 
 -- !query
@@ -1034,7 +1034,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast(2 as smallint)) FROM
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: timestamp != smallint; line 1 pos 50"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: timestamp != smallint; line 1 pos 50"},"queryContext":[]}
 
 
 -- !query
@@ -1043,7 +1043,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast(2 as int)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS INT)))' due to data type mismatch: Arguments must be same type but were: timestamp != int; line 1 pos 50"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS INT)))' due to data type mismatch: Arguments must be same type but were: timestamp != int; line 1 pos 50"},"queryContext":[]}
 
 
 -- !query
@@ -1052,7 +1052,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast(2 as bigint)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: timestamp != bigint; line 1 pos 50"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: timestamp != bigint; line 1 pos 50"},"queryContext":[]}
 
 
 -- !query
@@ -1061,7 +1061,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast(2 as float)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: timestamp != float; line 1 pos 50"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: timestamp != float; line 1 pos 50"},"queryContext":[]}
 
 
 -- !query
@@ -1070,7 +1070,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast(2 as double)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: timestamp != double; line 1 pos 50"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: timestamp != double; line 1 pos 50"},"queryContext":[]}
 
 
 -- !query
@@ -1079,7 +1079,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast(2 as decimal(10, 0)))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: timestamp != decimal(10,0); line 1 pos 50"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: timestamp != decimal(10,0); line 1 pos 50"},"queryContext":[]}
 
 
 -- !query
@@ -1096,7 +1096,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast('2' as binary)) FROM
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: timestamp != binary; line 1 pos 50"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: timestamp != binary; line 1 pos 50"},"queryContext":[]}
 
 
 -- !query
@@ -1105,7 +1105,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast(2 as boolean)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: timestamp != boolean; line 1 pos 50"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: timestamp != boolean; line 1 pos 50"},"queryContext":[]}
 
 
 -- !query
@@ -1130,7 +1130,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast(2 as tinyint)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: date != tinyint; line 1 pos 43"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: date != tinyint; line 1 pos 43"},"queryContext":[]}
 
 
 -- !query
@@ -1139,7 +1139,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast(2 as smallint)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: date != smallint; line 1 pos 43"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: date != smallint; line 1 pos 43"},"queryContext":[]}
 
 
 -- !query
@@ -1148,7 +1148,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast(2 as int)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS INT)))' due to data type mismatch: Arguments must be same type but were: date != int; line 1 pos 43"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS INT)))' due to data type mismatch: Arguments must be same type but were: date != int; line 1 pos 43"},"queryContext":[]}
 
 
 -- !query
@@ -1157,7 +1157,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast(2 as bigint)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: date != bigint; line 1 pos 43"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: date != bigint; line 1 pos 43"},"queryContext":[]}
 
 
 -- !query
@@ -1166,7 +1166,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast(2 as float)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: date != float; line 1 pos 43"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: date != float; line 1 pos 43"},"queryContext":[]}
 
 
 -- !query
@@ -1175,7 +1175,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast(2 as double)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: date != double; line 1 pos 43"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: date != double; line 1 pos 43"},"queryContext":[]}
 
 
 -- !query
@@ -1184,7 +1184,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast(2 as decimal(10, 0))) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: date != decimal(10,0); line 1 pos 43"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: date != decimal(10,0); line 1 pos 43"},"queryContext":[]}
 
 
 -- !query
@@ -1201,7 +1201,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast('2' as binary)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: date != binary; line 1 pos 43"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: date != binary; line 1 pos 43"},"queryContext":[]}
 
 
 -- !query
@@ -1210,7 +1210,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast(2 as boolean)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: date != boolean; line 1 pos 43"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: date != boolean; line 1 pos 43"},"queryContext":[]}
 
 
 -- !query
@@ -1299,7 +1299,7 @@ SELECT cast(1 as tinyint) in (cast(1 as tinyint), cast('1' as binary)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS TINYINT) IN (CAST(1 AS TINYINT), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: tinyint != binary; line 1 pos 26"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS TINYINT) IN (CAST(1 AS TINYINT), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: tinyint != binary; line 1 pos 26"},"queryContext":[]}
 
 
 -- !query
@@ -1308,7 +1308,7 @@ SELECT cast(1 as tinyint) in (cast(1 as tinyint), cast(1 as boolean)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS TINYINT) IN (CAST(1 AS TINYINT), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: tinyint != boolean; line 1 pos 26"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS TINYINT) IN (CAST(1 AS TINYINT), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: tinyint != boolean; line 1 pos 26"},"queryContext":[]}
 
 
 -- !query
@@ -1317,7 +1317,7 @@ SELECT cast(1 as tinyint) in (cast(1 as tinyint), cast('2017-12-11 09:30:00.0' a
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS TINYINT) IN (CAST(1 AS TINYINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: tinyint != timestamp; line 1 pos 26"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS TINYINT) IN (CAST(1 AS TINYINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: tinyint != timestamp; line 1 pos 26"},"queryContext":[]}
 
 
 -- !query
@@ -1326,7 +1326,7 @@ SELECT cast(1 as tinyint) in (cast(1 as tinyint), cast('2017-12-11 09:30:00' as
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS TINYINT) IN (CAST(1 AS TINYINT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: tinyint != date; line 1 pos 26"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS TINYINT) IN (CAST(1 AS TINYINT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: tinyint != date; line 1 pos 26"},"queryContext":[]}
 
 
 -- !query
@@ -1399,7 +1399,7 @@ SELECT cast(1 as smallint) in (cast(1 as smallint), cast('1' as binary)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS SMALLINT) IN (CAST(1 AS SMALLINT), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: smallint != binary; line 1 pos 27"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS SMALLINT) IN (CAST(1 AS SMALLINT), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: smallint != binary; line 1 pos 27"},"queryContext":[]}
 
 
 -- !query
@@ -1408,7 +1408,7 @@ SELECT cast(1 as smallint) in (cast(1 as smallint), cast(1 as boolean)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS SMALLINT) IN (CAST(1 AS SMALLINT), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: smallint != boolean; line 1 pos 27"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS SMALLINT) IN (CAST(1 AS SMALLINT), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: smallint != boolean; line 1 pos 27"},"queryContext":[]}
 
 
 -- !query
@@ -1417,7 +1417,7 @@ SELECT cast(1 as smallint) in (cast(1 as smallint), cast('2017-12-11 09:30:00.0'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS SMALLINT) IN (CAST(1 AS SMALLINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: smallint != timestamp; line 1 pos 27"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS SMALLINT) IN (CAST(1 AS SMALLINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: smallint != timestamp; line 1 pos 27"},"queryContext":[]}
 
 
 -- !query
@@ -1426,7 +1426,7 @@ SELECT cast(1 as smallint) in (cast(1
as smallint), cast('2017-12-11 09:30:00' a struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS SMALLINT) IN (CAST(1 AS SMALLINT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: smallint != date; line 1 pos 27"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS SMALLINT) IN (CAST(1 AS SMALLINT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: smallint != date; line 1 pos 27"},"queryContext":[]} -- !query @@ -1499,7 +1499,7 @@ SELECT cast(1 as int) in (cast(1 as int), cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS INT) IN (CAST(1 AS INT), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: int != binary; line 1 pos 22"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS INT) IN (CAST(1 AS INT), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: int != binary; line 1 pos 22"},"queryContext":[]} -- !query @@ -1508,7 +1508,7 @@ SELECT cast(1 as int) in (cast(1 as int), cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS INT) IN (CAST(1 AS INT), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: int != boolean; line 1 pos 22"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS INT) IN (CAST(1 AS INT), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: int != boolean; line 1 pos 22"},"queryContext":[]} -- !query @@ -1517,7 +1517,7 @@ SELECT cast(1 as int) in (cast(1 as int), cast('2017-12-11 09:30:00.0' as timest struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS INT) IN (CAST(1 AS INT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: int != timestamp; line 1 pos 22"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS INT) IN (CAST(1 AS INT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: int != timestamp; line 1 pos 22"},"queryContext":[]} -- !query @@ -1526,7 +1526,7 @@ SELECT cast(1 as int) in (cast(1 as int), cast('2017-12-11 09:30:00' as date)) F struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS INT) IN (CAST(1 AS INT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: int != date; line 1 pos 22"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS INT) IN (CAST(1 AS INT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: int != date; line 1 pos 22"},"queryContext":[]} -- !query @@ -1599,7 +1599,7 @@ SELECT cast(1 as bigint) in (cast(1 as bigint), cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BIGINT) IN (CAST(1 AS BIGINT), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: bigint != binary; line 1 pos 25"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS BIGINT) IN (CAST(1 AS BIGINT), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: bigint != binary; line 1 pos 25"},"queryContext":[]} -- !query @@ -1608,7 +1608,7 @@ SELECT cast(1 as bigint) in (cast(1 as bigint), cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BIGINT) IN (CAST(1 AS BIGINT), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: bigint != boolean; line 1 pos 25"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS BIGINT) IN (CAST(1 AS BIGINT), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: bigint != boolean; line 1 pos 25"},"queryContext":[]} -- !query @@ -1617,7 +1617,7 @@ SELECT cast(1 as bigint) in (cast(1 as bigint), cast('2017-12-11 09:30:00.0' as struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BIGINT) IN (CAST(1 AS BIGINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: bigint != timestamp; line 1 pos 25"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS BIGINT) IN (CAST(1 AS BIGINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: bigint != timestamp; line 1 pos 25"},"queryContext":[]} -- !query @@ -1626,7 +1626,7 @@ SELECT cast(1 as bigint) in (cast(1 as bigint), cast('2017-12-11 09:30:00' as da struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BIGINT) IN (CAST(1 AS BIGINT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: bigint != date; line 1 pos 25"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS BIGINT) IN (CAST(1 AS BIGINT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: bigint != date; line 1 pos 25"},"queryContext":[]} -- !query @@ -1699,7 +1699,7 @@ SELECT cast(1 as float) in (cast(1 as float), cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS FLOAT) IN (CAST(1 AS FLOAT), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: float != binary; line 1 pos 24"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS FLOAT) IN (CAST(1 AS FLOAT), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: float != binary; line 1 pos 24"},"queryContext":[]} -- !query @@ -1708,7 +1708,7 @@ SELECT cast(1 as float) in (cast(1 as float), cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS FLOAT) IN (CAST(1 AS FLOAT), CAST(1 AS 
BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: float != boolean; line 1 pos 24"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS FLOAT) IN (CAST(1 AS FLOAT), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: float != boolean; line 1 pos 24"},"queryContext":[]} -- !query @@ -1717,7 +1717,7 @@ SELECT cast(1 as float) in (cast(1 as float), cast('2017-12-11 09:30:00.0' as ti struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS FLOAT) IN (CAST(1 AS FLOAT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: float != timestamp; line 1 pos 24"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS FLOAT) IN (CAST(1 AS FLOAT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: float != timestamp; line 1 pos 24"},"queryContext":[]} -- !query @@ -1726,7 +1726,7 @@ SELECT cast(1 as float) in (cast(1 as float), cast('2017-12-11 09:30:00' as date struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS FLOAT) IN (CAST(1 AS FLOAT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: float != date; line 1 pos 24"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS FLOAT) IN (CAST(1 AS FLOAT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: float != date; line 1 pos 24"},"queryContext":[]} -- !query @@ -1799,7 +1799,7 @@ SELECT cast(1 as double) in (cast(1 as double), cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DOUBLE) IN (CAST(1 AS DOUBLE), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: double != binary; line 1 pos 25"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DOUBLE) IN (CAST(1 AS DOUBLE), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: double != binary; line 1 pos 25"},"queryContext":[]} -- !query @@ -1808,7 +1808,7 @@ SELECT cast(1 as double) in (cast(1 as double), cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DOUBLE) IN (CAST(1 AS DOUBLE), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: double != boolean; line 1 pos 25"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DOUBLE) IN (CAST(1 AS DOUBLE), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: double != boolean; line 1 pos 25"},"queryContext":[]} -- !query @@ -1817,7 +1817,7 @@ SELECT cast(1 as double) in (cast(1 as double), cast('2017-12-11 09:30:00.0' as struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DOUBLE) IN (CAST(1 AS DOUBLE), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: double != 
timestamp; line 1 pos 25"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DOUBLE) IN (CAST(1 AS DOUBLE), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: double != timestamp; line 1 pos 25"},"queryContext":[]} -- !query @@ -1826,7 +1826,7 @@ SELECT cast(1 as double) in (cast(1 as double), cast('2017-12-11 09:30:00' as da struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DOUBLE) IN (CAST(1 AS DOUBLE), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: double != date; line 1 pos 25"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DOUBLE) IN (CAST(1 AS DOUBLE), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: double != date; line 1 pos 25"},"queryContext":[]} -- !query @@ -1899,7 +1899,7 @@ SELECT cast(1 as decimal(10, 0)) in (cast(1 as decimal(10, 0)), cast('1' as bina struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST(1 AS DECIMAL(10,0)), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != binary; line 1 pos 33"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST(1 AS DECIMAL(10,0)), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != binary; line 1 pos 33"},"queryContext":[]} -- !query @@ -1908,7 +1908,7 @@ SELECT cast(1 as decimal(10, 0)) in (cast(1 as decimal(10, 0)), cast(1 as boolea struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST(1 AS DECIMAL(10,0)), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != boolean; line 1 pos 33"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST(1 AS DECIMAL(10,0)), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != boolean; line 1 pos 33"},"queryContext":[]} -- !query @@ -1917,7 +1917,7 @@ SELECT cast(1 as decimal(10, 0)) in (cast(1 as decimal(10, 0)), cast('2017-12-11 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != timestamp; line 1 pos 33"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != timestamp; line 1 pos 33"},"queryContext":[]} -- !query @@ -1926,7 +1926,7 @@ SELECT cast(1 as decimal(10, 0)) in (cast(1 as decimal(10, 0)), cast('2017-12-11 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST(1 AS DECIMAL(10,0)), 
CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != date; line 1 pos 33"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != date; line 1 pos 33"},"queryContext":[]} -- !query @@ -1999,7 +1999,7 @@ SELECT cast(1 as string) in (cast(1 as string), cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS STRING) IN (CAST(1 AS STRING), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: string != binary; line 1 pos 25"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS STRING) IN (CAST(1 AS STRING), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: string != binary; line 1 pos 25"},"queryContext":[]} -- !query @@ -2008,7 +2008,7 @@ SELECT cast(1 as string) in (cast(1 as string), cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS STRING) IN (CAST(1 AS STRING), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: string != boolean; line 1 pos 25"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS STRING) IN (CAST(1 AS STRING), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: string != boolean; line 1 pos 25"},"queryContext":[]} -- !query @@ -2033,7 +2033,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast(1 as tinyint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: binary != tinyint; line 1 pos 27"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: binary != tinyint; line 1 pos 27"},"queryContext":[]} -- !query @@ -2042,7 +2042,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast(1 as smallint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: binary != smallint; line 1 pos 27"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: binary != smallint; line 1 pos 27"},"queryContext":[]} -- !query @@ -2051,7 +2051,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast(1 as int)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS INT)))' due to data type mismatch: Arguments must be same type but were: binary != int; line 1 
pos 27"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS INT)))' due to data type mismatch: Arguments must be same type but were: binary != int; line 1 pos 27"},"queryContext":[]} -- !query @@ -2060,7 +2060,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast(1 as bigint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: binary != bigint; line 1 pos 27"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: binary != bigint; line 1 pos 27"},"queryContext":[]} -- !query @@ -2069,7 +2069,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast(1 as float)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: binary != float; line 1 pos 27"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: binary != float; line 1 pos 27"},"queryContext":[]} -- !query @@ -2078,7 +2078,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast(1 as double)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: binary != double; line 1 pos 27"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: binary != double; line 1 pos 27"},"queryContext":[]} -- !query @@ -2087,7 +2087,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast(1 as decimal(10, 0))) F struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: binary != decimal(10,0); line 1 pos 27"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: binary != decimal(10,0); line 1 pos 27"},"queryContext":[]} -- !query @@ -2096,7 +2096,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast(1 as string)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS STRING)))' due to data type mismatch: Arguments must be same type but were: binary != string; line 1 pos 27"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS 
BINARY), CAST(1 AS STRING)))' due to data type mismatch: Arguments must be same type but were: binary != string; line 1 pos 27"},"queryContext":[]} -- !query @@ -2113,7 +2113,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: binary != boolean; line 1 pos 27"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: binary != boolean; line 1 pos 27"},"queryContext":[]} -- !query @@ -2122,7 +2122,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast('2017-12-11 09:30:00.0' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: binary != timestamp; line 1 pos 27"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: binary != timestamp; line 1 pos 27"},"queryContext":[]} -- !query @@ -2131,7 +2131,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast('2017-12-11 09:30:00' a struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: binary != date; line 1 pos 27"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: binary != date; line 1 pos 27"},"queryContext":[]} -- !query @@ -2140,7 +2140,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast(1 as tinyint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: boolean != tinyint; line 1 pos 28"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: boolean != tinyint; line 1 pos 28"},"queryContext":[]} -- !query @@ -2149,7 +2149,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast(1 as smallint)) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: boolean != smallint; line 1 pos 28"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS SMALLINT)))' due 
to data type mismatch: Arguments must be same type but were: boolean != smallint; line 1 pos 28"},"queryContext":[]} -- !query @@ -2158,7 +2158,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast(1 as int)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS INT)))' due to data type mismatch: Arguments must be same type but were: boolean != int; line 1 pos 28"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS INT)))' due to data type mismatch: Arguments must be same type but were: boolean != int; line 1 pos 28"},"queryContext":[]} -- !query @@ -2167,7 +2167,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast(1 as bigint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: boolean != bigint; line 1 pos 28"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: boolean != bigint; line 1 pos 28"},"queryContext":[]} -- !query @@ -2176,7 +2176,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast(1 as float)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: boolean != float; line 1 pos 28"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: boolean != float; line 1 pos 28"},"queryContext":[]} -- !query @@ -2185,7 +2185,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast(1 as double)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: boolean != double; line 1 pos 28"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: boolean != double; line 1 pos 28"},"queryContext":[]} -- !query @@ -2194,7 +2194,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast(1 as decimal(10, 0))) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: boolean != decimal(10,0); line 1 pos 28"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: boolean != decimal(10,0); line 1 pos 
28"},"queryContext":[]} -- !query @@ -2203,7 +2203,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast(1 as string)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS STRING)))' due to data type mismatch: Arguments must be same type but were: boolean != string; line 1 pos 28"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS STRING)))' due to data type mismatch: Arguments must be same type but were: boolean != string; line 1 pos 28"},"queryContext":[]} -- !query @@ -2212,7 +2212,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast('1' as binary)) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: boolean != binary; line 1 pos 28"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: boolean != binary; line 1 pos 28"},"queryContext":[]} -- !query @@ -2229,7 +2229,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast('2017-12-11 09:30:00. struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: boolean != timestamp; line 1 pos 28"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: boolean != timestamp; line 1 pos 28"},"queryContext":[]} -- !query @@ -2238,7 +2238,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast('2017-12-11 09:30:00' struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: boolean != date; line 1 pos 28"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: boolean != date; line 1 pos 28"},"queryContext":[]} -- !query @@ -2247,7 +2247,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast('2017-12-12 09:30:00. 
struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: timestamp != tinyint; line 1 pos 50"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: timestamp != tinyint; line 1 pos 50"},"queryContext":[]} -- !query @@ -2256,7 +2256,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast('2017-12-12 09:30:00. struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: timestamp != smallint; line 1 pos 50"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: timestamp != smallint; line 1 pos 50"},"queryContext":[]} -- !query @@ -2265,7 +2265,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast('2017-12-12 09:30:00. struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS INT)))' due to data type mismatch: Arguments must be same type but were: timestamp != int; line 1 pos 50"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS INT)))' due to data type mismatch: Arguments must be same type but were: timestamp != int; line 1 pos 50"},"queryContext":[]} -- !query @@ -2274,7 +2274,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast('2017-12-12 09:30:00. struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: timestamp != bigint; line 1 pos 50"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: timestamp != bigint; line 1 pos 50"},"queryContext":[]} -- !query @@ -2283,7 +2283,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast('2017-12-12 09:30:00. 
struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: timestamp != float; line 1 pos 50"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: timestamp != float; line 1 pos 50"},"queryContext":[]} -- !query @@ -2292,7 +2292,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast('2017-12-12 09:30:00. struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: timestamp != double; line 1 pos 50"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: timestamp != double; line 1 pos 50"},"queryContext":[]} -- !query @@ -2301,7 +2301,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast('2017-12-12 09:30:00. struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: timestamp != decimal(10,0); line 1 pos 50"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: timestamp != decimal(10,0); line 1 pos 50"},"queryContext":[]} -- !query @@ -2318,7 +2318,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast('2017-12-12 09:30:00. struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: timestamp != binary; line 1 pos 50"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: timestamp != binary; line 1 pos 50"},"queryContext":[]} -- !query @@ -2327,7 +2327,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast('2017-12-12 09:30:00. 
struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: timestamp != boolean; line 1 pos 50"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: timestamp != boolean; line 1 pos 50"},"queryContext":[]} -- !query @@ -2352,7 +2352,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast('2017-12-12 09:30:00' as dat struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: date != tinyint; line 1 pos 43"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: date != tinyint; line 1 pos 43"},"queryContext":[]} -- !query @@ -2361,7 +2361,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast('2017-12-12 09:30:00' as dat struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: date != smallint; line 1 pos 43"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: date != smallint; line 1 pos 43"},"queryContext":[]} -- !query @@ -2370,7 +2370,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast('2017-12-12 09:30:00' as dat struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS INT)))' due to data type mismatch: Arguments must be same type but were: date != int; line 1 pos 43"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS INT)))' due to data type mismatch: Arguments must be same type but were: date != int; line 1 pos 43"},"queryContext":[]} -- !query @@ -2379,7 +2379,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast('2017-12-12 09:30:00' as dat struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: date != bigint; line 1 pos 43"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS BIGINT)))' due to data type mismatch: 
Arguments must be same type but were: date != bigint; line 1 pos 43"},"queryContext":[]} -- !query @@ -2388,7 +2388,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast('2017-12-12 09:30:00' as dat struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: date != float; line 1 pos 43"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: date != float; line 1 pos 43"},"queryContext":[]} -- !query @@ -2397,7 +2397,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast('2017-12-12 09:30:00' as dat struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: date != double; line 1 pos 43"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: date != double; line 1 pos 43"},"queryContext":[]} -- !query @@ -2406,7 +2406,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast('2017-12-12 09:30:00' as dat struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: date != decimal(10,0); line 1 pos 43"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: date != decimal(10,0); line 1 pos 43"},"queryContext":[]} -- !query @@ -2423,7 +2423,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast('2017-12-12 09:30:00' as dat struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: date != binary; line 1 pos 43"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: date != binary; line 1 pos 43"},"queryContext":[]} -- !query @@ -2432,7 +2432,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast('2017-12-12 09:30:00' as dat struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: date != boolean; line 1 pos 43"],"queryContext":[]} 
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: date != boolean; line 1 pos 43"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapZipWith.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapZipWith.sql.out index db4b954ee06a1..a9e28801f6f9c 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapZipWith.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapZipWith.sql.out @@ -82,7 +82,7 @@ FROM various_maps struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'map_zip_with(various_maps.decimal_map1, various_maps.decimal_map2, lambdafunction(struct(k, v1, v2), k, v1, v2))' due to argument data type mismatch: The input to function map_zip_with should have been two maps with compatible key types, but the key types are [decimal(36,0), decimal(36,35)].; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'map_zip_with(various_maps.decimal_map1, various_maps.decimal_map2, lambdafunction(struct(k, v1, v2), k, v1, v2))' due to argument data type mismatch: The input to function map_zip_with should have been two maps with compatible key types, but the key types are [decimal(36,0), decimal(36,35)].; line 1 pos 7"},"queryContext":[]} -- !query @@ -110,7 +110,7 @@ FROM various_maps struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'map_zip_with(various_maps.decimal_map2, various_maps.int_map, lambdafunction(struct(k, v1, v2), k, v1, v2))' due to argument data type mismatch: The input to function map_zip_with should have been two maps with compatible key types, but the key types are [decimal(36,35), int].; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'map_zip_with(various_maps.decimal_map2, various_maps.int_map, lambdafunction(struct(k, v1, v2), k, v1, v2))' due to argument data type mismatch: The input to function map_zip_with should have been two maps with compatible key types, but the key types are [decimal(36,35), int].; line 1 pos 7"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapconcat.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapconcat.sql.out index 3522516b2b5f1..2cb37040e5bcd 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapconcat.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapconcat.sql.out @@ -91,7 +91,7 @@ FROM various_maps struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'map_concat(various_maps.tinyint_map1, various_maps.array_map1)' due to data type mismatch: input to function map_concat should all be the same type, but it's [map, map,array>]; line 2 pos 4"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'map_concat(various_maps.tinyint_map1, various_maps.array_map1)' due to data type mismatch: input to function map_concat should all be the same type, but it's [map, map,array>]; line 2 pos 4"},"queryContext":[]} -- !query 
@@ -102,7 +102,7 @@ FROM various_maps struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'map_concat(various_maps.boolean_map1, various_maps.int_map2)' due to data type mismatch: input to function map_concat should all be the same type, but it's [map, map]; line 2 pos 4"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'map_concat(various_maps.boolean_map1, various_maps.int_map2)' due to data type mismatch: input to function map_concat should all be the same type, but it's [map, map]; line 2 pos 4"},"queryContext":[]} -- !query @@ -113,7 +113,7 @@ FROM various_maps struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'map_concat(various_maps.int_map1, various_maps.struct_map2)' due to data type mismatch: input to function map_concat should all be the same type, but it's [map, map,struct>]; line 2 pos 4"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'map_concat(various_maps.int_map1, various_maps.struct_map2)' due to data type mismatch: input to function map_concat should all be the same type, but it's [map, map,struct>]; line 2 pos 4"},"queryContext":[]} -- !query @@ -124,7 +124,7 @@ FROM various_maps struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'map_concat(various_maps.struct_map1, various_maps.array_map2)' due to data type mismatch: input to function map_concat should all be the same type, but it's [map,struct>, map,array>]; line 2 pos 4"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'map_concat(various_maps.struct_map1, various_maps.array_map2)' due to data type mismatch: input to function map_concat should all be the same type, but it's [map,struct>, map,array>]; line 2 pos 4"},"queryContext":[]} -- !query @@ -135,4 +135,4 @@ FROM various_maps struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'map_concat(various_maps.int_map1, various_maps.array_map2)' due to data type mismatch: input to function map_concat should all be the same type, but it's [map, map,array>]; line 2 pos 4"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'map_concat(various_maps.int_map1, various_maps.array_map2)' due to data type mismatch: input to function map_concat should all be the same type, but it's [map, map,array>]; line 2 pos 4"},"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out index 54429cc7a2f63..516ba004fd6e2 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out @@ -77,7 +77,7 @@ SELECT '1' + cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) + CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) + CAST('1' AS BINARY))' (double and binary).; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS DOUBLE) + 
diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out
index 54429cc7a2f63..516ba004fd6e2 100644
--- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out
@@ -77,7 +77,7 @@ SELECT '1' + cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) + CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) + CAST('1' AS BINARY))' (double and binary).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS DOUBLE) + CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) + CAST('1' AS BINARY))' (double and binary).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -86,7 +86,7 @@ SELECT '1' + cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) + CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) + CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS DOUBLE) + CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) + CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -95,7 +95,7 @@ SELECT '1' + cast('2017-12-11 09:30:00.0' as timestamp) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS DOUBLE) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -104,7 +104,7 @@ SELECT '1' + cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -177,7 +177,7 @@ SELECT '1' - cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) - CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) - CAST('1' AS BINARY))' (double and binary).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS DOUBLE) - CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) - CAST('1' AS BINARY))' (double and binary).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -186,7 +186,7 @@ SELECT '1' - cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) - CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) - CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS DOUBLE) - CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) - CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -195,7 +195,7 @@ SELECT '1' - cast('2017-12-11 09:30:00.0' as timestamp) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '('1' - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, ''1'' is of string type.; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '('1' - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, ''1'' is of string type.; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -276,7 +276,7 @@ SELECT '1' * cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) * CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) * CAST('1' AS BINARY))' (double and binary).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS DOUBLE) * CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) * CAST('1' AS BINARY))' (double and binary).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -285,7 +285,7 @@ SELECT '1' * cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) * CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) * CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS DOUBLE) * CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) * CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -294,7 +294,7 @@ SELECT '1' * cast('2017-12-11 09:30:00.0' as timestamp) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) * CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) * CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS DOUBLE) * CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) * CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -303,7 +303,7 @@ SELECT '1' * cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) * CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) * CAST('2017-12-11 09:30:00' AS DATE))' (double and date).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS DOUBLE) * CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) * CAST('2017-12-11 09:30:00' AS DATE))' (double and date).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -376,7 +376,7 @@ SELECT '1' / cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) / CAST('1' AS BINARY))' (double and binary).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS DOUBLE) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) / CAST('1' AS BINARY))' (double and binary).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -385,7 +385,7 @@ SELECT '1' / cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) / CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS DOUBLE) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) / CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -394,7 +394,7 @@ SELECT '1' / cast('2017-12-11 09:30:00.0' as timestamp) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS DOUBLE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -403,7 +403,7 @@ SELECT '1' / cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) / CAST('2017-12-11 09:30:00' AS DATE))' (double and date).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS DOUBLE) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) / CAST('2017-12-11 09:30:00' AS DATE))' (double and date).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -476,7 +476,7 @@ SELECT '1' % cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) % CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) % CAST('1' AS BINARY))' (double and binary).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS DOUBLE) % CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) % CAST('1' AS BINARY))' (double and binary).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -485,7 +485,7 @@ SELECT '1' % cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) % CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) % CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS DOUBLE) % CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) % CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -494,7 +494,7 @@ SELECT '1' % cast('2017-12-11 09:30:00.0' as timestamp) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS DOUBLE) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -503,7 +503,7 @@ SELECT '1' % cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS DOUBLE) % CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) % CAST('2017-12-11 09:30:00' AS DATE))' (double and date).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS DOUBLE) % CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) % CAST('2017-12-11 09:30:00' AS DATE))' (double and date).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -576,7 +576,7 @@ SELECT pmod('1', cast('1' as binary)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST('1' AS DOUBLE), CAST('1' AS BINARY))' due to data type mismatch: differing types in 'pmod(CAST('1' AS DOUBLE), CAST('1' AS BINARY))' (double and binary).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'pmod(CAST('1' AS DOUBLE), CAST('1' AS BINARY))' due to data type mismatch: differing types in 'pmod(CAST('1' AS DOUBLE), CAST('1' AS BINARY))' (double and binary).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -585,7 +585,7 @@ SELECT pmod('1', cast(1 as boolean)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST('1' AS DOUBLE), CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in 'pmod(CAST('1' AS DOUBLE), CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'pmod(CAST('1' AS DOUBLE), CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in 'pmod(CAST('1' AS DOUBLE), CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -594,7 +594,7 @@ SELECT pmod('1', cast('2017-12-11 09:30:00.0' as timestamp)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST('1' AS DOUBLE), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in 'pmod(CAST('1' AS DOUBLE), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'pmod(CAST('1' AS DOUBLE), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in 'pmod(CAST('1' AS DOUBLE), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -603,7 +603,7 @@ SELECT pmod('1', cast('2017-12-11 09:30:00' as date)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST('1' AS DOUBLE), CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in 'pmod(CAST('1' AS DOUBLE), CAST('2017-12-11 09:30:00' AS DATE))' (double and date).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'pmod(CAST('1' AS DOUBLE), CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in 'pmod(CAST('1' AS DOUBLE), CAST('2017-12-11 09:30:00' AS DATE))' (double and date).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -668,7 +668,7 @@ SELECT cast('1' as binary) + '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) + CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) + CAST('1' AS DOUBLE))' (binary and double).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) + CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) + CAST('1' AS DOUBLE))' (binary and double).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -677,7 +677,7 @@ SELECT cast(1 as boolean) + '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BOOLEAN) + CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) + CAST('1' AS DOUBLE))' (boolean and double).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS BOOLEAN) + CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) + CAST('1' AS DOUBLE))' (boolean and double).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -686,7 +686,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) + '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -695,7 +695,7 @@ SELECT cast('2017-12-11 09:30:00' as date) + '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -760,7 +760,7 @@ SELECT cast('1' as binary) - '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) - CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) - CAST('1' AS DOUBLE))' (binary and double).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) - CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) - CAST('1' AS DOUBLE))' (binary and double).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -769,7 +769,7 @@ SELECT cast(1 as boolean) - '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BOOLEAN) - CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) - CAST('1' AS DOUBLE))' (boolean and double).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS BOOLEAN) - CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) - CAST('1' AS DOUBLE))' (boolean and double).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -778,7 +778,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) - '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - '1')' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, ''1'' is of string type.; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - '1')' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, ''1'' is of string type.; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -787,7 +787,7 @@ SELECT cast('2017-12-11 09:30:00' as date) - '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve 'date_sub(CAST('2017-12-11 09:30:00' AS DATE), CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'date_sub(CAST('2017-12-11 09:30:00' AS DATE), CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -852,7 +852,7 @@ SELECT cast('1' as binary) * '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) * CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) * CAST('1' AS DOUBLE))' (binary and double).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) * CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) * CAST('1' AS DOUBLE))' (binary and double).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -861,7 +861,7 @@ SELECT cast(1 as boolean) * '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BOOLEAN) * CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) * CAST('1' AS DOUBLE))' (boolean and double).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS BOOLEAN) * CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) * CAST('1' AS DOUBLE))' (boolean and double).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -870,7 +870,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) * '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) * CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) * CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) * CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) * CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -879,7 +879,7 @@ SELECT cast('2017-12-11 09:30:00' as date) * '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) * CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) * CAST('1' AS DOUBLE))' (date and double).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) * CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) * CAST('1' AS DOUBLE))' (date and double).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -944,7 +944,7 @@ SELECT cast('1' as binary) / '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) / CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST('1' AS DOUBLE))' (binary and double).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) / CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST('1' AS DOUBLE))' (binary and double).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -953,7 +953,7 @@ SELECT cast(1 as boolean) / '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BOOLEAN) / CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST('1' AS DOUBLE))' (boolean and double).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS BOOLEAN) / CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST('1' AS DOUBLE))' (boolean and double).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -962,7 +962,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -971,7 +971,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('1' AS DOUBLE))' (date and double).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('1' AS DOUBLE))' (date and double).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -1036,7 +1036,7 @@ SELECT cast('1' as binary) % '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('1' AS BINARY) % CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) % CAST('1' AS DOUBLE))' (binary and double).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('1' AS BINARY) % CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) % CAST('1' AS DOUBLE))' (binary and double).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -1045,7 +1045,7 @@ SELECT cast(1 as boolean) % '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST(1 AS BOOLEAN) % CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) % CAST('1' AS DOUBLE))' (boolean and double).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST(1 AS BOOLEAN) % CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) % CAST('1' AS DOUBLE))' (boolean and double).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -1054,7 +1054,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) % '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -1063,7 +1063,7 @@ SELECT cast('2017-12-11 09:30:00' as date) % '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) % CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) % CAST('1' AS DOUBLE))' (date and double).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) % CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) % CAST('1' AS DOUBLE))' (date and double).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -1128,7 +1128,7 @@ SELECT pmod(cast('1' as binary), '1') FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST('1' AS BINARY), CAST('1' AS DOUBLE))' due to data type mismatch: differing types in 'pmod(CAST('1' AS BINARY), CAST('1' AS DOUBLE))' (binary and double).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'pmod(CAST('1' AS BINARY), CAST('1' AS DOUBLE))' due to data type mismatch: differing types in 'pmod(CAST('1' AS BINARY), CAST('1' AS DOUBLE))' (binary and double).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -1137,7 +1137,7 @@ SELECT pmod(cast(1 as boolean), '1') FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST(1 AS BOOLEAN), CAST('1' AS DOUBLE))' due to data type mismatch: differing types in 'pmod(CAST(1 AS BOOLEAN), CAST('1' AS DOUBLE))' (boolean and double).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'pmod(CAST(1 AS BOOLEAN), CAST('1' AS DOUBLE))' due to data type mismatch: differing types in 'pmod(CAST(1 AS BOOLEAN), CAST('1' AS DOUBLE))' (boolean and double).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -1146,7 +1146,7 @@ SELECT pmod(cast('2017-12-11 09:30:00.0' as timestamp), '1') FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST('1' AS DOUBLE))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST('1' AS DOUBLE))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -1155,7 +1155,7 @@ SELECT pmod(cast('2017-12-11 09:30:00' as date), '1') FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST('1' AS DOUBLE))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST('1' AS DOUBLE))' (date and double).; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST('1' AS DOUBLE))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST('1' AS DOUBLE))' (date and double).; line 1 pos 7"},"queryContext":[]}


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/stringCastAndExpressions.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/stringCastAndExpressions.sql.out
index cbd1d288bcd6e..7177f7a5f7986 100644
--- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/stringCastAndExpressions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/stringCastAndExpressions.sql.out
@@ -101,7 +101,7 @@ select cast(a as array) from t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve 't.a' due to data type mismatch: cannot cast string to array; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 't.a' due to data type mismatch: cannot cast string to array; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -110,7 +110,7 @@ select cast(a as struct) from t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve 't.a' due to data type mismatch: cannot cast string to struct; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 't.a' due to data type mismatch: cannot cast string to struct; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -119,7 +119,7 @@ select cast(a as map) from t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["cannot resolve 't.a' due to data type mismatch: cannot cast string to map; line 1 pos 7"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 't.a' due to data type mismatch: cannot cast string to map; line 1 pos 7"},"queryContext":[]}


 -- !query
@@ -136,7 +136,7 @@ select to_timestamp('2018-01-01', a) from t
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'aa'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]}
+{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'aa'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]}


 -- !query
@@ -153,7 +153,7 @@ select to_unix_timestamp('2018-01-01', a) from t
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'aa'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]}
+{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'aa'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]}


 -- !query
@@ -170,7 +170,7 @@ select unix_timestamp('2018-01-01', a) from t
 struct<>
 -- !query output
 org.apache.spark.SparkUpgradeException
-{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":["'aa'","\"spark.sql.legacy.timeParserPolicy\""],"queryContext":[]}
+{"errorClass":"INCONSISTENT_BEHAVIOR_CROSS_VERSION","errorSubClass":"DATETIME_PATTERN_RECOGNITION","messageParameters":{"pattern":"'aa'","config":"\"spark.sql.legacy.timeParserPolicy\""},"queryContext":[]}


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/widenSetOperationTypes.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/widenSetOperationTypes.sql.out
index 96aff310ab25a..ca2fb6912fa5b 100644
--- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/widenSetOperationTypes.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/widenSetOperationTypes.sql.out
@@ -85,7 +85,7 @@ SELECT cast(1 as tinyint) FROM t UNION SELECT cast('2' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with tinyint at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with tinyint at same column of first table"},"queryContext":[]}


 -- !query
@@ -94,7 +94,7 @@ SELECT cast(1 as tinyint) FROM t UNION SELECT cast(2 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with tinyint at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with tinyint at same column of first table"},"queryContext":[]}


 -- !query
@@ -103,7 +103,7 @@ SELECT cast(1 as tinyint) FROM t UNION SELECT cast('2017-12-11 09:30:00.0' as ti
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with tinyint at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with tinyint at same column of first table"},"queryContext":[]}


 -- !query
@@ -112,7 +112,7 @@ SELECT cast(1 as tinyint) FROM t UNION SELECT cast('2017-12-11 09:30:00' as date
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with tinyint at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with tinyint at same column of first table"},"queryContext":[]}


 -- !query
@@ -193,7 +193,7 @@ SELECT cast(1 as smallint) FROM t UNION SELECT cast('2' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with smallint at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with smallint at same column of first table"},"queryContext":[]}


 -- !query
@@ -202,7 +202,7 @@ SELECT cast(1 as smallint) FROM t UNION SELECT cast(2 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with smallint at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with smallint at same column of first table"},"queryContext":[]}


 -- !query
@@ -211,7 +211,7 @@ SELECT cast(1 as smallint) FROM t UNION SELECT cast('2017-12-11 09:30:00.0' as t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with smallint at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with smallint at same column of first table"},"queryContext":[]}


 -- !query
@@ -220,7 +220,7 @@ SELECT cast(1 as smallint) FROM t UNION SELECT cast('2017-12-11 09:30:00' as dat
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with smallint at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with smallint at same column of first table"},"queryContext":[]}


 -- !query
@@ -301,7 +301,7 @@ SELECT cast(1 as int) FROM t UNION SELECT cast('2' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with int at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with int at same column of first table"},"queryContext":[]}


 -- !query
@@ -310,7 +310,7 @@ SELECT cast(1 as int) FROM t UNION SELECT cast(2 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with int at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with int at same column of first table"},"queryContext":[]}


 -- !query
@@ -319,7 +319,7 @@ SELECT cast(1 as int) FROM t UNION SELECT cast('2017-12-11 09:30:00.0' as timest
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with int at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with int at same column of first table"},"queryContext":[]}


 -- !query
@@ -328,7 +328,7 @@ SELECT cast(1 as int) FROM t UNION SELECT cast('2017-12-11 09:30:00' as date) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with int at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with int at same column of first table"},"queryContext":[]}


 -- !query
@@ -409,7 +409,7 @@ SELECT cast(1 as bigint) FROM t UNION SELECT cast('2' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with bigint at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with bigint at same column of first table"},"queryContext":[]}


 -- !query
@@ -418,7 +418,7 @@ SELECT cast(1 as bigint) FROM t UNION SELECT cast(2 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with bigint at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with bigint at same column of first table"},"queryContext":[]}


 -- !query
@@ -427,7 +427,7 @@ SELECT cast(1 as bigint) FROM t UNION SELECT cast('2017-12-11 09:30:00.0' as tim
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with bigint at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with bigint at same column of first table"},"queryContext":[]}


 -- !query
@@ -436,7 +436,7 @@ SELECT cast(1 as bigint) FROM t UNION SELECT cast('2017-12-11 09:30:00' as date)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with bigint at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with bigint at same column of first table"},"queryContext":[]}


 -- !query
@@ -517,7 +517,7 @@ SELECT cast(1 as float) FROM t UNION SELECT cast('2' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with float at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with float at same column of first table"},"queryContext":[]}


 -- !query
@@ -526,7 +526,7 @@ SELECT cast(1 as float) FROM t UNION SELECT cast(2 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with float at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with float at same column of first table"},"queryContext":[]}


 -- !query
@@ -535,7 +535,7 @@ SELECT cast(1 as float) FROM t UNION SELECT cast('2017-12-11 09:30:00.0' as time
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with float at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with float at same column of first table"},"queryContext":[]}


 -- !query
@@ -544,7 +544,7 @@ SELECT cast(1 as float) FROM t UNION SELECT cast('2017-12-11 09:30:00' as date)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with float at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with float at same column of first table"},"queryContext":[]}


 -- !query
@@ -625,7 +625,7 @@ SELECT cast(1 as double) FROM t UNION SELECT cast('2' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with double at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with double at same column of first table"},"queryContext":[]}


 -- !query
@@ -634,7 +634,7 @@ SELECT cast(1 as double) FROM t UNION SELECT cast(2 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with double at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with double at same column of first table"},"queryContext":[]}


 -- !query
@@ -643,7 +643,7 @@ SELECT cast(1 as double) FROM t UNION SELECT cast('2017-12-11 09:30:00.0' as tim
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with double at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with double at same column of first table"},"queryContext":[]}


 -- !query
@@ -652,7 +652,7 @@ SELECT cast(1 as double) FROM t UNION SELECT cast('2017-12-11 09:30:00' as date)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with double at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with double at same column of first table"},"queryContext":[]}


 -- !query
@@ -733,7 +733,7 @@ SELECT cast(1 as decimal(10, 0)) FROM t UNION SELECT cast('2' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with decimal(10,0) at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with decimal(10,0) at same column of first table"},"queryContext":[]}


 -- !query
@@ -742,7 +742,7 @@ SELECT cast(1 as decimal(10, 0)) FROM t UNION SELECT cast(2 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with decimal(10,0) at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with decimal(10,0) at same column of first table"},"queryContext":[]}


 -- !query
@@ -751,7 +751,7 @@ SELECT cast(1 as decimal(10, 0)) FROM t UNION SELECT cast('2017-12-11 09:30:00.0
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with decimal(10,0) at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with decimal(10,0) at same column of first table"},"queryContext":[]}


 -- !query
@@ -760,7 +760,7 @@ SELECT cast(1 as decimal(10, 0)) FROM t UNION SELECT cast('2017-12-11 09:30:00'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with decimal(10,0) at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with decimal(10,0) at same column of first table"},"queryContext":[]}


 -- !query
@@ -841,7 +841,7 @@ SELECT cast(1 as string) FROM t UNION SELECT cast('2' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with string at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with string at same column of first table"},"queryContext":[]}


 -- !query
@@ -850,7 +850,7 @@ SELECT cast(1 as string) FROM t UNION SELECT cast(2 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with string at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with string at same column of first table"},"queryContext":[]}


 -- !query
@@ -877,7 +877,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast(2 as tinyint) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is tinyint type which is not compatible with binary at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is tinyint type which is not compatible with binary at same column of first table"},"queryContext":[]}


 -- !query
@@ -886,7 +886,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast(2 as smallint) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is smallint type which is not compatible with binary at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is smallint type which is not compatible with binary at same column of first table"},"queryContext":[]}


 -- !query
@@ -895,7 +895,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast(2 as int) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is int type which is not compatible with binary at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is int type which is not compatible with binary at same column of first table"},"queryContext":[]}


 -- !query
@@ -904,7 +904,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast(2 as bigint) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is bigint type which is not compatible with binary at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is bigint type which is not compatible with binary at same column of first table"},"queryContext":[]}


 -- !query
@@ -913,7 +913,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast(2 as float) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is float type which is not compatible with binary at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is float type which is not compatible with binary at same column of first table"},"queryContext":[]}


 -- !query
@@ -922,7 +922,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast(2 as double) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is double type which is not compatible with binary at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is double type which is not compatible with binary at same column of first table"},"queryContext":[]}


 -- !query
@@ -931,7 +931,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast(2 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is decimal(10,0) type which is not compatible with binary at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is decimal(10,0) type which is not compatible with binary at same column of first table"},"queryContext":[]}


 -- !query
@@ -940,7 +940,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast(2 as string) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is string type which is not compatible with binary at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is string type which is not compatible with binary at same column of first table"},"queryContext":[]}


 -- !query
@@ -958,7 +958,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast(2 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with binary at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with binary at same column of first table"},"queryContext":[]}


 -- !query
@@ -967,7 +967,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast('2017-12-11 09:30:00.0' as t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with binary at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with binary at same column of first table"},"queryContext":[]}


 -- !query
@@ -976,7 +976,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast('2017-12-11 09:30:00' as dat
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with binary at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with binary at same column of first table"},"queryContext":[]}


 -- !query
@@ -985,7 +985,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast(2 as tinyint) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is tinyint type which is not compatible with boolean at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is tinyint type which is not compatible with boolean at same column of first table"},"queryContext":[]}


 -- !query
@@ -994,7 +994,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast(2 as smallint) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is smallint type which is not compatible with boolean at same column of first table"],"queryContext":[]}
+{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. 
The first column of the second table is smallint type which is not compatible with boolean at same column of first table"},"queryContext":[]} -- !query @@ -1003,7 +1003,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast(2 as int) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is int type which is not compatible with boolean at same column of first table"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is int type which is not compatible with boolean at same column of first table"},"queryContext":[]} -- !query @@ -1012,7 +1012,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast(2 as bigint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is bigint type which is not compatible with boolean at same column of first table"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is bigint type which is not compatible with boolean at same column of first table"},"queryContext":[]} -- !query @@ -1021,7 +1021,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast(2 as float) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is float type which is not compatible with boolean at same column of first table"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is float type which is not compatible with boolean at same column of first table"},"queryContext":[]} -- !query @@ -1030,7 +1030,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast(2 as double) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is double type which is not compatible with boolean at same column of first table"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is double type which is not compatible with boolean at same column of first table"},"queryContext":[]} -- !query @@ -1039,7 +1039,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast(2 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is decimal(10,0) type which is not compatible with boolean at same column of first table"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. 
The first column of the second table is decimal(10,0) type which is not compatible with boolean at same column of first table"},"queryContext":[]} -- !query @@ -1048,7 +1048,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast(2 as string) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is string type which is not compatible with boolean at same column of first table"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is string type which is not compatible with boolean at same column of first table"},"queryContext":[]} -- !query @@ -1057,7 +1057,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast('2' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with boolean at same column of first table"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with boolean at same column of first table"},"queryContext":[]} -- !query @@ -1074,7 +1074,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast('2017-12-11 09:30:00.0' as ti struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with boolean at same column of first table"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with boolean at same column of first table"},"queryContext":[]} -- !query @@ -1083,7 +1083,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast('2017-12-11 09:30:00' as date struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with boolean at same column of first table"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with boolean at same column of first table"},"queryContext":[]} -- !query @@ -1092,7 +1092,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) FROM t UNION SELECT cast(2 as struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is tinyint type which is not compatible with timestamp at same column of first table"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. 
The first column of the second table is tinyint type which is not compatible with timestamp at same column of first table"},"queryContext":[]} -- !query @@ -1101,7 +1101,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) FROM t UNION SELECT cast(2 as struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is smallint type which is not compatible with timestamp at same column of first table"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is smallint type which is not compatible with timestamp at same column of first table"},"queryContext":[]} -- !query @@ -1110,7 +1110,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) FROM t UNION SELECT cast(2 as struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is int type which is not compatible with timestamp at same column of first table"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is int type which is not compatible with timestamp at same column of first table"},"queryContext":[]} -- !query @@ -1119,7 +1119,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) FROM t UNION SELECT cast(2 as struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is bigint type which is not compatible with timestamp at same column of first table"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is bigint type which is not compatible with timestamp at same column of first table"},"queryContext":[]} -- !query @@ -1128,7 +1128,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) FROM t UNION SELECT cast(2 as struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is float type which is not compatible with timestamp at same column of first table"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is float type which is not compatible with timestamp at same column of first table"},"queryContext":[]} -- !query @@ -1137,7 +1137,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) FROM t UNION SELECT cast(2 as struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is double type which is not compatible with timestamp at same column of first table"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. 
The first column of the second table is double type which is not compatible with timestamp at same column of first table"},"queryContext":[]} -- !query @@ -1146,7 +1146,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) FROM t UNION SELECT cast(2 as struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is decimal(10,0) type which is not compatible with timestamp at same column of first table"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is decimal(10,0) type which is not compatible with timestamp at same column of first table"},"queryContext":[]} -- !query @@ -1164,7 +1164,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) FROM t UNION SELECT cast('2' a struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with timestamp at same column of first table"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with timestamp at same column of first table"},"queryContext":[]} -- !query @@ -1173,7 +1173,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) FROM t UNION SELECT cast(2 as struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with timestamp at same column of first table"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with timestamp at same column of first table"},"queryContext":[]} -- !query @@ -1200,7 +1200,7 @@ SELECT cast('2017-12-12 09:30:00' as date) FROM t UNION SELECT cast(2 as tinyint struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is tinyint type which is not compatible with date at same column of first table"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is tinyint type which is not compatible with date at same column of first table"},"queryContext":[]} -- !query @@ -1209,7 +1209,7 @@ SELECT cast('2017-12-12 09:30:00' as date) FROM t UNION SELECT cast(2 as smallin struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is smallint type which is not compatible with date at same column of first table"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. 
The first column of the second table is smallint type which is not compatible with date at same column of first table"},"queryContext":[]} -- !query @@ -1218,7 +1218,7 @@ SELECT cast('2017-12-12 09:30:00' as date) FROM t UNION SELECT cast(2 as int) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is int type which is not compatible with date at same column of first table"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is int type which is not compatible with date at same column of first table"},"queryContext":[]} -- !query @@ -1227,7 +1227,7 @@ SELECT cast('2017-12-12 09:30:00' as date) FROM t UNION SELECT cast(2 as bigint) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is bigint type which is not compatible with date at same column of first table"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is bigint type which is not compatible with date at same column of first table"},"queryContext":[]} -- !query @@ -1236,7 +1236,7 @@ SELECT cast('2017-12-12 09:30:00' as date) FROM t UNION SELECT cast(2 as float) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is float type which is not compatible with date at same column of first table"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is float type which is not compatible with date at same column of first table"},"queryContext":[]} -- !query @@ -1245,7 +1245,7 @@ SELECT cast('2017-12-12 09:30:00' as date) FROM t UNION SELECT cast(2 as double) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is double type which is not compatible with date at same column of first table"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is double type which is not compatible with date at same column of first table"},"queryContext":[]} -- !query @@ -1254,7 +1254,7 @@ SELECT cast('2017-12-12 09:30:00' as date) FROM t UNION SELECT cast(2 as decimal struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is decimal(10,0) type which is not compatible with date at same column of first table"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. 
The first column of the second table is decimal(10,0) type which is not compatible with date at same column of first table"},"queryContext":[]} -- !query @@ -1272,7 +1272,7 @@ SELECT cast('2017-12-12 09:30:00' as date) FROM t UNION SELECT cast('2' as binar struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with date at same column of first table"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with date at same column of first table"},"queryContext":[]} -- !query @@ -1281,7 +1281,7 @@ SELECT cast('2017-12-12 09:30:00' as date) FROM t UNION SELECT cast(2 as boolean struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with date at same column of first table"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with date at same column of first table"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/windowFrameCoercion.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/windowFrameCoercion.sql.out index 3ef0c23a2b27a..27591440de548 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/windowFrameCoercion.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/windowFrameCoercion.sql.out @@ -165,7 +165,7 @@ SELECT COUNT(*) OVER (PARTITION BY 1 ORDER BY cast(1 as string) DESC RANGE BETWE struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'RANGE BETWEEN CURRENT ROW AND CAST(1 AS STRING) FOLLOWING' due to data type mismatch: The data type of the upper bound 'string' does not match the expected data type '(numeric or interval day to second or interval year to month or interval)'.; line 1 pos 21"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'RANGE BETWEEN CURRENT ROW AND CAST(1 AS STRING) FOLLOWING' due to data type mismatch: The data type of the upper bound 'string' does not match the expected data type '(numeric or interval day to second or interval year to month or interval)'.; line 1 pos 21"},"queryContext":[]} -- !query @@ -174,7 +174,7 @@ SELECT COUNT(*) OVER (PARTITION BY 1 ORDER BY cast('1' as binary) DESC RANGE BET struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'RANGE BETWEEN CURRENT ROW AND CAST(1 AS BINARY) FOLLOWING' due to data type mismatch: The data type of the upper bound 'binary' does not match the expected data type '(numeric or interval day to second or interval year to month or interval)'.; line 1 pos 21"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'RANGE BETWEEN CURRENT ROW AND CAST(1 AS BINARY) FOLLOWING' due to data type mismatch: The data type of the 
upper bound 'binary' does not match the expected data type '(numeric or interval day to second or interval year to month or interval)'.; line 1 pos 21"},"queryContext":[]} -- !query @@ -183,7 +183,7 @@ SELECT COUNT(*) OVER (PARTITION BY 1 ORDER BY cast(1 as boolean) DESC RANGE BETW struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'RANGE BETWEEN CURRENT ROW AND CAST(1 AS BOOLEAN) FOLLOWING' due to data type mismatch: The data type of the upper bound 'boolean' does not match the expected data type '(numeric or interval day to second or interval year to month or interval)'.; line 1 pos 21"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'RANGE BETWEEN CURRENT ROW AND CAST(1 AS BOOLEAN) FOLLOWING' due to data type mismatch: The data type of the upper bound 'boolean' does not match the expected data type '(numeric or interval day to second or interval year to month or interval)'.; line 1 pos 21"},"queryContext":[]} -- !query @@ -192,7 +192,7 @@ SELECT COUNT(*) OVER (PARTITION BY 1 ORDER BY cast('2017-12-11 09:30:00.0' as ti struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(PARTITION BY 1 ORDER BY CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) DESC NULLS LAST RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: The data type 'timestamp' used in the order specification does not match the data type 'int' which is used in the range frame.; line 1 pos 21"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(PARTITION BY 1 ORDER BY CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) DESC NULLS LAST RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: The data type 'timestamp' used in the order specification does not match the data type 'int' which is used in the range frame.; line 1 pos 21"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udaf.sql.out b/sql/core/src/test/resources/sql-tests/results/udaf.sql.out index e76bfab8b950b..91032a10f5770 100644 --- a/sql/core/src/test/resources/sql-tests/results/udaf.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udaf.sql.out @@ -31,7 +31,7 @@ SELECT default.myDoubleAvg(int_col1, 3) as my_avg from t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Invalid number of arguments for function spark_catalog.default.myDoubleAvg. Expected: 1; Found: 2; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Invalid number of arguments for function spark_catalog.default.myDoubleAvg. 
Expected: 1; Found: 2; line 1 pos 7"},"queryContext":[]} -- !query @@ -48,7 +48,7 @@ SELECT default.udaf1(int_col1) as udaf1 from t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Can not load class 'test.non.existent.udaf' when registering the function 'spark_catalog.default.udaf1', please make sure it is on the classpath; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Can not load class 'test.non.existent.udaf' when registering the function 'spark_catalog.default.udaf1', please make sure it is on the classpath; line 1 pos 7"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-aggregates_part1.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-aggregates_part1.sql.out index 980904ff90467..611e275827ce2 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-aggregates_part1.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-aggregates_part1.sql.out @@ -475,7 +475,7 @@ having exists (select 1 from onek b struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Found an aggregate function in a correlated predicate that has both outer and local references, which is not supported: sum(DISTINCT (outer(a.four) + b.four))"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Found an aggregate function in a correlated predicate that has both outer and local references, which is not supported: sum(DISTINCT (outer(a.four) + b.four))"},"queryContext":[]} -- !query @@ -486,4 +486,4 @@ from tenk1 o struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`o`.`unique1`","`i`.`unique1`, `i`.`unique2`, `i`.`hundred`, `i`.`even`, `i`.`four`, `i`.`stringu1`, `i`.`ten`, `i`.`odd`, `i`.`string4`, `i`.`stringu2`, `i`.`tenthous`, `i`.`twenty`, `i`.`two`, `i`.`thousand`, `i`.`fivethous`, `i`.`twothousand`"],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`o`.`unique1`","objectList":"`i`.`unique1`, `i`.`unique2`, `i`.`hundred`, `i`.`even`, `i`.`four`, `i`.`stringu1`, `i`.`ten`, `i`.`odd`, `i`.`string4`, `i`.`stringu2`, `i`.`tenthous`, `i`.`twenty`, `i`.`two`, `i`.`thousand`, `i`.`fivethous`, `i`.`twothousand`"},"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-aggregates_part3.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-aggregates_part3.sql.out index dc93cee640741..c55608446e8ab 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-aggregates_part3.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-aggregates_part3.sql.out @@ -5,7 +5,7 @@ select udf(max(min(unique1))) from tenk1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["It is not allowed to use an aggregate function in the argument of another aggregate function. Please use the inner aggregate function in a sub-query."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"It is not allowed to use an aggregate function in the argument of another aggregate function. 
Please use the inner aggregate function in a sub-query."},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-join.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-join.sql.out index 00dba9f0336ea..9e2f8d1c0a915 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-join.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-join.sql.out @@ -546,7 +546,7 @@ SELECT udf('') AS `xxx`, udf(i) AS i, udf(k), udf(t) AS t struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Reference 'i' is ambiguous, could be: spark_catalog.default.j1_tbl.i, spark_catalog.default.j2_tbl.i.; line 1 pos 29"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Reference 'i' is ambiguous, could be: spark_catalog.default.j1_tbl.i, spark_catalog.default.j2_tbl.i.; line 1 pos 29"},"queryContext":[]} -- !query @@ -3263,7 +3263,7 @@ select * from struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Reference 'f1' is ambiguous, could be: j.f1, j.f1.; line 2 pos 72"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Reference 'f1' is ambiguous, could be: j.f1, j.f1.; line 2 pos 72"},"queryContext":[]} -- !query @@ -3273,7 +3273,7 @@ select * from struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`y`.`f1`","`j`.`f1`, `j`.`f1`, `x`.`q1`, `x`.`q2`"],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`y`.`f1`","objectList":"`j`.`f1`, `j`.`f1`, `x`.`q1`, `x`.`q2`"},"queryContext":[]} -- !query @@ -3292,7 +3292,7 @@ select udf(t1.uunique1) from struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`t1`.`uunique1`","`t1`.`unique1`, `t2`.`unique1`, `t1`.`unique2`, `t2`.`unique2`, `t1`.`hundred`, `t2`.`hundred`, `t1`.`stringu1`, `t1`.`even`, `t1`.`four`, `t1`.`string4`, `t2`.`stringu1`, `t1`.`stringu2`, `t1`.`ten`, `t1`.`tenthous`, `t2`.`even`, `t2`.`four`, `t1`.`odd`, `t2`.`string4`, `t2`.`stringu2`, `t2`.`ten`, `t2`.`tenthous`, `t1`.`thousand`, `t1`.`twenty`, `t1`.`two`, `t1`.`fivethous`, `t2`.`odd`, `t2`.`thousand`, `t2`.`twenty`, `t2`.`two`, `t2`.`fivethous`, `t1`.`twothousand`, `t2`.`twothousand`"],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`t1`.`uunique1`","objectList":"`t1`.`unique1`, `t2`.`unique1`, `t1`.`unique2`, `t2`.`unique2`, `t1`.`hundred`, `t2`.`hundred`, `t1`.`stringu1`, `t1`.`even`, `t1`.`four`, `t1`.`string4`, `t2`.`stringu1`, `t1`.`stringu2`, `t1`.`ten`, `t1`.`tenthous`, `t2`.`even`, `t2`.`four`, `t1`.`odd`, `t2`.`string4`, `t2`.`stringu2`, `t2`.`ten`, `t2`.`tenthous`, `t1`.`thousand`, `t1`.`twenty`, `t1`.`two`, `t1`.`fivethous`, `t2`.`odd`, `t2`.`thousand`, `t2`.`twenty`, `t2`.`two`, `t2`.`fivethous`, `t1`.`twothousand`, `t2`.`twothousand`"},"queryContext":[]} -- !query @@ -3302,7 +3302,7 @@ select udf(udf(t2.uunique1)) from struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`t2`.`uunique1`","`t2`.`unique1`, `t1`.`unique1`, `t2`.`unique2`, `t1`.`unique2`, `t2`.`hundred`, `t1`.`hundred`, `t2`.`stringu1`, `t2`.`even`, `t2`.`four`, `t2`.`string4`, 
`t1`.`stringu1`, `t2`.`stringu2`, `t2`.`ten`, `t2`.`tenthous`, `t1`.`even`, `t1`.`four`, `t2`.`odd`, `t1`.`string4`, `t1`.`stringu2`, `t1`.`ten`, `t1`.`tenthous`, `t2`.`thousand`, `t2`.`twenty`, `t2`.`two`, `t2`.`fivethous`, `t1`.`odd`, `t1`.`thousand`, `t1`.`twenty`, `t1`.`two`, `t1`.`fivethous`, `t2`.`twothousand`, `t1`.`twothousand`"],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`t2`.`uunique1`","objectList":"`t2`.`unique1`, `t1`.`unique1`, `t2`.`unique2`, `t1`.`unique2`, `t2`.`hundred`, `t1`.`hundred`, `t2`.`stringu1`, `t2`.`even`, `t2`.`four`, `t2`.`string4`, `t1`.`stringu1`, `t2`.`stringu2`, `t2`.`ten`, `t2`.`tenthous`, `t1`.`even`, `t1`.`four`, `t2`.`odd`, `t1`.`string4`, `t1`.`stringu2`, `t1`.`ten`, `t1`.`tenthous`, `t2`.`thousand`, `t2`.`twenty`, `t2`.`two`, `t2`.`fivethous`, `t1`.`odd`, `t1`.`thousand`, `t1`.`twenty`, `t1`.`two`, `t1`.`fivethous`, `t2`.`twothousand`, `t1`.`twothousand`"},"queryContext":[]} -- !query @@ -3312,7 +3312,7 @@ select udf(uunique1) from struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`uunique1`","`t1`.`unique1`, `t2`.`unique1`, `t1`.`unique2`, `t2`.`unique2`, `t1`.`even`, `t2`.`even`, `t1`.`four`, `t2`.`four`, `t1`.`ten`, `t2`.`ten`, `t1`.`hundred`, `t2`.`hundred`, `t1`.`odd`, `t2`.`odd`, `t1`.`two`, `t2`.`two`, `t1`.`stringu1`, `t2`.`stringu1`, `t1`.`twenty`, `t2`.`twenty`, `t1`.`string4`, `t2`.`string4`, `t1`.`stringu2`, `t2`.`stringu2`, `t1`.`tenthous`, `t2`.`tenthous`, `t1`.`thousand`, `t2`.`thousand`, `t1`.`fivethous`, `t2`.`fivethous`, `t1`.`twothousand`, `t2`.`twothousand`"],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`uunique1`","objectList":"`t1`.`unique1`, `t2`.`unique1`, `t1`.`unique2`, `t2`.`unique2`, `t1`.`even`, `t2`.`even`, `t1`.`four`, `t2`.`four`, `t1`.`ten`, `t2`.`ten`, `t1`.`hundred`, `t2`.`hundred`, `t1`.`odd`, `t2`.`odd`, `t1`.`two`, `t2`.`two`, `t1`.`stringu1`, `t2`.`stringu1`, `t1`.`twenty`, `t2`.`twenty`, `t1`.`string4`, `t2`.`string4`, `t1`.`stringu2`, `t2`.`stringu2`, `t1`.`tenthous`, `t2`.`tenthous`, `t1`.`thousand`, `t2`.`thousand`, `t1`.`fivethous`, `t2`.`fivethous`, `t1`.`twothousand`, `t2`.`twothousand`"},"queryContext":[]} -- !query @@ -3512,7 +3512,7 @@ select udf(udf(f1,g)) from int4_tbl a, (select udf(udf(f1)) as g) ss struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`f1`",""],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`f1`","objectList":""},"queryContext":[]} -- !query @@ -3521,7 +3521,7 @@ select udf(f1,g) from int4_tbl a, (select a.f1 as g) ss struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`a`.`f1`",""],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`a`.`f1`","objectList":""},"queryContext":[]} -- !query @@ -3530,7 +3530,7 @@ select udf(udf(f1,g)) from int4_tbl a cross join (select udf(f1) as g) ss struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`f1`",""],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`f1`","objectList":""},"queryContext":[]} 
-- !query @@ -3539,7 +3539,7 @@ select udf(f1,g) from int4_tbl a cross join (select udf(udf(a.f1)) as g) ss struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`a`.`f1`",""],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`a`.`f1`","objectList":""},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_having.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_having.sql.out index 1d665b7d0e892..3cd64beba0f4c 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_having.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_having.sql.out @@ -140,7 +140,7 @@ SELECT udf(a) FROM test_having HAVING udf(min(a)) < udf(max(a)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["grouping expressions sequence is empty, and 'spark_catalog.default.test_having.a' is not an aggregate function. Wrap '(min(spark_catalog.default.test_having.a) AS `min(a#x)`, max(spark_catalog.default.test_having.a) AS `max(a#x)`)' in windowing function(s) or wrap 'spark_catalog.default.test_having.a' in first() (or first_value) if you don't care which value you get."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"grouping expressions sequence is empty, and 'spark_catalog.default.test_having.a' is not an aggregate function. Wrap '(min(spark_catalog.default.test_having.a) AS `min(a#x)`, max(spark_catalog.default.test_having.a) AS `max(a#x)`)' in windowing function(s) or wrap 'spark_catalog.default.test_having.a' in first() (or first_value) if you don't care which value you get."},"queryContext":[]} -- !query @@ -149,7 +149,7 @@ SELECT 1 AS one FROM test_having HAVING udf(a) > 1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`a`","`one`"],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`a`","objectList":"`one`"},"queryContext":[]} -- !query @@ -174,7 +174,7 @@ SELECT 1 AS one FROM test_having WHERE 1/udf(a) = 1 HAVING 1 < 2 struct<> -- !query output org.apache.spark.SparkArithmeticException -{"errorClass":"DIVIDE_BY_ZERO","sqlState":"22012","messageParameters":["\"spark.sql.ansi.enabled\""],"queryContext":[{"objectType":"","objectName":"","startIndex":39,"stopIndex":46,"fragment":"1/udf(a"}]} +{"errorClass":"DIVIDE_BY_ZERO","sqlState":"22012","messageParameters":{"config":"\"spark.sql.ansi.enabled\""},"queryContext":[{"objectType":"","objectName":"","startIndex":39,"stopIndex":46,"fragment":"1/udf(a"}]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_implicit.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_implicit.sql.out index 02ada1190b8e3..134bdaa73b8e3 100755 --- a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_implicit.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_implicit.sql.out @@ -122,7 +122,7 @@ SELECT udf(count(*)) FROM test_missing_target GROUP BY udf(a) ORDER BY udf(b) struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`b`","`udf(count(1))`"],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`b`","objectList":"`udf(count(1))`"},"queryContext":[]} -- !query @@ -203,7 +203,7 @@ SELECT udf(c), udf(count(*)) FROM test_missing_target GROUP BY 3 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["GROUP BY position 3 is not in select list (valid range is [1, 2]); line 1 pos 63"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"GROUP BY position 3 is not in select list (valid range is [1, 2]); line 1 pos 63"},"queryContext":[]} -- !query @@ -214,7 +214,7 @@ SELECT udf(count(*)) FROM test_missing_target x, test_missing_target y struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Reference 'b' is ambiguous, could be: x.b, y.b.; line 3 pos 14"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Reference 'b' is ambiguous, could be: x.b, y.b.; line 3 pos 14"},"queryContext":[]} -- !query @@ -327,7 +327,7 @@ SELECT udf(count(udf(a))) FROM test_missing_target GROUP BY udf(a) ORDER BY udf( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`b`","`udf(count(udf(a)))`"],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`b`","objectList":"`udf(count(udf(a)))`"},"queryContext":[]} -- !query @@ -390,7 +390,7 @@ SELECT udf(count(udf(x.a))) FROM test_missing_target x, test_missing_target y struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Reference 'b' is ambiguous, could be: x.b, y.b.; line 3 pos 14"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Reference 'b' is ambiguous, could be: x.b, y.b.; line 3 pos 14"},"queryContext":[]} -- !query @@ -415,7 +415,7 @@ SELECT udf(count(udf(b))) FROM test_missing_target x, test_missing_target y struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Reference 'b' is ambiguous, could be: x.b, y.b.; line 1 pos 21"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Reference 'b' is ambiguous, could be: x.b, y.b.; line 1 pos 21"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-except-all.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-except-all.sql.out index 38d551625b98a..89d24399896cf 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/udf-except-all.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-except-all.sql.out @@ -138,7 +138,7 @@ SELECT array(1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["ExceptAll can only be performed on tables with the compatible column types. The first column of the second table is array type which is not compatible with int at same column of first table"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"ExceptAll can only be performed on tables with the compatible column types. 
The first column of the second table is array type which is not compatible with int at same column of first table"},"queryContext":[]} -- !query @@ -210,7 +210,7 @@ SELECT k, v FROM tab4 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["ExceptAll can only be performed on tables with the same number of columns, but the first table has 1 columns and the second table has 2 columns"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"ExceptAll can only be performed on tables with the same number of columns, but the first table has 1 columns and the second table has 2 columns"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-except.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-except.sql.out index 05638b055ec9b..d73bb83396492 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/udf-except.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-except.sql.out @@ -100,4 +100,4 @@ WHERE udf(t1.v) >= (SELECT min(udf(t2.v)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Correlated column is not allowed in predicate (CAST(udf(cast(k as string)) AS STRING) = CAST(udf(cast(outer(k#x) as string)) AS STRING)):\nAggregate [cast(udf(cast(max(cast(udf(cast(v#x as string)) as int)) as string)) as int) AS udf(max(udf(v)))#x]\n+- Filter (cast(udf(cast(k#x as string)) as string) = cast(udf(cast(outer(k#x) as string)) as string))\n +- SubqueryAlias t2\n +- View (`t2`, [k#x,v#x])\n +- Project [cast(k#x as string) AS k#x, cast(v#x as int) AS v#x]\n +- Project [k#x, v#x]\n +- SubqueryAlias t2\n +- LocalRelation [k#x, v#x]\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Correlated column is not allowed in predicate (CAST(udf(cast(k as string)) AS STRING) = CAST(udf(cast(outer(k#x) as string)) AS STRING)):\nAggregate [cast(udf(cast(max(cast(udf(cast(v#x as string)) as int)) as string)) as int) AS udf(max(udf(v)))#x]\n+- Filter (cast(udf(cast(k#x as string)) as string) = cast(udf(cast(outer(k#x) as string)) as string))\n +- SubqueryAlias t2\n +- View (`t2`, [k#x,v#x])\n +- Project [cast(k#x as string) AS k#x, cast(v#x as int) AS v#x]\n +- Project [k#x, v#x]\n +- SubqueryAlias t2\n +- LocalRelation [k#x, v#x]\n"},"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-group-analytics.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-group-analytics.sql.out index 597a30d222ce2..cfd056dd645fb 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/udf-group-analytics.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-group-analytics.sql.out @@ -207,7 +207,7 @@ SELECT course, udf(year), GROUPING(course) FROM courseSales GROUP BY course, udf struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["grouping() can only be used with GroupingSets/Cube/Rollup"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"grouping() can only be used with GroupingSets/Cube/Rollup"},"queryContext":[]} -- !query @@ -216,7 +216,7 @@ SELECT course, udf(year), GROUPING_ID(course, year) FROM courseSales GROUP BY ud struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["grouping_id() can only be used with GroupingSets/Cube/Rollup"],"queryContext":[]} 
+{"errorClass":"legacy","messageParameters":{"message":"grouping_id() can only be used with GroupingSets/Cube/Rollup"},"queryContext":[]} -- !query @@ -252,7 +252,7 @@ SELECT course, udf(year) FROM courseSales GROUP BY udf(course), year HAVING GROU struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNSUPPORTED_GROUPING_EXPRESSION","messageParameters":[],"queryContext":[]} +{"errorClass":"UNSUPPORTED_GROUPING_EXPRESSION","messageParameters":{},"queryContext":[]} -- !query @@ -261,7 +261,7 @@ SELECT course, udf(udf(year)) FROM courseSales GROUP BY course, year HAVING GROU struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNSUPPORTED_GROUPING_EXPRESSION","messageParameters":[],"queryContext":[]} +{"errorClass":"UNSUPPORTED_GROUPING_EXPRESSION","messageParameters":{},"queryContext":[]} -- !query @@ -316,7 +316,7 @@ SELECT course, udf(year) FROM courseSales GROUP BY course, udf(year) ORDER BY GR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNSUPPORTED_GROUPING_EXPRESSION","messageParameters":[],"queryContext":[]} +{"errorClass":"UNSUPPORTED_GROUPING_EXPRESSION","messageParameters":{},"queryContext":[]} -- !query @@ -325,7 +325,7 @@ SELECT course, udf(year) FROM courseSales GROUP BY course, udf(year) ORDER BY GR struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNSUPPORTED_GROUPING_EXPRESSION","messageParameters":[],"queryContext":[]} +{"errorClass":"UNSUPPORTED_GROUPING_EXPRESSION","messageParameters":{},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-group-by.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-group-by.sql.out index 1eae366102452..324d9c2d3e318 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/udf-group-by.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-group-by.sql.out @@ -15,7 +15,7 @@ SELECT udf(a), udf(COUNT(b)) FROM testData struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["grouping expressions sequence is empty, and 'testdata.a' is not an aggregate function. Wrap '(CAST(udf(cast(count(b) as string)) AS BIGINT) AS `udf(count(b))`)' in windowing function(s) or wrap 'testdata.a' in first() (or first_value) if you don't care which value you get."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"grouping expressions sequence is empty, and 'testdata.a' is not an aggregate function. Wrap '(CAST(udf(cast(count(b) as string)) AS BIGINT) AS `udf(count(b))`)' in windowing function(s) or wrap 'testdata.a' in first() (or first_value) if you don't care which value you get."},"queryContext":[]} -- !query @@ -43,7 +43,7 @@ SELECT udf(a), udf(COUNT(udf(b))) FROM testData GROUP BY b struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["expression 'testdata.a' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"expression 'testdata.a' is neither present in the group by, nor is it an aggregate function. 
Add to group by or wrap in first() (or first_value) if you don't care which value you get."},"queryContext":[]} -- !query @@ -107,7 +107,7 @@ SELECT udf(a + 2), udf(COUNT(b)) FROM testData GROUP BY a + 1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["expression 'testdata.a' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"expression 'testdata.a' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get."},"queryContext":[]} -- !query @@ -164,7 +164,7 @@ SELECT udf(COUNT(b)) AS k FROM testData GROUP BY k struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["aggregate functions are not allowed in GROUP BY, but found CAST(udf(cast(count(b) as string)) AS BIGINT)"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"aggregate functions are not allowed in GROUP BY, but found CAST(udf(cast(count(b) as string)) AS BIGINT)"},"queryContext":[]} -- !query @@ -182,7 +182,7 @@ SELECT k AS a, udf(COUNT(udf(v))) FROM testDataHasSameNameWithAlias GROUP BY udf struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["expression 'testdatahassamenamewithalias.k' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"expression 'testdatahassamenamewithalias.k' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get."},"queryContext":[]} -- !query @@ -199,7 +199,7 @@ SELECT a AS k, udf(COUNT(udf(b))) FROM testData GROUP BY k struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`k`","`testdata`.`a`, `testdata`.`b`"],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`k`","objectList":"`testdata`.`a`, `testdata`.`b`"},"queryContext":[]} -- !query @@ -271,7 +271,7 @@ SELECT udf(id) FROM range(10) HAVING id > 0 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["grouping expressions sequence is empty, and 'id' is not an aggregate function. Wrap '()' in windowing function(s) or wrap 'id' in first() (or first_value) if you don't care which value you get."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"grouping expressions sequence is empty, and 'id' is not an aggregate function. 
Wrap '()' in windowing function(s) or wrap 'id' in first() (or first_value) if you don't care which value you get."},"queryContext":[]} -- !query @@ -377,7 +377,7 @@ SELECT every(udf(1)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'every(CAST(udf(cast(1 as string)) AS INT))' due to data type mismatch: argument 1 requires boolean type, however, 'CAST(udf(cast(1 as string)) AS INT)' is of int type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'every(CAST(udf(cast(1 as string)) AS INT))' due to data type mismatch: argument 1 requires boolean type, however, 'CAST(udf(cast(1 as string)) AS INT)' is of int type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -386,7 +386,7 @@ SELECT some(udf(1S)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'some(CAST(udf(cast(1 as string)) AS SMALLINT))' due to data type mismatch: argument 1 requires boolean type, however, 'CAST(udf(cast(1 as string)) AS SMALLINT)' is of smallint type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'some(CAST(udf(cast(1 as string)) AS SMALLINT))' due to data type mismatch: argument 1 requires boolean type, however, 'CAST(udf(cast(1 as string)) AS SMALLINT)' is of smallint type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -395,7 +395,7 @@ SELECT any(udf(1L)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'any(CAST(udf(cast(1 as string)) AS BIGINT))' due to data type mismatch: argument 1 requires boolean type, however, 'CAST(udf(cast(1 as string)) AS BIGINT)' is of bigint type.; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'any(CAST(udf(cast(1 as string)) AS BIGINT))' due to data type mismatch: argument 1 requires boolean type, however, 'CAST(udf(cast(1 as string)) AS BIGINT)' is of bigint type.; line 1 pos 7"},"queryContext":[]} -- !query @@ -404,7 +404,7 @@ SELECT udf(every("true")) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'every('true')' due to data type mismatch: argument 1 requires boolean type, however, ''true'' is of string type.; line 1 pos 11"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'every('true')' due to data type mismatch: argument 1 requires boolean type, however, ''true'' is of string type.; line 1 pos 11"},"queryContext":[]} -- !query @@ -490,7 +490,7 @@ SELECT udf(count(*)) FROM test_agg WHERE count(*) > 1L struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["\nAggregate/Window/Generate expressions are not valid in where clause of the query.\nExpression in where clause: [(count(1) > 1L)]\nInvalid expressions: [count(1)]"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nAggregate/Window/Generate expressions are not valid in where clause of the query.\nExpression in where clause: [(count(1) > 1L)]\nInvalid expressions: [count(1)]"},"queryContext":[]} -- !query @@ -499,7 +499,7 @@ SELECT udf(count(*)) FROM test_agg WHERE count(*) + 1L > 1L struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["\nAggregate/Window/Generate 
expressions are not valid in where clause of the query.\nExpression in where clause: [((count(1) + 1L) > 1L)]\nInvalid expressions: [count(1)]"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nAggregate/Window/Generate expressions are not valid in where clause of the query.\nExpression in where clause: [((count(1) + 1L) > 1L)]\nInvalid expressions: [count(1)]"},"queryContext":[]} -- !query @@ -508,4 +508,4 @@ SELECT udf(count(*)) FROM test_agg WHERE k = 1 or k = 2 or count(*) + 1L > 1L or struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["\nAggregate/Window/Generate expressions are not valid in where clause of the query.\nExpression in where clause: [(((test_agg.k = 1) OR (test_agg.k = 2)) OR (((count(1) + 1L) > 1L) OR (max(test_agg.k) > 1)))]\nInvalid expressions: [count(1), max(test_agg.k)]"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nAggregate/Window/Generate expressions are not valid in where clause of the query.\nExpression in where clause: [(((test_agg.k = 1) OR (test_agg.k = 2)) OR (((count(1) + 1L) > 1L) OR (max(test_agg.k) > 1)))]\nInvalid expressions: [count(1), max(test_agg.k)]"},"queryContext":[]} diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-inline-table.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-inline-table.sql.out index 3c4060abad878..03f160ba23c73 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/udf-inline-table.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-inline-table.sql.out @@ -94,7 +94,7 @@ select udf(a), b from values ("one", rand(5)), ("two", 3.0D) as data(a, b) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot evaluate expression rand(5) in inline table definition; line 1 pos 37"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot evaluate expression rand(5) in inline table definition; line 1 pos 37"},"queryContext":[]} -- !query @@ -103,7 +103,7 @@ select udf(a), udf(b) from values ("one", 2.0), ("two") as data(a, b) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["expected 2 columns but found 1 columns in row 1; line 1 pos 27"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"expected 2 columns but found 1 columns in row 1; line 1 pos 27"},"queryContext":[]} -- !query @@ -112,7 +112,7 @@ select udf(a), udf(b) from values ("one", array(0, 1)), ("two", struct(1, 2)) as struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["incompatible types found in column b for inline table; line 1 pos 27"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"incompatible types found in column b for inline table; line 1 pos 27"},"queryContext":[]} -- !query @@ -121,7 +121,7 @@ select udf(a), udf(b) from values ("one"), ("two") as data(a, b) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["expected 2 columns but found 1 columns in row 0; line 1 pos 27"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"expected 2 columns but found 1 columns in row 0; line 1 pos 27"},"queryContext":[]} -- !query @@ -130,7 +130,7 @@ select udf(a), udf(b) from values ("one", random_not_exist_func(1)), ("two", 2) struct<> -- !query output 
org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Undefined function: random_not_exist_func. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.random_not_exist_func.; line 1 pos 42"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Undefined function: random_not_exist_func. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.random_not_exist_func.; line 1 pos 42"},"queryContext":[]} -- !query @@ -139,7 +139,7 @@ select udf(a), udf(b) from values ("one", count(1)), ("two", 2) as data(a, b) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot evaluate expression count(1) in inline table definition; line 1 pos 42"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot evaluate expression count(1) in inline table definition; line 1 pos 42"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-intersect-all.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-intersect-all.sql.out index a42c4d162531d..3868895b63f79 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/udf-intersect-all.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-intersect-all.sql.out @@ -95,7 +95,7 @@ SELECT array(1), udf(2) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["IntersectAll can only be performed on tables with the compatible column types. The first column of the second table is array type which is not compatible with int at same column of first table"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"IntersectAll can only be performed on tables with the compatible column types. 
The first column of the second table is array type which is not compatible with int at same column of first table"},"queryContext":[]} -- !query @@ -106,7 +106,7 @@ SELECT udf(k), udf(v) FROM tab2 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["IntersectAll can only be performed on tables with the same number of columns, but the first table has 1 columns and the second table has 2 columns"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"IntersectAll can only be performed on tables with the same number of columns, but the first table has 1 columns and the second table has 2 columns"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-pivot.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-pivot.sql.out index ef7d02985b05c..1e0b37ec3e7f6 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/udf-pivot.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-pivot.sql.out @@ -199,7 +199,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Aggregate expression required for pivot, but 'coursesales.earnings' did not appear in any aggregate function."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Aggregate expression required for pivot, but 'coursesales.earnings' did not appear in any aggregate function."},"queryContext":[]} -- !query @@ -214,7 +214,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Aggregate expression required for pivot, but '__auto_generated_subquery_name.year' did not appear in any aggregate function."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Aggregate expression required for pivot, but '__auto_generated_subquery_name.year' did not appear in any aggregate function."},"queryContext":[]} -- !query @@ -229,7 +229,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`year`","`__auto_generated_subquery_name`.`course`, `__auto_generated_subquery_name`.`earnings`"],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`year`","objectList":"`__auto_generated_subquery_name`.`course`, `__auto_generated_subquery_name`.`earnings`"},"queryContext":[]} -- !query @@ -259,7 +259,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["It is not allowed to use an aggregate function in the argument of another aggregate function. Please use the inner aggregate function in a sub-query."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"It is not allowed to use an aggregate function in the argument of another aggregate function. 
Please use the inner aggregate function in a sub-query."},"queryContext":[]} -- !query @@ -310,7 +310,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"PIVOT_VALUE_DATA_TYPE_MISMATCH","sqlState":"42000","messageParameters":["dotNET","string","struct"],"queryContext":[]} +{"errorClass":"PIVOT_VALUE_DATA_TYPE_MISMATCH","sqlState":"42000","messageParameters":{"value":"dotNET","valueType":"string","pivotType":"struct"},"queryContext":[]} -- !query @@ -323,7 +323,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":["`s`","`coursesales`.`year`, `coursesales`.`course`, `coursesales`.`earnings`"],"queryContext":[]} +{"errorClass":"UNRESOLVED_COLUMN","sqlState":"42000","messageParameters":{"objectName":"`s`","objectList":"`coursesales`.`year`, `coursesales`.`course`, `coursesales`.`earnings`"},"queryContext":[]} -- !query @@ -336,7 +336,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"NON_LITERAL_PIVOT_VALUES","sqlState":"42000","messageParameters":["\"course\""],"queryContext":[]} +{"errorClass":"NON_LITERAL_PIVOT_VALUES","sqlState":"42000","messageParameters":{"expression":"\"course\""},"queryContext":[]} -- !query @@ -421,7 +421,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"INCOMPARABLE_PIVOT_COLUMN","sqlState":"42000","messageParameters":["`__auto_generated_subquery_name`.`m`"],"queryContext":[]} +{"errorClass":"INCOMPARABLE_PIVOT_COLUMN","sqlState":"42000","messageParameters":{"columnName":"`__auto_generated_subquery_name`.`m`"},"queryContext":[]} -- !query @@ -438,7 +438,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"INCOMPARABLE_PIVOT_COLUMN","sqlState":"42000","messageParameters":["`named_struct('course', __auto_generated_subquery_name`.`course, 'm', __auto_generated_subquery_name`.`m)`"],"queryContext":[]} +{"errorClass":"INCOMPARABLE_PIVOT_COLUMN","sqlState":"42000","messageParameters":{"columnName":"`named_struct('course', __auto_generated_subquery_name`.`course, 'm', __auto_generated_subquery_name`.`m)`"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-udaf.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-udaf.sql.out index a7441bb82e04e..ecbf540fd4673 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/udf-udaf.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-udaf.sql.out @@ -31,7 +31,7 @@ SELECT default.myDoubleAvg(udf(int_col1), udf(3)) as my_avg from t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Invalid number of arguments for function spark_catalog.default.myDoubleAvg. Expected: 1; Found: 2; line 1 pos 7"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Invalid number of arguments for function spark_catalog.default.myDoubleAvg. 
Expected: 1; Found: 2; line 1 pos 7"},"queryContext":[]} -- !query @@ -48,7 +48,7 @@ SELECT default.udaf1(udf(int_col1)) as udaf1, udf(default.udaf1(udf(int_col1))) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Can not load class 'test.non.existent.udaf' when registering the function 'spark_catalog.default.udaf1', please make sure it is on the classpath; line 1 pos 94"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Can not load class 'test.non.existent.udaf' when registering the function 'spark_catalog.default.udaf1', please make sure it is on the classpath; line 1 pos 94"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-window.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-window.sql.out index 80ef954652169..75f730fb34bf9 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/udf-window.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-window.sql.out @@ -58,7 +58,7 @@ ROWS BETWEEN CURRENT ROW AND 2147483648 FOLLOWING) FROM testData ORDER BY udf(ca struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'ROWS BETWEEN CURRENT ROW AND 2147483648L FOLLOWING' due to data type mismatch: The data type of the upper bound 'bigint' does not match the expected data type 'int'.; line 1 pos 46"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'ROWS BETWEEN CURRENT ROW AND 2147483648L FOLLOWING' due to data type mismatch: The data type of the upper bound 'bigint' does not match the expected data type 'int'.; line 1 pos 46"},"queryContext":[]} -- !query @@ -188,7 +188,7 @@ ROWS BETWEEN UNBOUNDED FOLLOWING AND 1 FOLLOWING) FROM testData ORDER BY cate, u struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'ROWS BETWEEN UNBOUNDED FOLLOWING AND 1 FOLLOWING' due to data type mismatch: Window frame upper bound '1' does not follow the lower bound 'unboundedfollowing$()'.; line 1 pos 38"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'ROWS BETWEEN UNBOUNDED FOLLOWING AND 1 FOLLOWING' due to data type mismatch: Window frame upper bound '1' does not follow the lower bound 'unboundedfollowing$()'.; line 1 pos 38"},"queryContext":[]} -- !query @@ -198,7 +198,7 @@ RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING) FROM testData ORDER BY cate, udf(val) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(PARTITION BY CAST(udf(cast(cate as string)) AS STRING) RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: A range window frame cannot be used in an unordered window specification.; line 1 pos 38"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(PARTITION BY CAST(udf(cast(cate as string)) AS STRING) RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: A range window frame cannot be used in an unordered window specification.; line 1 pos 38"},"queryContext":[]} -- !query @@ -208,7 +208,7 @@ RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING) FROM testData ORDER BY cate, udf(val) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(PARTITION BY CAST(udf(cast(cate as string)) AS STRING) ORDER BY 
CAST(udf(cast(val as string)) AS INT) ASC NULLS FIRST, testdata.cate ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: A range window frame with value boundaries cannot be used in a window specification with multiple order by expressions: cast(udf(cast(val#x as string)) as int) ASC NULLS FIRST,cate#x ASC NULLS FIRST; line 1 pos 38"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(PARTITION BY CAST(udf(cast(cate as string)) AS STRING) ORDER BY CAST(udf(cast(val as string)) AS INT) ASC NULLS FIRST, testdata.cate ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: A range window frame with value boundaries cannot be used in a window specification with multiple order by expressions: cast(udf(cast(val#x as string)) as int) ASC NULLS FIRST,cate#x ASC NULLS FIRST; line 1 pos 38"},"queryContext":[]} -- !query @@ -218,7 +218,7 @@ RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING) FROM testData ORDER BY cate, udf(val) struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(PARTITION BY CAST(udf(cast(cate as string)) AS STRING) ORDER BY current_timestamp() ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: The data type 'timestamp' used in the order specification does not match the data type 'int' which is used in the range frame.; line 1 pos 38"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(PARTITION BY CAST(udf(cast(cate as string)) AS STRING) ORDER BY current_timestamp() ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: The data type 'timestamp' used in the order specification does not match the data type 'int' which is used in the range frame.; line 1 pos 38"},"queryContext":[]} -- !query @@ -228,7 +228,7 @@ RANGE BETWEEN 1 FOLLOWING AND 1 PRECEDING) FROM testData ORDER BY udf(cate), val struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'RANGE BETWEEN 1 FOLLOWING AND 1 PRECEDING' due to data type mismatch: The lower bound of a window frame must be less than or equal to the upper bound; line 1 pos 38"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'RANGE BETWEEN 1 FOLLOWING AND 1 PRECEDING' due to data type mismatch: The lower bound of a window frame must be less than or equal to the upper bound; line 1 pos 38"},"queryContext":[]} -- !query @@ -238,7 +238,7 @@ RANGE BETWEEN CURRENT ROW AND current_date PRECEDING) FROM testData ORDER BY cat struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nFrame bound value must be a literal.(line 2, pos 30)\n\n== SQL ==\nSELECT udf(val), cate, count(val) OVER(PARTITION BY udf(cate) ORDER BY udf(val)\nRANGE BETWEEN CURRENT ROW AND current_date PRECEDING) FROM testData ORDER BY cate, val(val)\n------------------------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nFrame bound value must be a literal.(line 2, pos 30)\n\n== SQL ==\nSELECT udf(val), cate, count(val) OVER(PARTITION BY udf(cate) ORDER BY udf(val)\nRANGE BETWEEN CURRENT ROW AND current_date PRECEDING) FROM testData ORDER BY cate, val(val)\n------------------------------^^^\n"},"queryContext":[]} -- !query @@ -315,7 +315,7 @@ SELECT udf(val), cate, row_number() OVER(PARTITION 
BY cate) FROM testData ORDER struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Window function row_number() requires window to be ordered, please add ORDER BY clause. For example SELECT row_number()(value_expr) OVER (PARTITION BY window_partition ORDER BY window_ordering) from table"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Window function row_number() requires window to be ordered, please add ORDER BY clause. For example SELECT row_number()(value_expr) OVER (PARTITION BY window_partition ORDER BY window_ordering) from table"},"queryContext":[]} -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/window.sql.out b/sql/core/src/test/resources/sql-tests/results/window.sql.out index afd16c8998ceb..cb27ec5d700e1 100644 --- a/sql/core/src/test/resources/sql-tests/results/window.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/window.sql.out @@ -102,7 +102,7 @@ ROWS BETWEEN CURRENT ROW AND 2147483648 FOLLOWING) FROM testData ORDER BY cate, struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'ROWS BETWEEN CURRENT ROW AND 2147483648L FOLLOWING' due to data type mismatch: The data type of the upper bound 'bigint' does not match the expected data type 'int'.; line 1 pos 41"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'ROWS BETWEEN CURRENT ROW AND 2147483648L FOLLOWING' due to data type mismatch: The data type of the upper bound 'bigint' does not match the expected data type 'int'.; line 1 pos 41"},"queryContext":[]} -- !query @@ -324,7 +324,7 @@ ORDER BY cate, val_date struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(PARTITION BY testdata.cate ORDER BY testdata.val_date ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND INTERVAL '1 02:03:04.001' DAY TO SECOND FOLLOWING)' due to data type mismatch: The data type 'date' used in the order specification does not match the data type 'interval day to second' which is used in the range frame.; line 1 pos 46"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(PARTITION BY testdata.cate ORDER BY testdata.val_date ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND INTERVAL '1 02:03:04.001' DAY TO SECOND FOLLOWING)' due to data type mismatch: The data type 'date' used in the order specification does not match the data type 'interval day to second' which is used in the range frame.; line 1 pos 46"},"queryContext":[]} -- !query @@ -351,7 +351,7 @@ ROWS BETWEEN UNBOUNDED FOLLOWING AND 1 FOLLOWING) FROM testData ORDER BY cate, v struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'ROWS BETWEEN UNBOUNDED FOLLOWING AND 1 FOLLOWING' due to data type mismatch: Window frame upper bound '1' does not follow the lower bound 'unboundedfollowing$()'.; line 1 pos 33"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'ROWS BETWEEN UNBOUNDED FOLLOWING AND 1 FOLLOWING' due to data type mismatch: Window frame upper bound '1' does not follow the lower bound 'unboundedfollowing$()'.; line 1 pos 33"},"queryContext":[]} -- !query @@ -361,7 +361,7 @@ RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING) FROM testData ORDER BY cate, val struct<> -- !query output org.apache.spark.sql.AnalysisException 
-{"errorClass":"legacy","messageParameters":["cannot resolve '(PARTITION BY testdata.cate RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: A range window frame cannot be used in an unordered window specification.; line 1 pos 33"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(PARTITION BY testdata.cate RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: A range window frame cannot be used in an unordered window specification.; line 1 pos 33"},"queryContext":[]} -- !query @@ -371,7 +371,7 @@ RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING) FROM testData ORDER BY cate, val struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(PARTITION BY testdata.cate ORDER BY testdata.val ASC NULLS FIRST, testdata.cate ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: A range window frame with value boundaries cannot be used in a window specification with multiple order by expressions: val#x ASC NULLS FIRST,cate#x ASC NULLS FIRST; line 1 pos 33"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(PARTITION BY testdata.cate ORDER BY testdata.val ASC NULLS FIRST, testdata.cate ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: A range window frame with value boundaries cannot be used in a window specification with multiple order by expressions: val#x ASC NULLS FIRST,cate#x ASC NULLS FIRST; line 1 pos 33"},"queryContext":[]} -- !query @@ -381,7 +381,7 @@ RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING) FROM testData ORDER BY cate, val struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve '(PARTITION BY testdata.cate ORDER BY current_timestamp() ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: The data type 'timestamp' used in the order specification does not match the data type 'int' which is used in the range frame.; line 1 pos 33"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve '(PARTITION BY testdata.cate ORDER BY current_timestamp() ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: The data type 'timestamp' used in the order specification does not match the data type 'int' which is used in the range frame.; line 1 pos 33"},"queryContext":[]} -- !query @@ -391,7 +391,7 @@ RANGE BETWEEN 1 FOLLOWING AND 1 PRECEDING) FROM testData ORDER BY cate, val struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["cannot resolve 'RANGE BETWEEN 1 FOLLOWING AND 1 PRECEDING' due to data type mismatch: The lower bound of a window frame must be less than or equal to the upper bound; line 1 pos 33"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"cannot resolve 'RANGE BETWEEN 1 FOLLOWING AND 1 PRECEDING' due to data type mismatch: The lower bound of a window frame must be less than or equal to the upper bound; line 1 pos 33"},"queryContext":[]} -- !query @@ -401,7 +401,7 @@ RANGE BETWEEN CURRENT ROW AND current_date PRECEDING) FROM testData ORDER BY cat struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"legacy","messageParameters":["\nFrame bound value must be a literal.(line 2, pos 30)\n\n== SQL ==\nSELECT val, cate, count(val) OVER(PARTITION BY cate ORDER 
BY val\nRANGE BETWEEN CURRENT ROW AND current_date PRECEDING) FROM testData ORDER BY cate, val\n------------------------------^^^\n"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"\nFrame bound value must be a literal.(line 2, pos 30)\n\n== SQL ==\nSELECT val, cate, count(val) OVER(PARTITION BY cate ORDER BY val\nRANGE BETWEEN CURRENT ROW AND current_date PRECEDING) FROM testData ORDER BY cate, val\n------------------------------^^^\n"},"queryContext":[]} -- !query @@ -478,7 +478,7 @@ SELECT val, cate, row_number() OVER(PARTITION BY cate) FROM testData ORDER BY ca struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Window function row_number() requires window to be ordered, please add ORDER BY clause. For example SELECT row_number()(value_expr) OVER (PARTITION BY window_partition ORDER BY window_ordering) from table"],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Window function row_number() requires window to be ordered, please add ORDER BY clause. For example SELECT row_number()(value_expr) OVER (PARTITION BY window_partition ORDER BY window_ordering) from table"},"queryContext":[]} -- !query @@ -550,7 +550,7 @@ FROM testData ORDER BY cate, val struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["window aggregate function with filter predicate is not supported yet."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"window aggregate function with filter predicate is not supported yet."},"queryContext":[]} -- !query @@ -904,7 +904,7 @@ ORDER BY salary DESC struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{"errorClass":"INVALID_SQL_SYNTAX","sqlState":"42000","messageParameters":["The definition of window `w` is repetitive."],"queryContext":[]} +{"errorClass":"INVALID_SQL_SYNTAX","sqlState":"42000","messageParameters":{"inputString":"The definition of window `w` is repetitive."},"queryContext":[]} -- !query @@ -1185,7 +1185,7 @@ FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Window specification w is not defined in the WINDOW clause."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Window specification w is not defined in the WINDOW clause."},"queryContext":[]} -- !query @@ -1197,4 +1197,4 @@ FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{"errorClass":"legacy","messageParameters":["Window specification w is not defined in the WINDOW clause."],"queryContext":[]} +{"errorClass":"legacy","messageParameters":{"message":"Window specification w is not defined in the WINDOW clause."},"queryContext":[]} diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestHelper.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestHelper.scala index 4b8a038c5127b..a4cfd99b16e57 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestHelper.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestHelper.scala @@ -77,11 +77,13 @@ trait SQLQueryTestHelper { } private def toJson(e: SparkThrowable): String = { + assert(e.getParameterNames.size == e.getMessageParameters.size, + "Number of message parameter names and values must be the same") val jValue = ("errorClass" -> e.getErrorClass) ~ ("errorSubClass" -> Option(e.getErrorSubClass)) ~ ("sqlState" -> Option(e.getSqlState)) ~ ("messageParameters" -> - 
JArray(e.getMessageParameters.map(JString(_)).toList)) ~ + JObject((e.getParameterNames zip e.getMessageParameters.map(JString)).toList)) ~ ("queryContext" -> JArray( e.getQueryContext.map(c => JObject( "objectType" -> JString(c.objectType()), @@ -95,7 +97,7 @@ trait SQLQueryTestHelper { private def toLegacyJson(msg: String): String = { val jValue = ("errorClass" -> "legacy") ~ - ("messageParameters" -> JArray(List(JString(msg)))) ~ + ("messageParameters" -> JObject(List("message" -> JString(msg)))) ~ ("queryContext" -> JArray(List.empty)) compact(render(jValue)) } From 6a6669b819b4259cd00cce4cad1b290eec81148c Mon Sep 17 00:00:00 2001 From: Max Gekk Date: Fri, 19 Aug 2022 15:58:27 +0300 Subject: [PATCH 4/4] Output the legacy messages as is --- .../sql-tests/results/ansi/array.sql.out | 21 +- .../sql-tests/results/ansi/cast.sql.out | 92 +- .../sql-tests/results/ansi/date.sql.out | 120 +- .../ansi/datetime-parsing-invalid.sql.out | 7 +- .../ansi/higher-order-functions.sql.out | 7 +- .../sql-tests/results/ansi/interval.sql.out | 917 +++--- .../sql-tests/results/ansi/literals.sql.out | 203 +- .../sql-tests/results/ansi/map.sql.out | 14 +- .../results/ansi/string-functions.sql.out | 56 +- .../sql-tests/results/ansi/timestamp.sql.out | 120 +- .../results/ansi/try_arithmetic.sql.out | 7 +- .../resources/sql-tests/results/array.sql.out | 21 +- .../sql-tests/results/bitwise.sql.out | 28 +- .../resources/sql-tests/results/cast.sql.out | 12 +- .../ceil-floor-with-scale-param.sql.out | 42 +- .../sql-tests/results/change-column.sql.out | 66 +- .../sql-tests/results/charvarchar.sql.out | 14 +- .../results/columnresolution-negative.sql.out | 70 +- .../sql-tests/results/comments.sql.out | 41 +- .../resources/sql-tests/results/count.sql.out | 14 +- .../sql-tests/results/csv-functions.sql.out | 76 +- .../sql-tests/results/cte-legacy.sql.out | 7 +- .../sql-tests/results/cte-nested.sql.out | 56 +- .../resources/sql-tests/results/cte.sql.out | 36 +- .../resources/sql-tests/results/date.sql.out | 127 +- .../datetime-formatting-invalid.sql.out | 105 +- .../datetime-formatting-legacy.sql.out | 42 +- .../sql-tests/results/datetime-legacy.sql.out | 254 +- .../results/datetime-parsing-invalid.sql.out | 7 +- .../sql-tests/results/describe.sql.out | 16 +- .../sql-tests/results/except-all.sql.out | 14 +- .../sql-tests/results/extract.sql.out | 77 +- .../sql-tests/results/group-analytics.sql.out | 38 +- .../sql-tests/results/group-by-filter.sql.out | 21 +- .../results/group-by-ordinal.sql.out | 70 +- .../sql-tests/results/group-by.sql.out | 124 +- .../sql-tests/results/grouping_set.sql.out | 7 +- .../sql-tests/results/having.sql.out | 7 +- .../results/higher-order-functions.sql.out | 7 +- .../sql-tests/results/ilike-all.sql.out | 12 +- .../sql-tests/results/ilike-any.sql.out | 12 +- .../sql-tests/results/inline-table.sql.out | 42 +- .../sql-tests/results/intersect-all.sql.out | 14 +- .../sql-tests/results/interval.sql.out | 917 +++--- .../sql-tests/results/join-lateral.sql.out | 39 +- .../sql-tests/results/json-functions.sql.out | 111 +- .../sql-tests/results/like-all.sql.out | 12 +- .../sql-tests/results/like-any.sql.out | 12 +- .../resources/sql-tests/results/limit.sql.out | 42 +- .../sql-tests/results/literals.sql.out | 203 +- .../resources/sql-tests/results/map.sql.out | 14 +- .../sql-tests/results/misc-functions.sql.out | 49 +- .../results/order-by-ordinal.sql.out | 21 +- .../sql-tests/results/percentiles.sql.out | 84 +- .../resources/sql-tests/results/pivot.sql.out | 21 +- .../postgreSQL/aggregates_part1.sql.out | 
7 +- .../postgreSQL/aggregates_part3.sql.out | 7 +- .../results/postgreSQL/boolean.sql.out | 11 +- .../results/postgreSQL/create_view.sql.out | 119 +- .../sql-tests/results/postgreSQL/date.sql.out | 165 +- .../sql-tests/results/postgreSQL/int8.sql.out | 12 +- .../results/postgreSQL/interval.sql.out | 120 +- .../sql-tests/results/postgreSQL/join.sql.out | 14 +- .../results/postgreSQL/limit.sql.out | 14 +- .../results/postgreSQL/numeric.sql.out | 45 +- .../results/postgreSQL/select_having.sql.out | 7 +- .../postgreSQL/select_implicit.sql.out | 28 +- .../results/postgreSQL/strings.sql.out | 56 +- .../sql-tests/results/postgreSQL/text.sql.out | 28 +- .../results/postgreSQL/window_part3.sql.out | 80 +- .../results/postgreSQL/window_part4.sql.out | 11 +- .../sql-tests/results/postgreSQL/with.sql.out | 38 +- .../sql-tests/results/random.sql.out | 14 +- .../results/regexp-functions.sql.out | 98 +- .../sql-tests/results/show-tables.sql.out | 30 +- .../sql-tests/results/show-views.sql.out | 7 +- .../sql-tests/results/show_columns.sql.out | 35 +- .../sql-compatibility-functions.sql.out | 7 +- .../results/string-functions.sql.out | 56 +- .../subquery/in-subquery/in-basic.sql.out | 15 +- .../invalid-correlation.sql.out | 42 +- .../subq-input-typecheck.sql.out | 59 +- .../sql-tests/results/table-aliases.sql.out | 14 +- .../results/table-valued-functions.sql.out | 55 +- .../results/tablesample-negative.sql.out | 24 +- .../sql-tests/results/timestamp-ntz.sql.out | 7 +- .../sql-tests/results/timestamp.sql.out | 127 +- .../timestampNTZ/timestamp-ansi.sql.out | 141 +- .../results/timestampNTZ/timestamp.sql.out | 148 +- .../sql-tests/results/timezone.sql.out | 79 +- .../sql-tests/results/transform.sql.out | 36 +- .../sql-tests/results/try_arithmetic.sql.out | 7 +- .../native/booleanEquality.sql.out | 168 +- .../native/caseWhenCoercion.sql.out | 490 +--- .../native/dateTimeOperations.sql.out | 189 +- .../native/decimalPrecision.sql.out | 2464 +++-------------- .../typeCoercion/native/division.sql.out | 560 +--- .../typeCoercion/native/ifCoercion.sql.out | 490 +--- .../typeCoercion/native/inConversion.sql.out | 980 +------ .../typeCoercion/native/mapZipWith.sql.out | 14 +- .../typeCoercion/native/mapconcat.sql.out | 35 +- .../native/promoteStrings.sql.out | 329 +-- .../native/stringCastAndExpressions.sql.out | 21 +- .../native/widenSetOperationTypes.sql.out | 490 +--- .../native/windowFrameCoercion.sql.out | 28 +- .../resources/sql-tests/results/udaf.sql.out | 14 +- .../postgreSQL/udf-aggregates_part1.sql.out | 7 +- .../postgreSQL/udf-aggregates_part3.sql.out | 7 +- .../results/udf/postgreSQL/udf-join.sql.out | 14 +- .../udf/postgreSQL/udf-select_having.sql.out | 7 +- .../postgreSQL/udf-select_implicit.sql.out | 28 +- .../results/udf/udf-except-all.sql.out | 14 +- .../sql-tests/results/udf/udf-except.sql.out | 15 +- .../results/udf/udf-group-analytics.sql.out | 14 +- .../results/udf/udf-group-by.sql.out | 100 +- .../results/udf/udf-inline-table.sql.out | 42 +- .../results/udf/udf-intersect-all.sql.out | 14 +- .../sql-tests/results/udf/udf-pivot.sql.out | 21 +- .../sql-tests/results/udf/udf-udaf.sql.out | 14 +- .../sql-tests/results/udf/udf-window.sql.out | 62 +- .../sql-tests/results/window.sql.out | 90 +- .../apache/spark/sql/SQLQueryTestHelper.scala | 16 +- 122 files changed, 2862 insertions(+), 10206 deletions(-) diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/array.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/array.sql.out index 582d0080c82bc..79c0593819964 100644 --- 
a/sql/core/src/test/resources/sql-tests/results/ansi/array.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/array.sql.out @@ -128,12 +128,7 @@ select sort_array(array('b', 'd'), '1') struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'sort_array(array('b', 'd'), '1')' due to data type mismatch: Sort order in second argument requires a boolean literal.; line 1 pos 7" - } -} +cannot resolve 'sort_array(array('b', 'd'), '1')' due to data type mismatch: Sort order in second argument requires a boolean literal.; line 1 pos 7 -- !query @@ -142,12 +137,7 @@ select sort_array(array('b', 'd'), cast(NULL as boolean)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'sort_array(array('b', 'd'), CAST(NULL AS BOOLEAN))' due to data type mismatch: Sort order in second argument requires a boolean literal.; line 1 pos 7" - } -} +cannot resolve 'sort_array(array('b', 'd'), CAST(NULL AS BOOLEAN))' due to data type mismatch: Sort order in second argument requires a boolean literal.; line 1 pos 7 -- !query @@ -412,12 +402,7 @@ select array_size(map('a', 1, 'b', 2)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'array_size(map('a', 1, 'b', 2))' due to data type mismatch: argument 1 requires array type, however, 'map('a', 1, 'b', 2)' is of map type.; line 1 pos 7" - } -} +cannot resolve 'array_size(map('a', 1, 'b', 2))' due to data type mismatch: argument 1 requires array type, however, 'map('a', 1, 'b', 2)' is of map type.; line 1 pos 7 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out index d1b415c6a414f..95b2e0ef42bcd 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out @@ -611,12 +611,10 @@ SELECT HEX(CAST(CAST(123 AS byte) AS binary)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CAST(CAST(123 AS TINYINT) AS BINARY)' due to data type mismatch: \n cannot cast tinyint to binary with ANSI mode on.\n If you have to cast tinyint to binary, you can set spark.sql.ansi.enabled as false.\n; line 1 pos 11" - } -} +cannot resolve 'CAST(CAST(123 AS TINYINT) AS BINARY)' due to data type mismatch: + cannot cast tinyint to binary with ANSI mode on. + If you have to cast tinyint to binary, you can set spark.sql.ansi.enabled as false. +; line 1 pos 11 -- !query @@ -625,12 +623,10 @@ SELECT HEX(CAST(CAST(-123 AS byte) AS binary)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CAST(CAST(-123 AS TINYINT) AS BINARY)' due to data type mismatch: \n cannot cast tinyint to binary with ANSI mode on.\n If you have to cast tinyint to binary, you can set spark.sql.ansi.enabled as false.\n; line 1 pos 11" - } -} +cannot resolve 'CAST(CAST(-123 AS TINYINT) AS BINARY)' due to data type mismatch: + cannot cast tinyint to binary with ANSI mode on. + If you have to cast tinyint to binary, you can set spark.sql.ansi.enabled as false. 
+; line 1 pos 11 -- !query @@ -639,12 +635,10 @@ SELECT HEX(CAST(123S AS binary)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CAST(123S AS BINARY)' due to data type mismatch: \n cannot cast smallint to binary with ANSI mode on.\n If you have to cast smallint to binary, you can set spark.sql.ansi.enabled as false.\n; line 1 pos 11" - } -} +cannot resolve 'CAST(123S AS BINARY)' due to data type mismatch: + cannot cast smallint to binary with ANSI mode on. + If you have to cast smallint to binary, you can set spark.sql.ansi.enabled as false. +; line 1 pos 11 -- !query @@ -653,12 +647,10 @@ SELECT HEX(CAST(-123S AS binary)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CAST(-123S AS BINARY)' due to data type mismatch: \n cannot cast smallint to binary with ANSI mode on.\n If you have to cast smallint to binary, you can set spark.sql.ansi.enabled as false.\n; line 1 pos 11" - } -} +cannot resolve 'CAST(-123S AS BINARY)' due to data type mismatch: + cannot cast smallint to binary with ANSI mode on. + If you have to cast smallint to binary, you can set spark.sql.ansi.enabled as false. +; line 1 pos 11 -- !query @@ -667,12 +659,10 @@ SELECT HEX(CAST(123 AS binary)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CAST(123 AS BINARY)' due to data type mismatch: \n cannot cast int to binary with ANSI mode on.\n If you have to cast int to binary, you can set spark.sql.ansi.enabled as false.\n; line 1 pos 11" - } -} +cannot resolve 'CAST(123 AS BINARY)' due to data type mismatch: + cannot cast int to binary with ANSI mode on. + If you have to cast int to binary, you can set spark.sql.ansi.enabled as false. +; line 1 pos 11 -- !query @@ -681,12 +671,10 @@ SELECT HEX(CAST(-123 AS binary)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CAST(-123 AS BINARY)' due to data type mismatch: \n cannot cast int to binary with ANSI mode on.\n If you have to cast int to binary, you can set spark.sql.ansi.enabled as false.\n; line 1 pos 11" - } -} +cannot resolve 'CAST(-123 AS BINARY)' due to data type mismatch: + cannot cast int to binary with ANSI mode on. + If you have to cast int to binary, you can set spark.sql.ansi.enabled as false. +; line 1 pos 11 -- !query @@ -695,12 +683,10 @@ SELECT HEX(CAST(123L AS binary)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CAST(123L AS BINARY)' due to data type mismatch: \n cannot cast bigint to binary with ANSI mode on.\n If you have to cast bigint to binary, you can set spark.sql.ansi.enabled as false.\n; line 1 pos 11" - } -} +cannot resolve 'CAST(123L AS BINARY)' due to data type mismatch: + cannot cast bigint to binary with ANSI mode on. + If you have to cast bigint to binary, you can set spark.sql.ansi.enabled as false. 
+; line 1 pos 11 -- !query @@ -709,12 +695,10 @@ SELECT HEX(CAST(-123L AS binary)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CAST(-123L AS BINARY)' due to data type mismatch: \n cannot cast bigint to binary with ANSI mode on.\n If you have to cast bigint to binary, you can set spark.sql.ansi.enabled as false.\n; line 1 pos 11" - } -} +cannot resolve 'CAST(-123L AS BINARY)' due to data type mismatch: + cannot cast bigint to binary with ANSI mode on. + If you have to cast bigint to binary, you can set spark.sql.ansi.enabled as false. +; line 1 pos 11 -- !query @@ -772,12 +756,12 @@ SELECT CAST(interval 3 month 1 hour AS string) struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot mix year-month and day-time fields: interval 3 month 1 hour(line 1, pos 12)\n\n== SQL ==\nSELECT CAST(interval 3 month 1 hour AS string)\n------------^^^\n" - } -} + +Cannot mix year-month and day-time fields: interval 3 month 1 hour(line 1, pos 12) + +== SQL == +SELECT CAST(interval 3 month 1 hour AS string) +------------^^^ -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out index 8cefaab3ec2cc..83ff07bc43fac 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out @@ -21,12 +21,12 @@ select date '2020-01-01中文' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot parse the DATE value: 2020-01-01中文(line 1, pos 7)\n\n== SQL ==\nselect date '2020-01-01中文'\n-------^^^\n" - } -} + +Cannot parse the DATE value: 2020-01-01中文(line 1, pos 7) + +== SQL == +select date '2020-01-01中文' +-------^^^ -- !query @@ -43,12 +43,7 @@ select make_date(2000, 13, 1) struct<> -- !query output java.time.DateTimeException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Invalid value for MonthOfYear (valid values 1 - 12): 13. If necessary set spark.sql.ansi.enabled to false to bypass this error." - } -} +Invalid value for MonthOfYear (valid values 1 - 12): 13. If necessary set spark.sql.ansi.enabled to false to bypass this error. -- !query @@ -57,12 +52,7 @@ select make_date(2000, 1, 33) struct<> -- !query output java.time.DateTimeException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Invalid value for DayOfMonth (valid values 1 - 28/31): 33. If necessary set spark.sql.ansi.enabled to false to bypass this error." - } -} +Invalid value for DayOfMonth (valid values 1 - 28/31): 33. If necessary set spark.sql.ansi.enabled to false to bypass this error. 
-- !query @@ -71,12 +61,12 @@ select date'015' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot parse the DATE value: 015(line 1, pos 7)\n\n== SQL ==\nselect date'015'\n-------^^^\n" - } -} + +Cannot parse the DATE value: 015(line 1, pos 7) + +== SQL == +select date'015' +-------^^^ -- !query @@ -85,12 +75,12 @@ select date'2021-4294967297-11' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot parse the DATE value: 2021-4294967297-11(line 1, pos 7)\n\n== SQL ==\nselect date'2021-4294967297-11'\n-------^^^\n" - } -} + +Cannot parse the DATE value: 2021-4294967297-11(line 1, pos 7) + +== SQL == +select date'2021-4294967297-11' +-------^^^ -- !query @@ -213,12 +203,7 @@ select next_day("2015-07-23", "xx") struct<> -- !query output java.lang.IllegalArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Illegal input for day of week: xx. If necessary set spark.sql.ansi.enabled to false to bypass this error." - } -} +Illegal input for day of week: xx. If necessary set spark.sql.ansi.enabled to false to bypass this error. -- !query @@ -324,12 +309,7 @@ select date_add('2011-11-11', 1L) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1L)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1L' is of bigint type.; line 1 pos 7" - } -} +cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1L)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1L' is of bigint type.; line 1 pos 7 -- !query @@ -338,12 +318,7 @@ select date_add('2011-11-11', 1.0) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1.0BD)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7" - } -} +cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1.0BD)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7 -- !query @@ -352,12 +327,7 @@ select date_add('2011-11-11', 1E1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7" - } -} +cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7 -- !query @@ -463,12 +433,7 @@ select date_sub('2011-11-11', 1L) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 1L)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1L' is of bigint type.; line 1 pos 7" - } -} +cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 1L)' due to data type mismatch: 
argument 2 requires (int or smallint or tinyint) type, however, '1L' is of bigint type.; line 1 pos 7 -- !query @@ -477,12 +442,7 @@ select date_sub('2011-11-11', 1.0) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 1.0BD)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7" - } -} +cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 1.0BD)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7 -- !query @@ -491,12 +451,7 @@ select date_sub('2011-11-11', 1E1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7" - } -} +cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7 -- !query @@ -602,12 +557,7 @@ select date '2011-11-11' + 1E1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'date_add(DATE '2011-11-11', 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7" - } -} +cannot resolve 'date_add(DATE '2011-11-11', 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7 -- !query @@ -696,12 +646,7 @@ select date'2011-11-11' + '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'date_add(DATE '2011-11-11', CAST('1' AS DATE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DATE)' is of date type.; line 1 pos 7" - } -} +cannot resolve 'date_add(DATE '2011-11-11', CAST('1' AS DATE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DATE)' is of date type.; line 1 pos 7 -- !query @@ -710,12 +655,7 @@ select '1' + date'2011-11-11' struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'date_add(CAST('1' AS DATE), DATE '2011-11-11')' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'DATE '2011-11-11'' is of date type.; line 1 pos 7" - } -} +cannot resolve 'date_add(CAST('1' AS DATE), DATE '2011-11-11')' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'DATE '2011-11-11'' is of date type.; line 1 pos 7 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out index 7b8afa65b2737..507ce620b8995 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out @@ -5,12 +5,7 @@ 
select to_timestamp('294248', 'y') struct<> -- !query output java.lang.ArithmeticException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "long overflow" - } -} +long overflow -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/higher-order-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/higher-order-functions.sql.out index 7c22d70e216df..c6bbb4fb7179a 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/higher-order-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/higher-order-functions.sql.out @@ -17,12 +17,7 @@ select upper(x -> x) as v struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "A lambda function should only be used in a higher order function. However, its class is org.apache.spark.sql.catalyst.expressions.Upper, which is not a higher order function.; line 1 pos 7" - } -} +A lambda function should only be used in a higher order function. However, its class is org.apache.spark.sql.catalyst.expressions.Upper, which is not a higher order function.; line 1 pos 7 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out index 43a48562453c3..197370f5a15c6 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out @@ -13,12 +13,12 @@ select interval 4 month 2 weeks 3 microseconds * 1.5 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot mix year-month and day-time fields: interval 4 month 2 weeks 3 microseconds(line 1, pos 7)\n\n== SQL ==\nselect interval 4 month 2 weeks 3 microseconds * 1.5\n-------^^^\n" - } -} + +Cannot mix year-month and day-time fields: interval 4 month 2 weeks 3 microseconds(line 1, pos 7) + +== SQL == +select interval 4 month 2 weeks 3 microseconds * 1.5 +-------^^^ -- !query @@ -51,12 +51,7 @@ select interval 2147483647 month * 2 struct<> -- !query output java.lang.ArithmeticException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "integer overflow" - } -} +integer overflow -- !query @@ -65,12 +60,7 @@ select interval 2147483647 month / 0.5 struct<> -- !query output java.lang.ArithmeticException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Overflow" - } -} +Overflow -- !query @@ -79,12 +69,7 @@ select interval 2147483647 day * 2 struct<> -- !query output java.lang.ArithmeticException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "long overflow" - } -} +long overflow -- !query @@ -93,12 +78,7 @@ select interval 2147483647 day / 0.5 struct<> -- !query output java.lang.ArithmeticException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "long overflow" - } -} +long overflow -- !query @@ -305,12 +285,7 @@ select '2' / interval 2 second struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '('2' / INTERVAL '02' SECOND)' due to data type mismatch: differing types in '('2' / INTERVAL '02' SECOND)' (string and interval second).; line 1 pos 7" - } -} +cannot resolve '('2' / INTERVAL '02' SECOND)' due to data type mismatch: differing types in '('2' / INTERVAL '02' SECOND)' (string and interval 
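For the structured path, the first patch in this series changed messageParameters from a JSON array of values to an object keyed by parameter name (getParameterNames zipped with getMessageParameters). A self-contained sketch of that construction with json4s, which the helper itself uses; the object name, method signature, and main method here are illustrative:

import org.json4s.JsonDSL._
import org.json4s.JsonAST.{JObject, JString, JValue}
import org.json4s.jackson.JsonMethods.{compact, render}

object MessageParametersSketch {
  def toJson(errorClass: String, names: Seq[String], values: Seq[String]): String = {
    // Mirror the helper's assert: one parameter name per value.
    require(names.size == values.size,
      "Number of message parameter names and values must be the same")
    // Name-keyed object instead of a bare array of values.
    val params: JValue = JObject((names zip values.map(JString)).toList)
    val jValue = ("errorClass" -> errorClass) ~ ("messageParameters" -> params)
    compact(render(jValue))
  }

  def main(args: Array[String]): Unit = {
    // Prints: {"errorClass":"UNRESOLVED_COLUMN","messageParameters":{"objectName":"`k`"}}
    println(toJson("UNRESOLVED_COLUMN", Seq("objectName"), Seq("`k`")))
  }
}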


 -- !query
@@ -319,12 +294,7 @@ select '2' / interval 2 year
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '('2' / INTERVAL '2' YEAR)' due to data type mismatch: differing types in '('2' / INTERVAL '2' YEAR)' (string and interval year).; line 1 pos 7"
-  }
-}
+cannot resolve '('2' / INTERVAL '2' YEAR)' due to data type mismatch: differing types in '('2' / INTERVAL '2' YEAR)' (string and interval year).; line 1 pos 7


 -- !query
@@ -421,12 +391,7 @@ select 2 / interval '2' year
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(2 / INTERVAL '2' YEAR)' due to data type mismatch: differing types in '(2 / INTERVAL '2' YEAR)' (int and interval year).; line 1 pos 7"
-  }
-}
+cannot resolve '(2 / INTERVAL '2' YEAR)' due to data type mismatch: differing types in '(2 / INTERVAL '2' YEAR)' (int and interval year).; line 1 pos 7


 -- !query
@@ -435,12 +400,7 @@ select 2 / interval '2' hour
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(2 / INTERVAL '02' HOUR)' due to data type mismatch: differing types in '(2 / INTERVAL '02' HOUR)' (int and interval hour).; line 1 pos 7"
-  }
-}
+cannot resolve '(2 / INTERVAL '02' HOUR)' due to data type mismatch: differing types in '(2 / INTERVAL '02' HOUR)' (int and interval hour).; line 1 pos 7


 -- !query
@@ -449,12 +409,7 @@ select null / interval '2' year
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(NULL / INTERVAL '2' YEAR)' due to data type mismatch: differing types in '(NULL / INTERVAL '2' YEAR)' (void and interval year).; line 1 pos 7"
-  }
-}
+cannot resolve '(NULL / INTERVAL '2' YEAR)' due to data type mismatch: differing types in '(NULL / INTERVAL '2' YEAR)' (void and interval year).; line 1 pos 7


 -- !query
@@ -463,12 +418,7 @@ select null / interval '2' hour
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(NULL / INTERVAL '02' HOUR)' due to data type mismatch: differing types in '(NULL / INTERVAL '02' HOUR)' (void and interval hour).; line 1 pos 7"
-  }
-}
+cannot resolve '(NULL / INTERVAL '02' HOUR)' due to data type mismatch: differing types in '(NULL / INTERVAL '02' HOUR)' (void and interval hour).; line 1 pos 7


 -- !query
@@ -477,12 +427,12 @@ select -interval '-1 month 1 day -1 second'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nCannot mix year-month and day-time fields: interval '-1 month 1 day -1 second'(line 1, pos 8)\n\n== SQL ==\nselect -interval '-1 month 1 day -1 second'\n--------^^^\n"
-  }
-}
+
+Cannot mix year-month and day-time fields: interval '-1 month 1 day -1 second'(line 1, pos 8)
+
+== SQL ==
+select -interval '-1 month 1 day -1 second'
+--------^^^


 -- !query
@@ -507,12 +457,12 @@ select -interval -1 month 1 day -1 second
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nCannot mix year-month and day-time fields: interval -1 month 1 day -1 second(line 1, pos 8)\n\n== SQL ==\nselect -interval -1 month 1 day -1 second\n--------^^^\n"
-  }
-}
+
+Cannot mix year-month and day-time fields: interval -1 month 1 day -1 second(line 1, pos 8)
+
+== SQL ==
+select -interval -1 month 1 day -1 second
+--------^^^


 -- !query
@@ -537,12 +487,12 @@ select +interval '-1 month 1 day -1 second'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nCannot mix year-month and day-time fields: interval '-1 month 1 day -1 second'(line 1, pos 8)\n\n== SQL ==\nselect +interval '-1 month 1 day -1 second'\n--------^^^\n"
-  }
-}
+
+Cannot mix year-month and day-time fields: interval '-1 month 1 day -1 second'(line 1, pos 8)
+
+== SQL ==
+select +interval '-1 month 1 day -1 second'
+--------^^^


 -- !query
@@ -567,12 +517,12 @@ select +interval -1 month 1 day -1 second
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nCannot mix year-month and day-time fields: interval -1 month 1 day -1 second(line 1, pos 8)\n\n== SQL ==\nselect +interval -1 month 1 day -1 second\n--------^^^\n"
-  }
-}
+
+Cannot mix year-month and day-time fields: interval -1 month 1 day -1 second(line 1, pos 8)
+
+== SQL ==
+select +interval -1 month 1 day -1 second
+--------^^^


 -- !query
@@ -870,12 +820,7 @@ select make_dt_interval(2147483647)
 struct<>
 -- !query output
 java.lang.ArithmeticException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "long overflow"
-  }
-}
+long overflow


 -- !query
@@ -916,12 +861,7 @@ select make_ym_interval(178956970, 8)
 struct<>
 -- !query output
 java.lang.ArithmeticException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "integer overflow"
-  }
-}
+integer overflow


 -- !query
@@ -938,12 +878,7 @@ select make_ym_interval(-178956970, -9)
 struct<>
 -- !query output
 java.lang.ArithmeticException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "integer overflow"
-  }
-}
+integer overflow


 -- !query
@@ -1016,12 +951,12 @@ select interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 millisec
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nCannot mix year-month and day-time fields: interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 millisecond 9 microsecond(line 1, pos 7)\n\n== SQL ==\nselect interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 millisecond 9 microsecond\n-------^^^\n"
-  }
-}
+
+Cannot mix year-month and day-time fields: interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 millisecond 9 microsecond(line 1, pos 7)
+
+== SQL ==
+select interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 millisecond 9 microsecond
+-------^^^


 -- !query
@@ -1054,12 +989,12 @@ select interval '30' year '25' month '-100' day '40' hour '80' minute '299.88998
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nCannot mix year-month and day-time fields: interval '30' year '25' month '-100' day '40' hour '80' minute '299.889987299' second(line 1, pos 7)\n\n== SQL ==\nselect interval '30' year '25' month '-100' day '40' hour '80' minute '299.889987299' second\n-------^^^\n"
-  }
-}
+
+Cannot mix year-month and day-time fields: interval '30' year '25' month '-100' day '40' hour '80' minute '299.889987299' second(line 1, pos 7)
+
+== SQL ==
+select interval '30' year '25' month '-100' day '40' hour '80' minute '299.889987299' second
+-------^^^


 -- !query
@@ -1188,12 +1123,12 @@ select interval '20 15:40:32.99899999' day to hour
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nInterval string does not match day-time format of `[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval day to hour: 20 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '20 15:40:32.99899999' day to hour\n----------------^^^\n"
-  }
-}
+
+Interval string does not match day-time format of `[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval day to hour: 20 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)
+
+== SQL ==
+select interval '20 15:40:32.99899999' day to hour
+----------------^^^


 -- !query
@@ -1202,12 +1137,12 @@ select interval '20 15:40:32.99899999' day to minute
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nInterval string does not match day-time format of `[+|-]d h:m`, `INTERVAL [+|-]'[+|-]d h:m' DAY TO MINUTE` when cast to interval day to minute: 20 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '20 15:40:32.99899999' day to minute\n----------------^^^\n"
-  }
-}
+
+Interval string does not match day-time format of `[+|-]d h:m`, `INTERVAL [+|-]'[+|-]d h:m' DAY TO MINUTE` when cast to interval day to minute: 20 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)
+
+== SQL ==
+select interval '20 15:40:32.99899999' day to minute
+----------------^^^


 -- !query
@@ -1216,12 +1151,12 @@ select interval '15:40:32.99899999' hour to minute
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nInterval string does not match day-time format of `[+|-]h:m`, `INTERVAL [+|-]'[+|-]h:m' HOUR TO MINUTE` when cast to interval hour to minute: 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '15:40:32.99899999' hour to minute\n----------------^^^\n"
-  }
-}
+
+Interval string does not match day-time format of `[+|-]h:m`, `INTERVAL [+|-]'[+|-]h:m' HOUR TO MINUTE` when cast to interval hour to minute: 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)
+
+== SQL ==
+select interval '15:40:32.99899999' hour to minute
+----------------^^^


 -- !query
@@ -1230,12 +1165,12 @@ select interval '15:40.99899999' hour to second
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nInterval string does not match day-time format of `[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 15:40.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '15:40.99899999' hour to second\n----------------^^^\n"
-  }
-}
+
+Interval string does not match day-time format of `[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 15:40.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)
+
+== SQL ==
+select interval '15:40.99899999' hour to second
+----------------^^^


 -- !query
@@ -1244,12 +1179,12 @@ select interval '15:40' hour to second
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nInterval string does not match day-time format of `[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 15:40, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '15:40' hour to second\n----------------^^^\n"
-  }
-}
+
+Interval string does not match day-time format of `[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 15:40, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)
+
+== SQL ==
+select interval '15:40' hour to second
+----------------^^^


 -- !query
@@ -1258,12 +1193,12 @@ select interval '20 40:32.99899999' minute to second
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nInterval string does not match day-time format of `[+|-]m:s.n`, `INTERVAL [+|-]'[+|-]m:s.n' MINUTE TO SECOND` when cast to interval minute to second: 20 40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '20 40:32.99899999' minute to second\n----------------^^^\n"
-  }
-}
+
+Interval string does not match day-time format of `[+|-]m:s.n`, `INTERVAL [+|-]'[+|-]m:s.n' MINUTE TO SECOND` when cast to interval minute to second: 20 40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)
+
+== SQL ==
+select interval '20 40:32.99899999' minute to second
+----------------^^^


 -- !query
@@ -1272,12 +1207,12 @@ select interval 10 nanoseconds
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nError parsing ' 10 nanoseconds' to interval, invalid unit 'nanoseconds'(line 1, pos 16)\n\n== SQL ==\nselect interval 10 nanoseconds\n----------------^^^\n"
-  }
-}
+
+Error parsing ' 10 nanoseconds' to interval, invalid unit 'nanoseconds'(line 1, pos 16)
+
+== SQL ==
+select interval 10 nanoseconds
+----------------^^^


 -- !query
@@ -1366,12 +1301,12 @@ select interval
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nat least one time unit should be given for interval literal(line 1, pos 7)\n\n== SQL ==\nselect interval\n-------^^^\n"
-  }
-}
+
+at least one time unit should be given for interval literal(line 1, pos 7)
+
+== SQL ==
+select interval
+-------^^^


 -- !query
@@ -1380,12 +1315,12 @@ select interval 1 fake_unit
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
"message" : "\nError parsing ' 1 fake_unit' to interval, invalid unit 'fake_unit'(line 1, pos 16)\n\n== SQL ==\nselect interval 1 fake_unit\n----------------^^^\n" - } -} + +Error parsing ' 1 fake_unit' to interval, invalid unit 'fake_unit'(line 1, pos 16) + +== SQL == +select interval 1 fake_unit +----------------^^^ -- !query @@ -1394,12 +1329,12 @@ select interval 1 year to month struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nThe value of from-to unit must be a string(line 1, pos 16)\n\n== SQL ==\nselect interval 1 year to month\n----------------^^^\n" - } -} + +The value of from-to unit must be a string(line 1, pos 16) + +== SQL == +select interval 1 year to month +----------------^^^ -- !query @@ -1408,12 +1343,12 @@ select interval '1' year to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nIntervals FROM year TO second are not supported.(line 1, pos 16)\n\n== SQL ==\nselect interval '1' year to second\n----------------^^^\n" - } -} + +Intervals FROM year TO second are not supported.(line 1, pos 16) + +== SQL == +select interval '1' year to second +----------------^^^ -- !query @@ -1422,12 +1357,12 @@ select interval '10-9' year to month '2-1' year to month struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 37)\n\n== SQL ==\nselect interval '10-9' year to month '2-1' year to month\n-------------------------------------^^^\n" - } -} + +Can only have a single from-to unit in the interval literal syntax(line 1, pos 37) + +== SQL == +select interval '10-9' year to month '2-1' year to month +-------------------------------------^^^ -- !query @@ -1436,12 +1371,12 @@ select interval '10-9' year to month '12:11:10' hour to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 37)\n\n== SQL ==\nselect interval '10-9' year to month '12:11:10' hour to second\n-------------------------------------^^^\n" - } -} + +Can only have a single from-to unit in the interval literal syntax(line 1, pos 37) + +== SQL == +select interval '10-9' year to month '12:11:10' hour to second +-------------------------------------^^^ -- !query @@ -1450,12 +1385,12 @@ select interval '1 15:11' day to minute '12:11:10' hour to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 40)\n\n== SQL ==\nselect interval '1 15:11' day to minute '12:11:10' hour to second\n----------------------------------------^^^\n" - } -} + +Can only have a single from-to unit in the interval literal syntax(line 1, pos 40) + +== SQL == +select interval '1 15:11' day to minute '12:11:10' hour to second +----------------------------------------^^^ -- !query @@ -1464,12 +1399,12 @@ select interval 1 year '2-1' year to month struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCan only have a single 
-  }
-}
+
+Can only have a single from-to unit in the interval literal syntax(line 1, pos 23)
+
+== SQL ==
+select interval 1 year '2-1' year to month
+-----------------------^^^


 -- !query
@@ -1478,12 +1413,12 @@ select interval 1 year '12:11:10' hour to second
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 23)\n\n== SQL ==\nselect interval 1 year '12:11:10' hour to second\n-----------------------^^^\n"
-  }
-}
+
+Can only have a single from-to unit in the interval literal syntax(line 1, pos 23)
+
+== SQL ==
+select interval 1 year '12:11:10' hour to second
+-----------------------^^^


 -- !query
@@ -1492,12 +1427,12 @@ select interval '10-9' year to month '1' year
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 37)\n\n== SQL ==\nselect interval '10-9' year to month '1' year\n-------------------------------------^^^\n"
-  }
-}
+
+Can only have a single from-to unit in the interval literal syntax(line 1, pos 37)
+
+== SQL ==
+select interval '10-9' year to month '1' year
+-------------------------------------^^^


 -- !query
@@ -1506,12 +1441,12 @@ select interval '12:11:10' hour to second '1' year
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 42)\n\n== SQL ==\nselect interval '12:11:10' hour to second '1' year\n------------------------------------------^^^\n"
-  }
-}
+
+Can only have a single from-to unit in the interval literal syntax(line 1, pos 42)
+
+== SQL ==
+select interval '12:11:10' hour to second '1' year
+------------------------------------------^^^


 -- !query
@@ -1520,12 +1455,7 @@ select interval (-30) day
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7"
-  }
-}
+Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7


 -- !query
@@ -1534,12 +1464,7 @@ select interval (a + 1) day
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7"
-  }
-}
+Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7


 -- !query
@@ -1564,12 +1489,7 @@ select interval (-30) days
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7"
-  }
-}
+Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7


 -- !query
@@ -1578,12 +1498,7 @@ select interval (a + 1) days
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7"
-  }
-}
+Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7


 -- !query
@@ -1616,12 +1531,12 @@ SELECT INTERVAL '178956970-8' YEAR TO MONTH
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nError parsing interval year-month string: integer overflow(line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '178956970-8' YEAR TO MONTH\n----------------^^^\n"
-  }
-}
+
+Error parsing interval year-month string: integer overflow(line 1, pos 16)
+
+== SQL ==
+SELECT INTERVAL '178956970-8' YEAR TO MONTH
+----------------^^^


 -- !query
@@ -1666,12 +1581,7 @@ select
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(INTERVAL '2' YEAR + '3-3 year to month')' due to data type mismatch: differing types in '(INTERVAL '2' YEAR + '3-3 year to month')' (interval year and string).; line 2 pos 2"
-  }
-}
+cannot resolve '(INTERVAL '2' YEAR + '3-3 year to month')' due to data type mismatch: differing types in '(INTERVAL '2' YEAR + '3-3 year to month')' (interval year and string).; line 2 pos 2


 -- !query
@@ -1696,12 +1606,7 @@ select interval '2' year + '3-3'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(INTERVAL '2' YEAR + '3-3')' due to data type mismatch: differing types in '(INTERVAL '2' YEAR + '3-3')' (interval year and string).; line 1 pos 7"
-  }
-}
+cannot resolve '(INTERVAL '2' YEAR + '3-3')' due to data type mismatch: differing types in '(INTERVAL '2' YEAR + '3-3')' (interval year and string).; line 1 pos 7


 -- !query
@@ -1710,12 +1615,7 @@ select interval '2' year - '4'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(INTERVAL '2' YEAR - '4')' due to data type mismatch: differing types in '(INTERVAL '2' YEAR - '4')' (interval year and string).; line 1 pos 7"
-  }
-}
+cannot resolve '(INTERVAL '2' YEAR - '4')' due to data type mismatch: differing types in '(INTERVAL '2' YEAR - '4')' (interval year and string).; line 1 pos 7


 -- !query
@@ -1782,12 +1682,7 @@ select interval '2' year + str from interval_view
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(INTERVAL '2' YEAR + interval_view.str)' due to data type mismatch: differing types in '(INTERVAL '2' YEAR + interval_view.str)' (interval year and string).; line 1 pos 7"
-  }
-}
+cannot resolve '(INTERVAL '2' YEAR + interval_view.str)' due to data type mismatch: differing types in '(INTERVAL '2' YEAR + interval_view.str)' (interval year and string).; line 1 pos 7


 -- !query
@@ -1796,12 +1691,7 @@ select interval '2' year - str from interval_view
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(INTERVAL '2' YEAR - interval_view.str)' due to data type mismatch: differing types in '(INTERVAL '2' YEAR - interval_view.str)' (interval year and string).; line 1 pos 7"
-  }
-}
+cannot resolve '(INTERVAL '2' YEAR - interval_view.str)' due to data type mismatch: differing types in '(INTERVAL '2' YEAR - interval_view.str)' (interval year and string).; line 1 pos 7


 -- !query
@@ -1860,12 +1750,7 @@ select interval '2-2' year to month + interval '3' day
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve 'INTERVAL '2-2' YEAR TO MONTH + INTERVAL '3' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2-2' YEAR TO MONTH' is of interval year to month type.; line 1 pos 7"
-  }
-}
+cannot resolve 'INTERVAL '2-2' YEAR TO MONTH + INTERVAL '3' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2-2' YEAR TO MONTH' is of interval year to month type.; line 1 pos 7


 -- !query
@@ -1874,12 +1759,7 @@ select interval '3' day + interval '2-2' year to month
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve 'INTERVAL '2-2' YEAR TO MONTH + INTERVAL '3' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2-2' YEAR TO MONTH' is of interval year to month type.; line 1 pos 7"
-  }
-}
+cannot resolve 'INTERVAL '2-2' YEAR TO MONTH + INTERVAL '3' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2-2' YEAR TO MONTH' is of interval year to month type.; line 1 pos 7


 -- !query
@@ -1888,12 +1768,7 @@ select interval '2-2' year to month - interval '3' day
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve 'INTERVAL '2-2' YEAR TO MONTH + (- INTERVAL '3' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2-2' YEAR TO MONTH' is of interval year to month type.; line 1 pos 7"
-  }
-}
+cannot resolve 'INTERVAL '2-2' YEAR TO MONTH + (- INTERVAL '3' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2-2' YEAR TO MONTH' is of interval year to month type.; line 1 pos 7


 -- !query
@@ -1902,12 +1777,7 @@ select interval '3' day - interval '2-2' year to month
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(INTERVAL '3' DAY - INTERVAL '2-2' YEAR TO MONTH)' due to data type mismatch: differing types in '(INTERVAL '3' DAY - INTERVAL '2-2' YEAR TO MONTH)' (interval day and interval year to month).; line 1 pos 7"
-  }
-}
+cannot resolve '(INTERVAL '3' DAY - INTERVAL '2-2' YEAR TO MONTH)' due to data type mismatch: differing types in '(INTERVAL '3' DAY - INTERVAL '2-2' YEAR TO MONTH)' (interval day and interval year to month).; line 1 pos 7


 -- !query
@@ -1916,12 +1786,7 @@ select 1 - interval '2' second
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '1 + (- INTERVAL '02' SECOND)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, '1' is of int type.; line 1 pos 7"
-  }
-}
+cannot resolve '1 + (- INTERVAL '02' SECOND)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, '1' is of int type.; line 1 pos 7


 -- !query
@@ -1930,12 +1795,7 @@ select 1 + interval '2' month
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(1 + INTERVAL '2' MONTH)' due to data type mismatch: differing types in '(1 + INTERVAL '2' MONTH)' (int and interval month).; line 1 pos 7"
-  }
-}
+cannot resolve '(1 + INTERVAL '2' MONTH)' due to data type mismatch: differing types in '(1 + INTERVAL '2' MONTH)' (int and interval month).; line 1 pos 7


 -- !query
@@ -1944,12 +1804,7 @@ select interval '2' second + 1
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '1 + INTERVAL '02' SECOND' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, '1' is of int type.; line 1 pos 7"
-  }
-}
+cannot resolve '1 + INTERVAL '02' SECOND' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, '1' is of int type.; line 1 pos 7


 -- !query
@@ -1958,12 +1813,7 @@ select interval '2' month - 1
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(INTERVAL '2' MONTH - 1)' due to data type mismatch: differing types in '(INTERVAL '2' MONTH - 1)' (interval month and int).; line 1 pos 7"
-  }
-}
+cannot resolve '(INTERVAL '2' MONTH - 1)' due to data type mismatch: differing types in '(INTERVAL '2' MONTH - 1)' (interval month and int).; line 1 pos 7


 -- !query
@@ -2020,12 +1870,12 @@ select interval '-\t2-2\t' year to month
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nInterval string does not match year-month format of `[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval year to month: -\t2-2\t(line 1, pos 16)\n\n== SQL ==\nselect interval '-\\t2-2\\t' year to month\n----------------^^^\n"
-  }
-}
+
+Interval string does not match year-month format of `[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval year to month: - 2-2 (line 1, pos 16)
+
+== SQL ==
+select interval '-\t2-2\t' year to month
+----------------^^^


 -- !query
@@ -2042,12 +1892,13 @@ select interval '\n-\t10\t 12:34:46.789\t' day to second
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nInterval string does not match day-time format of `[+|-]d h:m:s.n`, `INTERVAL [+|-]'[+|-]d h:m:s.n' DAY TO SECOND` when cast to interval day to second: \n-\t10\t 12:34:46.789\t, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '\\n-\\t10\\t 12:34:46.789\\t' day to second\n----------------^^^\n"
-  }
-}
+
+Interval string does not match day-time format of `[+|-]d h:m:s.n`, `INTERVAL [+|-]'[+|-]d h:m:s.n' DAY TO SECOND` when cast to interval day to second: 
+- 10 12:34:46.789 , set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)
+
+== SQL ==
+select interval '\n-\t10\t 12:34:46.789\t' day to second
+----------------^^^


 -- !query
@@ -2056,12 +1907,12 @@ select interval '中文 interval 1 day'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nCannot parse the INTERVAL value: 中文 interval 1 day(line 1, pos 7)\n\n== SQL ==\nselect interval '中文 interval 1 day'\n-------^^^\n"
-  }
-}
+
+Cannot parse the INTERVAL value: 中文 interval 1 day(line 1, pos 7)
+
+== SQL ==
+select interval '中文 interval 1 day'
+-------^^^


 -- !query
@@ -2070,12 +1921,12 @@ select interval 'interval中文 1 day'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nCannot parse the INTERVAL value: interval中文 1 day(line 1, pos 7)\n\n== SQL ==\nselect interval 'interval中文 1 day'\n-------^^^\n"
-  }
-}
+
+Cannot parse the INTERVAL value: interval中文 1 day(line 1, pos 7)
+
+== SQL ==
+select interval 'interval中文 1 day'
+-------^^^


 -- !query
@@ -2084,12 +1935,12 @@ select interval 'interval 1中文day'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nCannot parse the INTERVAL value: interval 1中文day(line 1, pos 7)\n\n== SQL ==\nselect interval 'interval 1中文day'\n-------^^^\n"
-  }
-}
+
+Cannot parse the INTERVAL value: interval 1中文day(line 1, pos 7)
+
+== SQL ==
+select interval 'interval 1中文day'
+-------^^^


 -- !query
@@ -2146,12 +1997,7 @@ select a * 1.1 from values (interval '-2147483648 months', interval '2147483647
 struct<>
 -- !query output
 java.lang.ArithmeticException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Overflow"
-  }
-}
+Overflow


 -- !query
@@ -2160,12 +2006,7 @@ select a / 0.5 from values (interval '-2147483648 months', interval '2147483647
 struct<>
 -- !query output
 java.lang.ArithmeticException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Overflow"
-  }
-}
+Overflow


 -- !query
@@ -2226,12 +2067,12 @@ select interval '+'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nCannot parse the INTERVAL value: +(line 1, pos 7)\n\n== SQL ==\nselect interval '+'\n-------^^^\n"
-  }
-}
+
+Cannot parse the INTERVAL value: +(line 1, pos 7)
+
+== SQL ==
+select interval '+'
+-------^^^


 -- !query
@@ -2240,12 +2081,12 @@ select interval '+.'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nCannot parse the INTERVAL value: +.(line 1, pos 7)\n\n== SQL ==\nselect interval '+.'\n-------^^^\n"
-  }
-}
+
+Cannot parse the INTERVAL value: +.(line 1, pos 7)
+
+== SQL ==
+select interval '+.'
+-------^^^


 -- !query
@@ -2254,12 +2095,12 @@ select interval '1'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nCannot parse the INTERVAL value: 1(line 1, pos 7)\n\n== SQL ==\nselect interval '1'\n-------^^^\n"
-  }
-}
+
+Cannot parse the INTERVAL value: 1(line 1, pos 7)
+
+== SQL ==
+select interval '1'
+-------^^^


 -- !query
@@ -2268,12 +2109,12 @@ select interval '1.2'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nCannot parse the INTERVAL value: 1.2(line 1, pos 7)\n\n== SQL ==\nselect interval '1.2'\n-------^^^\n"
-  }
-}
+
+Cannot parse the INTERVAL value: 1.2(line 1, pos 7)
+
+== SQL ==
+select interval '1.2'
+-------^^^


 -- !query
@@ -2282,12 +2123,12 @@ select interval '- 2'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nCannot parse the INTERVAL value: - 2(line 1, pos 7)\n\n== SQL ==\nselect interval '- 2'\n-------^^^\n"
-  }
-}
+
+Cannot parse the INTERVAL value: - 2(line 1, pos 7)
+
+== SQL ==
+select interval '- 2'
+-------^^^


 -- !query
@@ -2296,12 +2137,12 @@ select interval '1 day -'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nCannot parse the INTERVAL value: 1 day -(line 1, pos 7)\n\n== SQL ==\nselect interval '1 day -'\n-------^^^\n"
-  }
-}
+
+Cannot parse the INTERVAL value: 1 day -(line 1, pos 7)
+
+== SQL ==
+select interval '1 day -'
+-------^^^


 -- !query
@@ -2310,12 +2151,12 @@ select interval '1 day 1'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nCannot parse the INTERVAL value: 1 day 1(line 1, pos 7)\n\n== SQL ==\nselect interval '1 day 1'\n-------^^^\n"
-  }
-}
+
+Cannot parse the INTERVAL value: 1 day 1(line 1, pos 7)
+
+== SQL ==
+select interval '1 day 1'
+-------^^^


 -- !query
@@ -2324,12 +2165,12 @@ select interval '1 day 2' day
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nCan only use numbers in the interval value part for multiple unit value pairs interval form, but got invalid value: 1 day 2(line 1, pos 16)\n\n== SQL ==\nselect interval '1 day 2' day\n----------------^^^\n"
-  }
-}
+
+Can only use numbers in the interval value part for multiple unit value pairs interval form, but got invalid value: 1 day 2(line 1, pos 16)
+
+== SQL ==
+select interval '1 day 2' day
+----------------^^^


 -- !query
@@ -2338,12 +2179,12 @@ select interval 'interval 1' day
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nCan only use numbers in the interval value part for multiple unit value pairs interval form, but got invalid value: interval 1(line 1, pos 16)\n\n== SQL ==\nselect interval 'interval 1' day\n----------------^^^\n"
-  }
-}
+
+Can only use numbers in the interval value part for multiple unit value pairs interval form, but got invalid value: interval 1(line 1, pos 16)
+
+== SQL ==
+select interval 'interval 1' day
+----------------^^^


 -- !query
@@ -2422,12 +2263,7 @@ SELECT (INTERVAL '-178956970-8' YEAR TO MONTH) / -1.0
 struct<>
 -- !query output
 java.lang.ArithmeticException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Overflow"
-  }
-}
+Overflow


 -- !query
@@ -2436,12 +2272,7 @@ SELECT (INTERVAL '-178956970-8' YEAR TO MONTH) / -1.0D
 struct<>
 -- !query output
 java.lang.ArithmeticException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "not in range"
-  }
-}
+not in range


 -- !query
@@ -2512,12 +2343,7 @@ SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1.0
 struct<>
 -- !query output
 java.lang.ArithmeticException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Overflow"
-  }
-}
+Overflow


 -- !query
@@ -2526,12 +2352,7 @@ SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1.0D
 struct<>
 -- !query output
 java.lang.ArithmeticException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "not in range"
-  }
-}
+not in range


 -- !query
@@ -2636,12 +2457,12 @@ SELECT INTERVAL '106751992 04' DAY TO HOUR
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nrequirement failed: day 106751992 outside range [0, 106751991](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '106751992 04' DAY TO HOUR\n----------------^^^\n"
-  }
-}
+
+requirement failed: day 106751992 outside range [0, 106751991](line 1, pos 16)
+
+== SQL ==
+SELECT INTERVAL '106751992 04' DAY TO HOUR
+----------------^^^


 -- !query
@@ -2650,12 +2471,12 @@ SELECT INTERVAL '-106751992 04' DAY TO HOUR
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nrequirement failed: day 106751992 outside range [0, 106751991](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '-106751992 04' DAY TO HOUR\n----------------^^^\n"
-  }
-}
+
+requirement failed: day 106751992 outside range [0, 106751991](line 1, pos 16)
+
+== SQL ==
+SELECT INTERVAL '-106751992 04' DAY TO HOUR
+----------------^^^


 -- !query
@@ -2664,12 +2485,12 @@ SELECT INTERVAL '2562047789:00' HOUR TO MINUTE
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nrequirement failed: hour 2562047789 outside range [0, 2562047788](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '2562047789:00' HOUR TO MINUTE\n----------------^^^\n"
-  }
-}
+
+requirement failed: hour 2562047789 outside range [0, 2562047788](line 1, pos 16)
+
+== SQL ==
+SELECT INTERVAL '2562047789:00' HOUR TO MINUTE
+----------------^^^


 -- !query
@@ -2678,12 +2499,12 @@ SELECT INTERVAL '-2562047789:00' HOUR TO MINUTE
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nrequirement failed: hour 2562047789 outside range [0, 2562047788](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '-2562047789:00' HOUR TO MINUTE\n----------------^^^\n"
-  }
-}
+
+requirement failed: hour 2562047789 outside range [0, 2562047788](line 1, pos 16)
+
+== SQL ==
+SELECT INTERVAL '-2562047789:00' HOUR TO MINUTE
+----------------^^^


 -- !query
@@ -2692,12 +2513,12 @@ SELECT INTERVAL '153722867281:54.775808' MINUTE TO SECOND
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nrequirement failed: minute 153722867281 outside range [0, 153722867280](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '153722867281:54.775808' MINUTE TO SECOND\n----------------^^^\n"
-  }
-}
+
+requirement failed: minute 153722867281 outside range [0, 153722867280](line 1, pos 16)
+
+== SQL ==
+SELECT INTERVAL '153722867281:54.775808' MINUTE TO SECOND
+----------------^^^


 -- !query
@@ -2706,12 +2527,12 @@ SELECT INTERVAL '-153722867281:54.775808' MINUTE TO SECOND
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nrequirement failed: minute 153722867281 outside range [0, 153722867280](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '-153722867281:54.775808' MINUTE TO SECOND\n----------------^^^\n"
-  }
-}
+
+requirement failed: minute 153722867281 outside range [0, 153722867280](line 1, pos 16)
+
+== SQL ==
+SELECT INTERVAL '-153722867281:54.775808' MINUTE TO SECOND
+----------------^^^


 -- !query
@@ -2848,12 +2669,7 @@ SELECT INTERVAL 1 MONTH > INTERVAL 20 DAYS
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(INTERVAL '1' MONTH > INTERVAL '20' DAY)' due to data type mismatch: differing types in '(INTERVAL '1' MONTH > INTERVAL '20' DAY)' (interval month and interval day).; line 1 pos 7"
-  }
-}
+cannot resolve '(INTERVAL '1' MONTH > INTERVAL '20' DAY)' due to data type mismatch: differing types in '(INTERVAL '1' MONTH > INTERVAL '20' DAY)' (interval month and interval day).; line 1 pos 7


 -- !query
@@ -2862,12 +2678,7 @@ SELECT INTERVAL '1' DAY < '1'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(INTERVAL '1' DAY < '1')' due to data type mismatch: differing types in '(INTERVAL '1' DAY < '1')' (interval day and string).; line 1 pos 7"
-  }
-}
+cannot resolve '(INTERVAL '1' DAY < '1')' due to data type mismatch: differing types in '(INTERVAL '1' DAY < '1')' (interval day and string).; line 1 pos 7


 -- !query
@@ -2876,12 +2687,7 @@ SELECT INTERVAL '1' DAY = '1'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(INTERVAL '1' DAY = '1')' due to data type mismatch: differing types in '(INTERVAL '1' DAY = '1')' (interval day and string).; line 1 pos 7"
-  }
-}
+cannot resolve '(INTERVAL '1' DAY = '1')' due to data type mismatch: differing types in '(INTERVAL '1' DAY = '1')' (interval day and string).; line 1 pos 7


 -- !query
@@ -2890,12 +2696,7 @@ SELECT INTERVAL '1' DAY > '1'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(INTERVAL '1' DAY > '1')' due to data type mismatch: differing types in '(INTERVAL '1' DAY > '1')' (interval day and string).; line 1 pos 7"
-  }
-}
+cannot resolve '(INTERVAL '1' DAY > '1')' due to data type mismatch: differing types in '(INTERVAL '1' DAY > '1')' (interval day and string).; line 1 pos 7


 -- !query
@@ -2904,12 +2705,7 @@ SELECT '1' < INTERVAL '1' DAY
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '('1' < INTERVAL '1' DAY)' due to data type mismatch: differing types in '('1' < INTERVAL '1' DAY)' (string and interval day).; line 1 pos 7"
-  }
-}
+cannot resolve '('1' < INTERVAL '1' DAY)' due to data type mismatch: differing types in '('1' < INTERVAL '1' DAY)' (string and interval day).; line 1 pos 7


 -- !query
@@ -2918,12 +2714,7 @@ SELECT '1' = INTERVAL '1' DAY
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '('1' = INTERVAL '1' DAY)' due to data type mismatch: differing types in '('1' = INTERVAL '1' DAY)' (string and interval day).; line 1 pos 7"
-  }
-}
+cannot resolve '('1' = INTERVAL '1' DAY)' due to data type mismatch: differing types in '('1' = INTERVAL '1' DAY)' (string and interval day).; line 1 pos 7


 -- !query
@@ -2932,12 +2723,7 @@ SELECT '1' > INTERVAL '1' DAY
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '('1' > INTERVAL '1' DAY)' due to data type mismatch: differing types in '('1' > INTERVAL '1' DAY)' (string and interval day).; line 1 pos 7"
-  }
-}
+cannot resolve '('1' > INTERVAL '1' DAY)' due to data type mismatch: differing types in '('1' > INTERVAL '1' DAY)' (string and interval day).; line 1 pos 7


 -- !query
@@ -2946,12 +2732,7 @@ SELECT INTERVAL '1' YEAR < '1'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(INTERVAL '1' YEAR < '1')' due to data type mismatch: differing types in '(INTERVAL '1' YEAR < '1')' (interval year and string).; line 1 pos 7"
-  }
-}
+cannot resolve '(INTERVAL '1' YEAR < '1')' due to data type mismatch: differing types in '(INTERVAL '1' YEAR < '1')' (interval year and string).; line 1 pos 7


 -- !query
@@ -2960,12 +2741,7 @@ SELECT INTERVAL '1' YEAR = '1'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(INTERVAL '1' YEAR = '1')' due to data type mismatch: differing types in '(INTERVAL '1' YEAR = '1')' (interval year and string).; line 1 pos 7"
-  }
-}
+cannot resolve '(INTERVAL '1' YEAR = '1')' due to data type mismatch: differing types in '(INTERVAL '1' YEAR = '1')' (interval year and string).; line 1 pos 7


 -- !query
@@ -2974,12 +2750,7 @@ SELECT INTERVAL '1' YEAR > '1'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(INTERVAL '1' YEAR > '1')' due to data type mismatch: differing types in '(INTERVAL '1' YEAR > '1')' (interval year and string).; line 1 pos 7"
-  }
-}
+cannot resolve '(INTERVAL '1' YEAR > '1')' due to data type mismatch: differing types in '(INTERVAL '1' YEAR > '1')' (interval year and string).; line 1 pos 7


 -- !query
@@ -2988,12 +2759,7 @@ SELECT '1' < INTERVAL '1' YEAR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '('1' < INTERVAL '1' YEAR)' due to data type mismatch: differing types in '('1' < INTERVAL '1' YEAR)' (string and interval year).; line 1 pos 7"
-  }
-}
+cannot resolve '('1' < INTERVAL '1' YEAR)' due to data type mismatch: differing types in '('1' < INTERVAL '1' YEAR)' (string and interval year).; line 1 pos 7


 -- !query
@@ -3002,12 +2768,7 @@ SELECT '1' = INTERVAL '1' YEAR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '('1' = INTERVAL '1' YEAR)' due to data type mismatch: differing types in '('1' = INTERVAL '1' YEAR)' (string and interval year).; line 1 pos 7"
-  }
-}
+cannot resolve '('1' = INTERVAL '1' YEAR)' due to data type mismatch: differing types in '('1' = INTERVAL '1' YEAR)' (string and interval year).; line 1 pos 7


 -- !query
@@ -3016,12 +2777,7 @@ SELECT '1' > INTERVAL '1' YEAR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '('1' > INTERVAL '1' YEAR)' due to data type mismatch: differing types in '('1' > INTERVAL '1' YEAR)' (string and interval year).; line 1 pos 7"
-  }
-}
+cannot resolve '('1' > INTERVAL '1' YEAR)' due to data type mismatch: differing types in '('1' > INTERVAL '1' YEAR)' (string and interval year).; line 1 pos 7


 -- !query
@@ -3046,12 +2802,7 @@ SELECT array(INTERVAL 1 MONTH, INTERVAL 20 DAYS)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve 'array(INTERVAL '1' MONTH, INTERVAL '20' DAY)' due to data type mismatch: input to function array should all be the same type, but it's [interval month, interval day]; line 1 pos 7"
-  }
-}
+cannot resolve 'array(INTERVAL '1' MONTH, INTERVAL '20' DAY)' due to data type mismatch: input to function array should all be the same type, but it's [interval month, interval day]; line 1 pos 7


 -- !query
@@ -3076,12 +2827,7 @@ SELECT coalesce(INTERVAL 1 MONTH, INTERVAL 20 DAYS)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve 'coalesce(INTERVAL '1' MONTH, INTERVAL '20' DAY)' due to data type mismatch: input to function coalesce should all be the same type, but it's [interval month, interval day]; line 1 pos 7"
-  }
-}
+cannot resolve 'coalesce(INTERVAL '1' MONTH, INTERVAL '20' DAY)' due to data type mismatch: input to function coalesce should all be the same type, but it's [interval month, interval day]; line 1 pos 7


 -- !query
@@ -3138,12 +2884,7 @@ SELECT div(INTERVAL '1' MONTH, INTERVAL '-1' DAY)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(INTERVAL '1' MONTH div INTERVAL '-1' DAY)' due to data type mismatch: differing types in '(INTERVAL '1' MONTH div INTERVAL '-1' DAY)' (interval month and interval day).; line 1 pos 7"
-  }
-}
+cannot resolve '(INTERVAL '1' MONTH div INTERVAL '-1' DAY)' due to data type mismatch: differing types in '(INTERVAL '1' MONTH div INTERVAL '-1' DAY)' (interval month and interval day).; line 1 pos 7


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/literals.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/literals.sql.out
index 46c706c48c0ca..168025d911dea 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/literals.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/literals.sql.out
@@ -37,12 +37,12 @@ select 128Y
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nNumeric literal 128 does not fit in range [-128, 127] for type tinyint(line 1, pos 7)\n\n== SQL ==\nselect 128Y\n-------^^^\n"
-  }
-}
+
+Numeric literal 128 does not fit in range [-128, 127] for type tinyint(line 1, pos 7)
+
+== SQL ==
+select 128Y
+-------^^^


 -- !query
@@ -67,12 +67,12 @@ select 32768S
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nNumeric literal 32768 does not fit in range [-32768, 32767] for type smallint(line 1, pos 7)\n\n== SQL ==\nselect 32768S\n-------^^^\n"
-  }
-}
+
+Numeric literal 32768 does not fit in range [-32768, 32767] for type smallint(line 1, pos 7)
+
+== SQL ==
+select 32768S
+-------^^^


 -- !query
@@ -97,12 +97,12 @@ select 9223372036854775808L
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nNumeric literal 9223372036854775808 does not fit in range [-9223372036854775808, 9223372036854775807] for type bigint(line 1, pos 7)\n\n== SQL ==\nselect 9223372036854775808L\n-------^^^\n"
-  }
-}
+
+Numeric literal 9223372036854775808 does not fit in range [-9223372036854775808, 9223372036854775807] for type bigint(line 1, pos 7)
+
+== SQL ==
+select 9223372036854775808L
+-------^^^


 -- !query
@@ -143,12 +143,10 @@ select 1234567890123456789012345678901234567890
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\ndecimal can only support precision up to 38\n== SQL ==\nselect 1234567890123456789012345678901234567890"
-  }
-}
+
+decimal can only support precision up to 38
+== SQL ==
+select 1234567890123456789012345678901234567890


 -- !query
@@ -157,12 +155,10 @@ select 1234567890123456789012345678901234567890.0
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\ndecimal can only support precision up to 38\n== SQL ==\nselect 1234567890123456789012345678901234567890.0"
-  }
-}
+
+decimal can only support precision up to 38
+== SQL ==
+select 1234567890123456789012345678901234567890.0


 -- !query
@@ -187,12 +183,12 @@ select -3.4028235E39f
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nNumeric literal -3.4028235E39 does not fit in range [-3.4028234663852886E+38, 3.4028234663852886E+38] for type float(line 1, pos 7)\n\n== SQL ==\nselect -3.4028235E39f\n-------^^^\n"
-  }
-}
+
+Numeric literal -3.4028235E39 does not fit in range [-3.4028234663852886E+38, 3.4028234663852886E+38] for type float(line 1, pos 7)
+
+== SQL ==
+select -3.4028235E39f
+-------^^^


 -- !query
@@ -233,12 +229,12 @@ select 1E309, -1E309
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nNumeric literal 1E309 does not fit in range [-1.7976931348623157E+308, 1.7976931348623157E+308] for type double(line 1, pos 7)\n\n== SQL ==\nselect 1E309, -1E309\n-------^^^\n"
-  }
-}
+
+Numeric literal 1E309 does not fit in range [-1.7976931348623157E+308, 1.7976931348623157E+308] for type double(line 1, pos 7)
+
+== SQL ==
+select 1E309, -1E309
+-------^^^


 -- !query
@@ -337,12 +333,12 @@ select date 'mar 11 2016'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nCannot parse the DATE value: mar 11 2016(line 1, pos 7)\n\n== SQL ==\nselect date 'mar 11 2016'\n-------^^^\n"
-  }
-}
+
+Cannot parse the DATE value: mar 11 2016(line 1, pos 7)
+
+== SQL ==
+select date 'mar 11 2016'
+-------^^^


 -- !query
@@ -359,12 +355,12 @@ select timestamp '2016-33-11 20:54:00.000'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nCannot parse the TIMESTAMP value: 2016-33-11 20:54:00.000(line 1, pos 7)\n\n== SQL ==\nselect timestamp '2016-33-11 20:54:00.000'\n-------^^^\n"
-  }
-}
+
+Cannot parse the TIMESTAMP value: 2016-33-11 20:54:00.000(line 1, pos 7)
+
+== SQL ==
+select timestamp '2016-33-11 20:54:00.000'
+-------^^^


 -- !query
@@ -373,12 +369,12 @@ select GEO '(10,-6)'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nLiterals of type 'GEO' are currently not supported.(line 1, pos 7)\n\n== SQL ==\nselect GEO '(10,-6)'\n-------^^^\n"
-  }
-}
+
+Literals of type 'GEO' are currently not supported.(line 1, pos 7)
+
+== SQL ==
+select GEO '(10,-6)'
+-------^^^


 -- !query
@@ -395,12 +391,12 @@ select 1.20E-38BD
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\ndecimal can only support precision up to 38(line 1, pos 7)\n\n== SQL ==\nselect 1.20E-38BD\n-------^^^\n"
-  }
-}
+
+decimal can only support precision up to 38(line 1, pos 7)
+
+== SQL ==
+select 1.20E-38BD
+-------^^^


 -- !query
@@ -417,12 +413,12 @@ select X'XuZ'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\ncontains illegal character for hexBinary: 0XuZ(line 1, pos 7)\n\n== SQL ==\nselect X'XuZ'\n-------^^^\n"
-  }
-}
+
+contains illegal character for hexBinary: 0XuZ(line 1, pos 7)
+
+== SQL ==
+select X'XuZ'
+-------^^^


 -- !query
@@ -439,12 +435,7 @@ select +date '1999-01-01'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(+ DATE '1999-01-01')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'DATE '1999-01-01'' is of date type.; line 1 pos 7"
-  }
-}
+cannot resolve '(+ DATE '1999-01-01')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'DATE '1999-01-01'' is of date type.; line 1 pos 7


 -- !query
@@ -453,12 +444,7 @@ select +timestamp '1999-01-01'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(+ TIMESTAMP '1999-01-01 00:00:00')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'TIMESTAMP '1999-01-01 00:00:00'' is of timestamp type.; line 1 pos 7"
-  }
-}
+cannot resolve '(+ TIMESTAMP '1999-01-01 00:00:00')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'TIMESTAMP '1999-01-01 00:00:00'' is of timestamp type.; line 1 pos 7


 -- !query
@@ -475,12 +461,7 @@ select +map(1, 2)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(+ map(1, 2))' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'map(1, 2)' is of map type.; line 1 pos 7"
-  }
-}
+cannot resolve '(+ map(1, 2))' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'map(1, 2)' is of map type.; line 1 pos 7
pos 7 -- !query @@ -489,12 +470,7 @@ select +array(1,2) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(+ array(1, 2))' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'array(1, 2)' is of array type.; line 1 pos 7" - } -} +cannot resolve '(+ array(1, 2))' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'array(1, 2)' is of array type.; line 1 pos 7 -- !query @@ -503,12 +479,7 @@ select +named_struct('a', 1, 'b', 'spark') struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(+ named_struct('a', 1, 'b', 'spark'))' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'named_struct('a', 1, 'b', 'spark')' is of struct type.; line 1 pos 7" - } -} +cannot resolve '(+ named_struct('a', 1, 'b', 'spark'))' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'named_struct('a', 1, 'b', 'spark')' is of struct type.; line 1 pos 7 -- !query @@ -517,12 +488,7 @@ select +X'1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(+ X'01')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'X'01'' is of binary type.; line 1 pos 7" - } -} +cannot resolve '(+ X'01')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'X'01'' is of binary type.; line 1 pos 7 -- !query @@ -531,12 +497,7 @@ select -date '1999-01-01' struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(- DATE '1999-01-01')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'DATE '1999-01-01'' is of date type.; line 1 pos 7" - } -} +cannot resolve '(- DATE '1999-01-01')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'DATE '1999-01-01'' is of date type.; line 1 pos 7 -- !query @@ -545,12 +506,7 @@ select -timestamp '1999-01-01' struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(- TIMESTAMP '1999-01-01 00:00:00')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'TIMESTAMP '1999-01-01 00:00:00'' is of timestamp type.; line 1 pos 7" - } -} +cannot resolve '(- TIMESTAMP '1999-01-01 00:00:00')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'TIMESTAMP '1999-01-01 00:00:00'' is of timestamp type.; line 1 pos 7 -- !query @@ -559,9 +515,4 @@ select -x'2379ACFe' struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(- 
X'2379ACFE')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'X'2379ACFE'' is of binary type.; line 1 pos 7" - } -} +cannot resolve '(- X'2379ACFE')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'X'2379ACFE'' is of binary type.; line 1 pos 7 diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/map.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/map.sql.out index 3caa17edbcd55..cd7cf9a60ce37 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/map.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/map.sql.out @@ -69,12 +69,7 @@ select map_contains_key(map('1', 'a', '2', 'b'), 1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'map_contains_key(map('1', 'a', '2', 'b'), 1)' due to data type mismatch: Input to function map_contains_key should have been map followed by a value with same key type, but it's [map, int].; line 1 pos 7" - } -} +cannot resolve 'map_contains_key(map('1', 'a', '2', 'b'), 1)' due to data type mismatch: Input to function map_contains_key should have been map followed by a value with same key type, but it's [map, int].; line 1 pos 7 -- !query @@ -83,9 +78,4 @@ select map_contains_key(map(1, 'a', 2, 'b'), '1') struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'map_contains_key(map(1, 'a', 2, 'b'), '1')' due to data type mismatch: Input to function map_contains_key should have been map followed by a value with same key type, but it's [map, string].; line 1 pos 7" - } -} +cannot resolve 'map_contains_key(map(1, 'a', 2, 'b'), '1')' due to data type mismatch: Input to function map_contains_key should have been map followed by a value with same key type, but it's [map, string].; line 1 pos 7 diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out index d6ac00d505d3e..08dcc011f2475 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out @@ -5,12 +5,7 @@ select concat_ws() struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "requirement failed: concat_ws requires at least one argument.; line 1 pos 7" - } -} +requirement failed: concat_ws requires at least one argument.; line 1 pos 7 -- !query @@ -19,12 +14,7 @@ select format_string() struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "requirement failed: format_string() should take at least 1 argument; line 1 pos 7" - } -} +requirement failed: format_string() should take at least 1 argument; line 1 pos 7 -- !query @@ -729,12 +719,7 @@ select decode() struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Invalid number of arguments for function decode. Expected: 2; Found: 0; line 1 pos 7" - } -} +Invalid number of arguments for function decode. 
Expected: 2; Found: 0; line 1 pos 7 -- !query @@ -743,12 +728,7 @@ select decode(encode('abc', 'utf-8')) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Invalid number of arguments for function decode. Expected: 2; Found: 1; line 1 pos 7" - } -} +Invalid number of arguments for function decode. Expected: 2; Found: 1; line 1 pos 7 -- !query @@ -1213,12 +1193,7 @@ select to_binary(null, cast(null as int)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "The 'format' parameter of function 'to_binary' needs to be a string literal.; line 1 pos 7" - } -} +The 'format' parameter of function 'to_binary' needs to be a string literal.; line 1 pos 7 -- !query @@ -1227,12 +1202,7 @@ select to_binary('abc', 1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "The 'format' parameter of function 'to_binary' needs to be a string literal.; line 1 pos 7" - } -} +The 'format' parameter of function 'to_binary' needs to be a string literal.; line 1 pos 7 -- !query @@ -1241,12 +1211,7 @@ select to_binary('abc', 'invalidFormat') struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Invalid value for the 'format' parameter of function 'to_binary': invalidformat. The value has to be a case-insensitive string literal of 'hex', 'utf-8', or 'base64'." - } -} +Invalid value for the 'format' parameter of function 'to_binary': invalidformat. The value has to be a case-insensitive string literal of 'hex', 'utf-8', or 'base64'. -- !query @@ -1255,9 +1220,4 @@ select to_binary('a!', 'base64') struct<> -- !query output java.lang.IllegalArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Last unit does not have enough valid bits" - } -} +Last unit does not have enough valid bits diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out index 8fe94fbe8931e..99d34b67e72af 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out @@ -13,12 +13,12 @@ select timestamp '2019-01-01中文' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot parse the TIMESTAMP value: 2019-01-01中文(line 1, pos 7)\n\n== SQL ==\nselect timestamp '2019-01-01中文'\n-------^^^\n" - } -} + +Cannot parse the TIMESTAMP value: 2019-01-01中文(line 1, pos 7) + +== SQL == +select timestamp '2019-01-01中文' +-------^^^ -- !query @@ -27,12 +27,12 @@ select timestamp'4294967297' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot parse the TIMESTAMP value: 4294967297(line 1, pos 7)\n\n== SQL ==\nselect timestamp'4294967297'\n-------^^^\n" - } -} + +Cannot parse the TIMESTAMP value: 4294967297(line 1, pos 7) + +== SQL == +select timestamp'4294967297' +-------^^^ -- !query @@ -41,12 +41,12 @@ select timestamp'2021-01-01T12:30:4294967297.123456' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot parse 
the TIMESTAMP value: 2021-01-01T12:30:4294967297.123456(line 1, pos 7)\n\n== SQL ==\nselect timestamp'2021-01-01T12:30:4294967297.123456'\n-------^^^\n" - } -} + +Cannot parse the TIMESTAMP value: 2021-01-01T12:30:4294967297.123456(line 1, pos 7) + +== SQL == +select timestamp'2021-01-01T12:30:4294967297.123456' +-------^^^ -- !query @@ -126,12 +126,7 @@ SELECT make_timestamp(1, 1, 1, 1, 1, 61) struct<> -- !query output java.time.DateTimeException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 61. If necessary set spark.sql.ansi.enabled to false to bypass this error." - } -} +Invalid value for SecondOfMinute (valid values 0 - 59): 61. If necessary set spark.sql.ansi.enabled to false to bypass this error. -- !query @@ -156,12 +151,7 @@ SELECT make_timestamp(1, 1, 1, 1, 1, 99.999999) struct<> -- !query output java.time.DateTimeException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 99. If necessary set spark.sql.ansi.enabled to false to bypass this error." - } -} +Invalid value for SecondOfMinute (valid values 0 - 59): 99. If necessary set spark.sql.ansi.enabled to false to bypass this error. -- !query @@ -170,12 +160,7 @@ SELECT make_timestamp(1, 1, 1, 1, 1, 999.999999) struct<> -- !query output java.time.DateTimeException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 999. If necessary set spark.sql.ansi.enabled to false to bypass this error." - } -} +Invalid value for SecondOfMinute (valid values 0 - 59): 999. If necessary set spark.sql.ansi.enabled to false to bypass this error. -- !query @@ -216,12 +201,7 @@ select TIMESTAMP_SECONDS(1230219000123123) struct<> -- !query output java.lang.ArithmeticException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "long overflow" - } -} +long overflow -- !query @@ -230,12 +210,7 @@ select TIMESTAMP_SECONDS(-1230219000123123) struct<> -- !query output java.lang.ArithmeticException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "long overflow" - } -} +long overflow -- !query @@ -244,12 +219,7 @@ select TIMESTAMP_MILLIS(92233720368547758) struct<> -- !query output java.lang.ArithmeticException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "long overflow" - } -} +long overflow -- !query @@ -258,12 +228,7 @@ select TIMESTAMP_MILLIS(-92233720368547758) struct<> -- !query output java.lang.ArithmeticException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "long overflow" - } -} +long overflow -- !query @@ -272,12 +237,7 @@ select TIMESTAMP_SECONDS(0.1234567) struct<> -- !query output java.lang.ArithmeticException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Rounding necessary" - } -} +Rounding necessary -- !query @@ -759,12 +719,7 @@ select timestamp'2011-11-11 11:11:11' + '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' + CAST('1' AS TIMESTAMP))' due to data type mismatch: '(TIMESTAMP '2011-11-11 11:11:11' + CAST('1' AS TIMESTAMP))' requires (numeric or interval day to second or interval year to month or interval) type, not timestamp; line 1 pos 7" - } -} +cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' + CAST('1' AS TIMESTAMP))' due to data type mismatch: 
'(TIMESTAMP '2011-11-11 11:11:11' + CAST('1' AS TIMESTAMP))' requires (numeric or interval day to second or interval year to month or interval) type, not timestamp; line 1 pos 7 -- !query @@ -773,12 +728,7 @@ select '1' + timestamp'2011-11-11 11:11:11' struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS TIMESTAMP) + TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: '(CAST('1' AS TIMESTAMP) + TIMESTAMP '2011-11-11 11:11:11')' requires (numeric or interval day to second or interval year to month or interval) type, not timestamp; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS TIMESTAMP) + TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: '(CAST('1' AS TIMESTAMP) + TIMESTAMP '2011-11-11 11:11:11')' requires (numeric or interval day to second or interval year to month or interval) type, not timestamp; line 1 pos 7 -- !query @@ -787,12 +737,7 @@ select timestamp'2011-11-11 11:11:11' + null struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' + NULL)' due to data type mismatch: differing types in '(TIMESTAMP '2011-11-11 11:11:11' + NULL)' (timestamp and void).; line 1 pos 7" - } -} +cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' + NULL)' due to data type mismatch: differing types in '(TIMESTAMP '2011-11-11 11:11:11' + NULL)' (timestamp and void).; line 1 pos 7 -- !query @@ -801,12 +746,7 @@ select null + timestamp'2011-11-11 11:11:11' struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(NULL + TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(NULL + TIMESTAMP '2011-11-11 11:11:11')' (void and timestamp).; line 1 pos 7" - } -} +cannot resolve '(NULL + TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(NULL + TIMESTAMP '2011-11-11 11:11:11')' (void and timestamp).; line 1 pos 7 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/try_arithmetic.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/try_arithmetic.sql.out index c392aaf19c853..8622b97a20502 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/try_arithmetic.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/try_arithmetic.sql.out @@ -141,12 +141,7 @@ SELECT try_add(interval 2 year, interval 2 second) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'INTERVAL '2' YEAR + INTERVAL '02' SECOND' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2' YEAR' is of interval year type.; line 1 pos 7" - } -} +cannot resolve 'INTERVAL '2' YEAR + INTERVAL '02' SECOND' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2' YEAR' is of interval year type.; line 1 pos 7 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/array.sql.out b/sql/core/src/test/resources/sql-tests/results/array.sql.out index b59e18438e700..a0aed1a50e123 100644 --- a/sql/core/src/test/resources/sql-tests/results/array.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/array.sql.out @@ -128,12 +128,7 @@ select sort_array(array('b', 'd'), '1') 
struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'sort_array(array('b', 'd'), '1')' due to data type mismatch: Sort order in second argument requires a boolean literal.; line 1 pos 7" - } -} +cannot resolve 'sort_array(array('b', 'd'), '1')' due to data type mismatch: Sort order in second argument requires a boolean literal.; line 1 pos 7 -- !query @@ -142,12 +137,7 @@ select sort_array(array('b', 'd'), cast(NULL as boolean)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'sort_array(array('b', 'd'), CAST(NULL AS BOOLEAN))' due to data type mismatch: Sort order in second argument requires a boolean literal.; line 1 pos 7" - } -} +cannot resolve 'sort_array(array('b', 'd'), CAST(NULL AS BOOLEAN))' due to data type mismatch: Sort order in second argument requires a boolean literal.; line 1 pos 7 -- !query @@ -307,12 +297,7 @@ select array_size(map('a', 1, 'b', 2)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'array_size(map('a', 1, 'b', 2))' due to data type mismatch: argument 1 requires array type, however, 'map('a', 1, 'b', 2)' is of map type.; line 1 pos 7" - } -} +cannot resolve 'array_size(map('a', 1, 'b', 2))' due to data type mismatch: argument 1 requires array type, however, 'map('a', 1, 'b', 2)' is of map type.; line 1 pos 7 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/bitwise.sql.out b/sql/core/src/test/resources/sql-tests/results/bitwise.sql.out index 24781f8535a03..84d2e9e50ba4f 100644 --- a/sql/core/src/test/resources/sql-tests/results/bitwise.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/bitwise.sql.out @@ -149,12 +149,7 @@ select bit_count("bit count") struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'bit_count('bit count')' due to data type mismatch: argument 1 requires (integral or boolean) type, however, ''bit count'' is of string type.; line 1 pos 7" - } -} +cannot resolve 'bit_count('bit count')' due to data type mismatch: argument 1 requires (integral or boolean) type, however, ''bit count'' is of string type.; line 1 pos 7 -- !query @@ -163,12 +158,7 @@ select bit_count('a') struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'bit_count('a')' due to data type mismatch: argument 1 requires (integral or boolean) type, however, ''a'' is of string type.; line 1 pos 7" - } -} +cannot resolve 'bit_count('a')' due to data type mismatch: argument 1 requires (integral or boolean) type, however, ''a'' is of string type.; line 1 pos 7 -- !query @@ -270,12 +260,7 @@ select getbit(11L, -1) struct<> -- !query output java.lang.IllegalArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Invalid bit position: -1 is less than zero" - } -} +Invalid bit position: -1 is less than zero -- !query @@ -284,9 +269,4 @@ select getbit(11L, 64) struct<> -- !query output java.lang.IllegalArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Invalid bit position: 64 exceeds the bit upper limit" - } -} +Invalid bit position: 64 exceeds the bit upper limit diff --git 
a/sql/core/src/test/resources/sql-tests/results/cast.sql.out b/sql/core/src/test/resources/sql-tests/results/cast.sql.out index 8832354912d73..b0f7c304f23c1 100644 --- a/sql/core/src/test/resources/sql-tests/results/cast.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/cast.sql.out @@ -350,12 +350,12 @@ SELECT CAST(interval 3 month 1 hour AS string) struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot mix year-month and day-time fields: interval 3 month 1 hour(line 1, pos 12)\n\n== SQL ==\nSELECT CAST(interval 3 month 1 hour AS string)\n------------^^^\n" - } -} + +Cannot mix year-month and day-time fields: interval 3 month 1 hour(line 1, pos 12) + +== SQL == +SELECT CAST(interval 3 month 1 hour AS string) +------------^^^ -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/ceil-floor-with-scale-param.sql.out b/sql/core/src/test/resources/sql-tests/results/ceil-floor-with-scale-param.sql.out index f27c2ac6a3d59..38efdac409231 100644 --- a/sql/core/src/test/resources/sql-tests/results/ceil-floor-with-scale-param.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ceil-floor-with-scale-param.sql.out @@ -93,12 +93,7 @@ SELECT CEIL(2.5, null) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "The 'scale' parameter of function 'ceil' needs to be a int literal.; line 1 pos 7" - } -} +The 'scale' parameter of function 'ceil' needs to be a int literal.; line 1 pos 7 -- !query @@ -107,12 +102,7 @@ SELECT CEIL(2.5, 'a') struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "The 'scale' parameter of function 'ceil' needs to be a int literal.; line 1 pos 7" - } -} +The 'scale' parameter of function 'ceil' needs to be a int literal.; line 1 pos 7 -- !query @@ -121,12 +111,7 @@ SELECT CEIL(2.5, 0, 0) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Invalid number of arguments for function ceil. Expected: 2; Found: 3; line 1 pos 7" - } -} +Invalid number of arguments for function ceil. Expected: 2; Found: 3; line 1 pos 7 -- !query @@ -223,12 +208,7 @@ SELECT FLOOR(2.5, null) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "The 'scale' parameter of function 'floor' needs to be a int literal.; line 1 pos 7" - } -} +The 'scale' parameter of function 'floor' needs to be a int literal.; line 1 pos 7 -- !query @@ -237,12 +217,7 @@ SELECT FLOOR(2.5, 'a') struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "The 'scale' parameter of function 'floor' needs to be a int literal.; line 1 pos 7" - } -} +The 'scale' parameter of function 'floor' needs to be a int literal.; line 1 pos 7 -- !query @@ -251,9 +226,4 @@ SELECT FLOOR(2.5, 0, 0) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Invalid number of arguments for function floor. Expected: 2; Found: 3; line 1 pos 7" - } -} +Invalid number of arguments for function floor. 
Expected: 2; Found: 3; line 1 pos 7 diff --git a/sql/core/src/test/resources/sql-tests/results/change-column.sql.out b/sql/core/src/test/resources/sql-tests/results/change-column.sql.out index e1ab99a421ea3..ed5871d38553d 100644 --- a/sql/core/src/test/resources/sql-tests/results/change-column.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/change-column.sql.out @@ -23,12 +23,12 @@ ALTER TABLE test_change CHANGE a struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nOperation not allowed: ALTER TABLE table CHANGE COLUMN requires a TYPE, a SET/DROP, a COMMENT, or a FIRST/AFTER(line 1, pos 0)\n\n== SQL ==\nALTER TABLE test_change CHANGE a\n^^^\n" - } -} + +Operation not allowed: ALTER TABLE table CHANGE COLUMN requires a TYPE, a SET/DROP, a COMMENT, or a FIRST/AFTER(line 1, pos 0) + +== SQL == +ALTER TABLE test_change CHANGE a +^^^ -- !query @@ -47,12 +47,7 @@ ALTER TABLE test_change RENAME COLUMN a TO a1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "RENAME COLUMN is only supported with v2 tables." - } -} +RENAME COLUMN is only supported with v2 tables. -- !query @@ -71,12 +66,7 @@ ALTER TABLE test_change CHANGE a TYPE STRING struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "ALTER TABLE CHANGE COLUMN is not supported for changing column 'a' with type 'IntegerType' to 'a' with type 'StringType'" - } -} +ALTER TABLE CHANGE COLUMN is not supported for changing column 'a' with type 'IntegerType' to 'a' with type 'StringType' -- !query @@ -95,12 +85,7 @@ ALTER TABLE test_change CHANGE a AFTER b struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "ALTER COLUMN ... FIRST | ALTER is only supported with v2 tables." - } -} +ALTER COLUMN ... FIRST | ALTER is only supported with v2 tables. -- !query @@ -109,12 +94,7 @@ ALTER TABLE test_change CHANGE b FIRST struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "ALTER COLUMN ... FIRST | ALTER is only supported with v2 tables." - } -} +ALTER COLUMN ... FIRST | ALTER is only supported with v2 tables. -- !query @@ -193,12 +173,12 @@ ALTER TABLE test_change CHANGE invalid_col TYPE INT struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Missing field invalid_col in table spark_catalog.default.test_change with schema:\nroot\n |-- a: integer (nullable = true)\n |-- b: string (nullable = true)\n |-- c: integer (nullable = true)\n; line 1 pos 0" - } -} +Missing field invalid_col in table spark_catalog.default.test_change with schema: +root + |-- a: integer (nullable = true) + |-- b: string (nullable = true) + |-- c: integer (nullable = true) +; line 1 pos 0 -- !query @@ -243,12 +223,7 @@ ALTER TABLE temp_view CHANGE a TYPE INT struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "temp_view is a temp view. 'ALTER TABLE ... CHANGE COLUMN' expects a table.; line 1 pos 12" - } -} +temp_view is a temp view. 'ALTER TABLE ... 
CHANGE COLUMN' expects a table.; line 1 pos 12 -- !query @@ -265,12 +240,7 @@ ALTER TABLE global_temp.global_temp_view CHANGE a TYPE INT struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "global_temp.global_temp_view is a temp view. 'ALTER TABLE ... CHANGE COLUMN' expects a table.; line 1 pos 12" - } -} +global_temp.global_temp_view is a temp view. 'ALTER TABLE ... CHANGE COLUMN' expects a table.; line 1 pos 12 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/charvarchar.sql.out b/sql/core/src/test/resources/sql-tests/results/charvarchar.sql.out index 6c4e788d43491..1f79817787a43 100644 --- a/sql/core/src/test/resources/sql-tests/results/charvarchar.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/charvarchar.sql.out @@ -259,12 +259,7 @@ alter table char_tbl1 change column c type char(6) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "ALTER TABLE CHANGE COLUMN is not supported for changing column 'c' with type 'CharType(5)' to 'c' with type 'CharType(6)'" - } -} +ALTER TABLE CHANGE COLUMN is not supported for changing column 'c' with type 'CharType(5)' to 'c' with type 'CharType(6)' -- !query @@ -580,12 +575,7 @@ alter table char_part partition (v2='ke') rename to partition (v2='nt') struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Partition spec is invalid. The spec (v2) must match the partition spec (v2, c2) defined in table '`spark_catalog`.`default`.`char_part`'" - } -} +Partition spec is invalid. The spec (v2) must match the partition spec (v2, c2) defined in table '`spark_catalog`.`default`.`char_part`' -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/columnresolution-negative.sql.out b/sql/core/src/test/resources/sql-tests/results/columnresolution-negative.sql.out index 427a60b1b7d30..1eaf5f03f58b7 100644 --- a/sql/core/src/test/resources/sql-tests/results/columnresolution-negative.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/columnresolution-negative.sql.out @@ -69,12 +69,7 @@ SELECT i1 FROM t1, mydb1.t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Reference 'i1' is ambiguous, could be: spark_catalog.mydb1.t1.i1, spark_catalog.mydb1.t1.i1.; line 1 pos 7" - } -} +Reference 'i1' is ambiguous, could be: spark_catalog.mydb1.t1.i1, spark_catalog.mydb1.t1.i1.; line 1 pos 7 -- !query @@ -83,12 +78,7 @@ SELECT t1.i1 FROM t1, mydb1.t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Reference 't1.i1' is ambiguous, could be: spark_catalog.mydb1.t1.i1, spark_catalog.mydb1.t1.i1.; line 1 pos 7" - } -} +Reference 't1.i1' is ambiguous, could be: spark_catalog.mydb1.t1.i1, spark_catalog.mydb1.t1.i1.; line 1 pos 7 -- !query @@ -97,12 +87,7 @@ SELECT mydb1.t1.i1 FROM t1, mydb1.t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Reference 'mydb1.t1.i1' is ambiguous, could be: spark_catalog.mydb1.t1.i1, spark_catalog.mydb1.t1.i1.; line 1 pos 7" - } -} +Reference 'mydb1.t1.i1' is ambiguous, could be: spark_catalog.mydb1.t1.i1, spark_catalog.mydb1.t1.i1.; line 1 pos 7 -- !query @@ -111,12 +96,7 @@ SELECT i1 FROM t1, 
mydb2.t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Reference 'i1' is ambiguous, could be: spark_catalog.mydb1.t1.i1, spark_catalog.mydb2.t1.i1.; line 1 pos 7" - } -} +Reference 'i1' is ambiguous, could be: spark_catalog.mydb1.t1.i1, spark_catalog.mydb2.t1.i1.; line 1 pos 7 -- !query @@ -125,12 +105,7 @@ SELECT t1.i1 FROM t1, mydb2.t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Reference 't1.i1' is ambiguous, could be: spark_catalog.mydb1.t1.i1, spark_catalog.mydb2.t1.i1.; line 1 pos 7" - } -} +Reference 't1.i1' is ambiguous, could be: spark_catalog.mydb1.t1.i1, spark_catalog.mydb2.t1.i1.; line 1 pos 7 -- !query @@ -147,12 +122,7 @@ SELECT i1 FROM t1, mydb1.t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Reference 'i1' is ambiguous, could be: spark_catalog.mydb2.t1.i1, spark_catalog.mydb1.t1.i1.; line 1 pos 7" - } -} +Reference 'i1' is ambiguous, could be: spark_catalog.mydb2.t1.i1, spark_catalog.mydb1.t1.i1.; line 1 pos 7 -- !query @@ -161,12 +131,7 @@ SELECT t1.i1 FROM t1, mydb1.t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Reference 't1.i1' is ambiguous, could be: spark_catalog.mydb2.t1.i1, spark_catalog.mydb1.t1.i1.; line 1 pos 7" - } -} +Reference 't1.i1' is ambiguous, could be: spark_catalog.mydb2.t1.i1, spark_catalog.mydb1.t1.i1.; line 1 pos 7 -- !query @@ -175,12 +140,7 @@ SELECT i1 FROM t1, mydb2.t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Reference 'i1' is ambiguous, could be: spark_catalog.mydb2.t1.i1, spark_catalog.mydb2.t1.i1.; line 1 pos 7" - } -} +Reference 'i1' is ambiguous, could be: spark_catalog.mydb2.t1.i1, spark_catalog.mydb2.t1.i1.; line 1 pos 7 -- !query @@ -189,12 +149,7 @@ SELECT t1.i1 FROM t1, mydb2.t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Reference 't1.i1' is ambiguous, could be: spark_catalog.mydb2.t1.i1, spark_catalog.mydb2.t1.i1.; line 1 pos 7" - } -} +Reference 't1.i1' is ambiguous, could be: spark_catalog.mydb2.t1.i1, spark_catalog.mydb2.t1.i1.; line 1 pos 7 -- !query @@ -251,12 +206,7 @@ SELECT t1.x.y.* FROM t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 't1.x.y.*' given input columns 'i1'; line 1 pos 7" - } -} +cannot resolve 't1.x.y.*' given input columns 'i1'; line 1 pos 7 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/comments.sql.out b/sql/core/src/test/resources/sql-tests/results/comments.sql.out index 2f56e832a8b45..f05d188740fae 100644 --- a/sql/core/src/test/resources/sql-tests/results/comments.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/comments.sql.out @@ -132,12 +132,20 @@ select 1 as a struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nUnclosed bracketed comment(line 3, pos 0)\n\n== SQL ==\n/*abc*/\nselect 1 as a\n/*\n^^^\n\n2 as b\n/*abc*/\n, 3 as c\n\n/**/\n" - } -} + +Unclosed bracketed comment(line 3, pos 0) + +== SQL == +/*abc*/ +select 1 as a +/* 
+^^^ + +2 as b +/*abc*/ +, 3 as c + +/**/ -- !query @@ -155,9 +163,18 @@ select 4 as d struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nUnclosed bracketed comment(line 3, pos 0)\n\n== SQL ==\n/*abc*/\nselect 1 as a\n/*\n^^^\n\n2 as b\n/*abc*/\n, 3 as c\n\n/**/\nselect 4 as d\n" - } -} + +Unclosed bracketed comment(line 3, pos 0) + +== SQL == +/*abc*/ +select 1 as a +/* +^^^ + +2 as b +/*abc*/ +, 3 as c + +/**/ +select 4 as d diff --git a/sql/core/src/test/resources/sql-tests/results/count.sql.out b/sql/core/src/test/resources/sql-tests/results/count.sql.out index d8cee910c95d2..ab9b543a9f80b 100644 --- a/sql/core/src/test/resources/sql-tests/results/count.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/count.sql.out @@ -146,12 +146,7 @@ SELECT count() FROM testData struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'count()' due to data type mismatch: count requires at least one argument. If you have to call the function count without arguments, set the legacy configuration `spark.sql.legacy.allowParameterlessCount` as true; line 1 pos 7" - } -} +cannot resolve 'count()' due to data type mismatch: count requires at least one argument. If you have to call the function count without arguments, set the legacy configuration `spark.sql.legacy.allowParameterlessCount` as true; line 1 pos 7 -- !query @@ -184,9 +179,4 @@ SELECT count(testData.*) FROM testData struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "count(testData.*) is not allowed. Please use count(*) or expand the columns manually, e.g. count(col1, col2)" - } -} +count(testData.*) is not allowed. Please use count(*) or expand the columns manually, e.g. 
count(col1, col2) diff --git a/sql/core/src/test/resources/sql-tests/results/csv-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/csv-functions.sql.out index 2fb9bfb4643f6..301e5cc78df4b 100644 --- a/sql/core/src/test/resources/sql-tests/results/csv-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/csv-functions.sql.out @@ -21,12 +21,7 @@ select from_csv('1', 1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "The expression '1' is not a valid schema string.; line 1 pos 7" - } -} +The expression '1' is not a valid schema string.; line 1 pos 7 -- !query @@ -35,12 +30,20 @@ select from_csv('1', 'a InvalidType') struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Cannot parse the data type: \n[PARSE_SYNTAX_ERROR] Syntax error at or near 'InvalidType': extra input 'InvalidType'(line 1, pos 2)\n\n== SQL ==\na InvalidType\n--^^^\n\nFailed fallback parsing: \nDataType invalidtype is not supported.(line 1, pos 2)\n\n== SQL ==\na InvalidType\n--^^^\n; line 1 pos 7" - } -} +Cannot parse the data type: +[PARSE_SYNTAX_ERROR] Syntax error at or near 'InvalidType': extra input 'InvalidType'(line 1, pos 2) + +== SQL == +a InvalidType +--^^^ + +Failed fallback parsing: +DataType invalidtype is not supported.(line 1, pos 2) + +== SQL == +a InvalidType +--^^^ +; line 1 pos 7 -- !query @@ -49,12 +52,7 @@ select from_csv('1', 'a INT', named_struct('mode', 'PERMISSIVE')) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Must use a map() function for options; line 1 pos 7" - } -} +Must use a map() function for options; line 1 pos 7 -- !query @@ -63,12 +61,7 @@ select from_csv('1', 'a INT', map('mode', 1)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "A type of keys and values in map() must be string, but got map; line 1 pos 7" - } -} +A type of keys and values in map() must be string, but got map; line 1 pos 7 -- !query @@ -77,12 +70,7 @@ select from_csv() struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Invalid number of arguments for function from_csv. Expected: one of 2 and 3; Found: 0; line 1 pos 7" - } -} +Invalid number of arguments for function from_csv. 
Expected: one of 2 and 3; Found: 0; line 1 pos 7 -- !query @@ -107,12 +95,7 @@ select schema_of_csv(null) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'schema_of_csv(NULL)' due to data type mismatch: The input csv should be a foldable string expression and not null; however, got NULL.; line 1 pos 7" - } -} +cannot resolve 'schema_of_csv(NULL)' due to data type mismatch: The input csv should be a foldable string expression and not null; however, got NULL.; line 1 pos 7 -- !query @@ -129,12 +112,7 @@ SELECT schema_of_csv(csvField) FROM csvTable struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'schema_of_csv(csvtable.csvField)' due to data type mismatch: The input csv should be a foldable string expression and not null; however, got csvtable.csvField.; line 1 pos 7" - } -} +cannot resolve 'schema_of_csv(csvtable.csvField)' due to data type mismatch: The input csv should be a foldable string expression and not null; however, got csvtable.csvField.; line 1 pos 7 -- !query @@ -167,12 +145,7 @@ select to_csv(named_struct('a', 1, 'b', 2), named_struct('mode', 'PERMISSIVE')) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Must use a map() function for options; line 1 pos 7" - } -} +Must use a map() function for options; line 1 pos 7 -- !query @@ -181,9 +154,4 @@ select to_csv(named_struct('a', 1, 'b', 2), map('mode', 1)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "A type of keys and values in map() must be string, but got map; line 1 pos 7" - } -} +A type of keys and values in map() must be string, but got map; line 1 pos 7 diff --git a/sql/core/src/test/resources/sql-tests/results/cte-legacy.sql.out b/sql/core/src/test/resources/sql-tests/results/cte-legacy.sql.out index 5825aa1b6acc1..34f11d9da53f6 100644 --- a/sql/core/src/test/resources/sql-tests/results/cte-legacy.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/cte-legacy.sql.out @@ -232,9 +232,4 @@ SELECT * FROM t2 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Table or view not found: t1; line 5 pos 20" - } -} +Table or view not found: t1; line 5 pos 20 diff --git a/sql/core/src/test/resources/sql-tests/results/cte-nested.sql.out b/sql/core/src/test/resources/sql-tests/results/cte-nested.sql.out index 61cd8deca4e83..13b4d10304e3a 100644 --- a/sql/core/src/test/resources/sql-tests/results/cte-nested.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/cte-nested.sql.out @@ -45,12 +45,7 @@ SELECT * FROM t2 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Name t is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228." - } -} +Name t is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228. 
-- !query @@ -87,12 +82,7 @@ SELECT * FROM t2 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Name t is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228." - } -} +Name t is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228. -- !query @@ -146,12 +136,7 @@ SELECT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Name t is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228." - } -} +Name t is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228. -- !query @@ -166,12 +151,7 @@ SELECT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Name t is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228." - } -} +Name t is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228. -- !query @@ -187,12 +167,7 @@ SELECT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Name t is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228." - } -} +Name t is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228. -- !query @@ -206,12 +181,7 @@ WHERE c IN ( struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Name t is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228." - } -} +Name t is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228. 
-- !query @@ -240,12 +210,7 @@ SELECT * FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Name aBc is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228." - } -} +Name aBc is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228. -- !query @@ -258,12 +223,7 @@ SELECT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Name aBc is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228." - } -} +Name aBc is ambiguous in nested CTE. Please set spark.sql.legacy.ctePrecedencePolicy to CORRECTED so that name defined in inner CTE takes precedence. If set it to LEGACY, outer CTE definitions will take precedence. See more details in SPARK-28228. -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/cte.sql.out b/sql/core/src/test/resources/sql-tests/results/cte.sql.out index f7765b5dd305c..f88fd884ab782 100644 --- a/sql/core/src/test/resources/sql-tests/results/cte.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/cte.sql.out @@ -21,12 +21,7 @@ WITH s AS (SELECT 1 FROM s) SELECT * FROM s struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Table or view not found: s; line 1 pos 25" - } -} +Table or view not found: s; line 1 pos 25 -- !query @@ -36,12 +31,7 @@ SELECT * FROM r struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Table or view not found: r; line 1 pos 33" - } -} +Table or view not found: r; line 1 pos 33 -- !query @@ -60,12 +50,7 @@ WITH s1 AS (SELECT 1 FROM s2), s2 AS (SELECT 1 FROM s1) SELECT * FROM s1, s2 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Table or view not found: s2; line 1 pos 26" - } -} +Table or view not found: s2; line 1 pos 26 -- !query @@ -163,12 +148,15 @@ SELECT * FROM t struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCTE definition can't have duplicate names: 't'.(line 1, pos 0)\n\n== SQL ==\nWITH\n^^^\n t(x) AS (SELECT 1),\n t(x) AS (SELECT 2)\nSELECT * FROM t\n" - } -} + +CTE definition can't have duplicate names: 't'.(line 1, pos 0) + +== SQL == +WITH +^^^ + t(x) AS (SELECT 1), + t(x) AS (SELECT 2) +SELECT * FROM t -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/date.sql.out b/sql/core/src/test/resources/sql-tests/results/date.sql.out index 6181910296b97..e1dbb7799e672 100644 --- a/sql/core/src/test/resources/sql-tests/results/date.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/date.sql.out @@ -21,12 +21,12 @@ select date '2020-01-01中文' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : 
"LEGACY", - "messageParameters" : { - "message" : "\nCannot parse the DATE value: 2020-01-01中文(line 1, pos 7)\n\n== SQL ==\nselect date '2020-01-01中文'\n-------^^^\n" - } -} + +Cannot parse the DATE value: 2020-01-01中文(line 1, pos 7) + +== SQL == +select date '2020-01-01中文' +-------^^^ -- !query @@ -59,12 +59,12 @@ select date'015' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot parse the DATE value: 015(line 1, pos 7)\n\n== SQL ==\nselect date'015'\n-------^^^\n" - } -} + +Cannot parse the DATE value: 015(line 1, pos 7) + +== SQL == +select date'015' +-------^^^ -- !query @@ -73,12 +73,12 @@ select date'2021-4294967297-11' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot parse the DATE value: 2021-4294967297-11(line 1, pos 7)\n\n== SQL ==\nselect date'2021-4294967297-11'\n-------^^^\n" - } -} + +Cannot parse the DATE value: 2021-4294967297-11(line 1, pos 7) + +== SQL == +select date'2021-4294967297-11' +-------^^^ -- !query @@ -281,12 +281,7 @@ select date_add('2011-11-11', 1L) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1L)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1L' is of bigint type.; line 1 pos 7" - } -} +cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1L)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1L' is of bigint type.; line 1 pos 7 -- !query @@ -295,12 +290,7 @@ select date_add('2011-11-11', 1.0) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1.0BD)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7" - } -} +cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1.0BD)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7 -- !query @@ -309,12 +299,7 @@ select date_add('2011-11-11', 1E1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7" - } -} +cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7 -- !query @@ -410,12 +395,7 @@ select date_sub('2011-11-11', 1L) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 1L)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1L' is of bigint type.; line 1 pos 7" - } -} +cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 1L)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1L' is of bigint type.; line 1 pos 7 -- !query @@ -424,12 +404,7 @@ 
select date_sub('2011-11-11', 1.0)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 1.0BD)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7"
-  }
-}
+cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 1.0BD)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7


 -- !query
@@ -438,12 +413,7 @@ select date_sub('2011-11-11', 1E1)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7"
-  }
-}
+cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7


 -- !query
@@ -507,12 +477,7 @@ select date_add('2011-11-11', int_str) from date_view
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve 'date_add(CAST('2011-11-11' AS DATE), date_view.int_str)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'date_view.int_str' is of string type.; line 1 pos 7"
-  }
-}
+cannot resolve 'date_add(CAST('2011-11-11' AS DATE), date_view.int_str)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'date_view.int_str' is of string type.; line 1 pos 7


 -- !query
@@ -521,12 +486,7 @@ select date_sub('2011-11-11', int_str) from date_view
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), date_view.int_str)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'date_view.int_str' is of string type.; line 1 pos 7"
-  }
-}
+cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), date_view.int_str)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'date_view.int_str' is of string type.; line 1 pos 7


 -- !query
@@ -551,12 +511,7 @@ select date '2011-11-11' + 1E1
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve 'date_add(DATE '2011-11-11', 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7"
-  }
-}
+cannot resolve 'date_add(DATE '2011-11-11', 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7


 -- !query
@@ -597,12 +552,7 @@ select date '2001-10-01' - '2001-09-28'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve 'date_sub(DATE '2001-10-01', CAST('2001-09-28' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('2001-09-28' AS DOUBLE)' is of double type.; line 1 pos 7"
-  }
-}
+cannot resolve 'date_sub(DATE '2001-10-01', CAST('2001-09-28' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('2001-09-28' AS DOUBLE)' is of double type.; line 1 pos 7


 -- !query
@@ -643,12 +593,7 @@ select date '2001-09-28' - date_str from date_view
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve 'date_sub(DATE '2001-09-28', CAST(date_view.date_str AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(date_view.date_str AS DOUBLE)' is of double type.; line 1 pos 7"
-  }
-}
+cannot resolve 'date_sub(DATE '2001-09-28', CAST(date_view.date_str AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(date_view.date_str AS DOUBLE)' is of double type.; line 1 pos 7


 -- !query
@@ -657,12 +602,7 @@ select date'2011-11-11' + '1'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve 'date_add(DATE '2011-11-11', CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7"
-  }
-}
+cannot resolve 'date_add(DATE '2011-11-11', CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7


 -- !query
@@ -671,12 +611,7 @@ select '1' + date'2011-11-11'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve 'date_add(DATE '2011-11-11', CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7"
-  }
-}
+cannot resolve 'date_add(DATE '2011-11-11', CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/datetime-formatting-invalid.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime-formatting-invalid.sql.out
index ff53bc93f14f1..da0a801c4badc 100644
--- a/sql/core/src/test/resources/sql-tests/results/datetime-formatting-invalid.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/datetime-formatting-invalid.sql.out
@@ -37,12 +37,7 @@ select date_format('2018-11-17 13:33:33.333', 'qqqqq')
 struct<>
 -- !query output
 java.lang.IllegalArgumentException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Too many pattern letters: q"
-  }
-}
+Too many pattern letters: q


 -- !query
@@ -51,12 +46,7 @@ select date_format('2018-11-17 13:33:33.333', 'QQQQQ')
 struct<>
 -- !query output
 java.lang.IllegalArgumentException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Too many pattern letters: Q"
-  }
-}
+Too many pattern letters: Q


 -- !query
@@ -289,12 +279,7 @@ select date_format('2018-11-17 13:33:33.333', 'V')
 struct<>
 -- !query output
 java.lang.IllegalArgumentException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Pattern letter count must be 2: V"
-  }
-}
+Pattern letter count must be 2: V


 -- !query
@@ -319,12 +304,7 @@ select date_format('2018-11-17 13:33:33.333', 'XXXXXX')
 struct<>
 -- !query output
java.lang.IllegalArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Too many pattern letters: X" - } -} +Too many pattern letters: X -- !query @@ -349,12 +329,7 @@ select date_format('2018-11-17 13:33:33.333', 'OO') struct<> -- !query output java.lang.IllegalArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Pattern letter count must be 1 or 4: O" - } -} +Pattern letter count must be 1 or 4: O -- !query @@ -363,12 +338,7 @@ select date_format('2018-11-17 13:33:33.333', 'xxxxxx') struct<> -- !query output java.lang.IllegalArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Too many pattern letters: x" - } -} +Too many pattern letters: x -- !query @@ -377,12 +347,7 @@ select date_format('2018-11-17 13:33:33.333', 'A') struct<> -- !query output java.lang.IllegalArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Illegal pattern character: A" - } -} +Illegal pattern character: A -- !query @@ -391,12 +356,7 @@ select date_format('2018-11-17 13:33:33.333', 'n') struct<> -- !query output java.lang.IllegalArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Illegal pattern character: n" - } -} +Illegal pattern character: n -- !query @@ -405,12 +365,7 @@ select date_format('2018-11-17 13:33:33.333', 'N') struct<> -- !query output java.lang.IllegalArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Illegal pattern character: N" - } -} +Illegal pattern character: N -- !query @@ -419,12 +374,7 @@ select date_format('2018-11-17 13:33:33.333', 'p') struct<> -- !query output java.lang.IllegalArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Illegal pattern character: p" - } -} +Illegal pattern character: p -- !query @@ -497,12 +447,7 @@ select date_format('2018-11-17 13:33:33.333', 'e') struct<> -- !query output java.lang.IllegalArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "All week-based patterns are unsupported since Spark 3.0, detected: e, Please use the SQL function EXTRACT instead" - } -} +All week-based patterns are unsupported since Spark 3.0, detected: e, Please use the SQL function EXTRACT instead -- !query @@ -511,12 +456,7 @@ select date_format('2018-11-17 13:33:33.333', 'c') struct<> -- !query output java.lang.IllegalArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "All week-based patterns are unsupported since Spark 3.0, detected: c, Please use the SQL function EXTRACT instead" - } -} +All week-based patterns are unsupported since Spark 3.0, detected: c, Please use the SQL function EXTRACT instead -- !query @@ -525,12 +465,7 @@ select date_format('2018-11-17 13:33:33.333', 'B') struct<> -- !query output java.lang.IllegalArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Illegal pattern character: B" - } -} +Illegal pattern character: B -- !query @@ -539,12 +474,7 @@ select date_format('2018-11-17 13:33:33.333', 'C') struct<> -- !query output java.lang.IllegalArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Unknown pattern letter: C" - } -} +Unknown pattern letter: C -- !query @@ -553,9 +483,4 @@ select date_format('2018-11-17 13:33:33.333', 'I') struct<> -- !query output java.lang.IllegalArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : 
"Unknown pattern letter: I" - } -} +Unknown pattern letter: I diff --git a/sql/core/src/test/resources/sql-tests/results/datetime-formatting-legacy.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime-formatting-legacy.sql.out index 811d634bfdf75..ed43af9cb518e 100644 --- a/sql/core/src/test/resources/sql-tests/results/datetime-formatting-legacy.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/datetime-formatting-legacy.sql.out @@ -48,12 +48,7 @@ select col, date_format(col, 'q qq') from v struct<> -- !query output java.lang.IllegalArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Illegal pattern character 'q'" - } -} +Illegal pattern character 'q' -- !query @@ -62,12 +57,7 @@ select col, date_format(col, 'Q QQ QQQ QQQQ') from v struct<> -- !query output java.lang.IllegalArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Illegal pattern character 'Q'" - } -} +Illegal pattern character 'Q' -- !query @@ -280,12 +270,7 @@ select col, date_format(col, 'VV') from v struct<> -- !query output java.lang.IllegalArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Illegal pattern character 'V'" - } -} +Illegal pattern character 'V' -- !query @@ -322,12 +307,7 @@ select col, date_format(col, 'XXXX XXXXX') from v struct<> -- !query output java.lang.IllegalArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "invalid ISO 8601 format: length=4" - } -} +invalid ISO 8601 format: length=4 -- !query @@ -350,12 +330,7 @@ select col, date_format(col, 'O OOOO') from v struct<> -- !query output java.lang.IllegalArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Illegal pattern character 'O'" - } -} +Illegal pattern character 'O' -- !query @@ -364,12 +339,7 @@ select col, date_format(col, 'x xx xxx xxxx xxxx xxxxx') from v struct<> -- !query output java.lang.IllegalArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Illegal pattern character 'x'" - } -} +Illegal pattern character 'x' -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out index 28672242471e9..e801a165d2e14 100644 --- a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out @@ -21,12 +21,12 @@ select date '2020-01-01中文' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot parse the DATE value: 2020-01-01中文(line 1, pos 7)\n\n== SQL ==\nselect date '2020-01-01中文'\n-------^^^\n" - } -} + +Cannot parse the DATE value: 2020-01-01中文(line 1, pos 7) + +== SQL == +select date '2020-01-01中文' +-------^^^ -- !query @@ -59,12 +59,12 @@ select date'015' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot parse the DATE value: 015(line 1, pos 7)\n\n== SQL ==\nselect date'015'\n-------^^^\n" - } -} + +Cannot parse the DATE value: 015(line 1, pos 7) + +== SQL == +select date'015' +-------^^^ -- !query @@ -73,12 +73,12 @@ select date'2021-4294967297-11' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot parse the 
DATE value: 2021-4294967297-11(line 1, pos 7)\n\n== SQL ==\nselect date'2021-4294967297-11'\n-------^^^\n" - } -} + +Cannot parse the DATE value: 2021-4294967297-11(line 1, pos 7) + +== SQL == +select date'2021-4294967297-11' +-------^^^ -- !query @@ -281,12 +281,7 @@ select date_add('2011-11-11', 1L) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1L)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1L' is of bigint type.; line 1 pos 7" - } -} +cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1L)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1L' is of bigint type.; line 1 pos 7 -- !query @@ -295,12 +290,7 @@ select date_add('2011-11-11', 1.0) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1.0BD)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7" - } -} +cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1.0BD)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7 -- !query @@ -309,12 +299,7 @@ select date_add('2011-11-11', 1E1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7" - } -} +cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7 -- !query @@ -410,12 +395,7 @@ select date_sub('2011-11-11', 1L) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 1L)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1L' is of bigint type.; line 1 pos 7" - } -} +cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 1L)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1L' is of bigint type.; line 1 pos 7 -- !query @@ -424,12 +404,7 @@ select date_sub('2011-11-11', 1.0) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 1.0BD)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7" - } -} +cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 1.0BD)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7 -- !query @@ -438,12 +413,7 @@ select date_sub('2011-11-11', 1E1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or 
tinyint) type, however, '10.0D' is of double type.; line 1 pos 7" - } -} +cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7 -- !query @@ -507,12 +477,7 @@ select date_add('2011-11-11', int_str) from date_view struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'date_add(CAST('2011-11-11' AS DATE), date_view.int_str)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'date_view.int_str' is of string type.; line 1 pos 7" - } -} +cannot resolve 'date_add(CAST('2011-11-11' AS DATE), date_view.int_str)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'date_view.int_str' is of string type.; line 1 pos 7 -- !query @@ -521,12 +486,7 @@ select date_sub('2011-11-11', int_str) from date_view struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), date_view.int_str)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'date_view.int_str' is of string type.; line 1 pos 7" - } -} +cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), date_view.int_str)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'date_view.int_str' is of string type.; line 1 pos 7 -- !query @@ -551,12 +511,7 @@ select date '2011-11-11' + 1E1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'date_add(DATE '2011-11-11', 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7" - } -} +cannot resolve 'date_add(DATE '2011-11-11', 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7 -- !query @@ -597,12 +552,7 @@ select date '2001-10-01' - '2001-09-28' struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'date_sub(DATE '2001-10-01', CAST('2001-09-28' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('2001-09-28' AS DOUBLE)' is of double type.; line 1 pos 7" - } -} +cannot resolve 'date_sub(DATE '2001-10-01', CAST('2001-09-28' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('2001-09-28' AS DOUBLE)' is of double type.; line 1 pos 7 -- !query @@ -643,12 +593,7 @@ select date '2001-09-28' - date_str from date_view struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'date_sub(DATE '2001-09-28', CAST(date_view.date_str AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(date_view.date_str AS DOUBLE)' is of double type.; line 1 pos 7" - } -} +cannot resolve 'date_sub(DATE '2001-09-28', CAST(date_view.date_str AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(date_view.date_str AS DOUBLE)' is of double type.; line 1 pos 
7 -- !query @@ -657,12 +602,7 @@ select date'2011-11-11' + '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'date_add(DATE '2011-11-11', CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7" - } -} +cannot resolve 'date_add(DATE '2011-11-11', CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7 -- !query @@ -671,12 +611,7 @@ select '1' + date'2011-11-11' struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'date_add(DATE '2011-11-11', CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7" - } -} +cannot resolve 'date_add(DATE '2011-11-11', CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7 -- !query @@ -906,12 +841,12 @@ select timestamp '2019-01-01中文' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot parse the TIMESTAMP value: 2019-01-01中文(line 1, pos 7)\n\n== SQL ==\nselect timestamp '2019-01-01中文'\n-------^^^\n" - } -} + +Cannot parse the TIMESTAMP value: 2019-01-01中文(line 1, pos 7) + +== SQL == +select timestamp '2019-01-01中文' +-------^^^ -- !query @@ -920,12 +855,12 @@ select timestamp'4294967297' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot parse the TIMESTAMP value: 4294967297(line 1, pos 7)\n\n== SQL ==\nselect timestamp'4294967297'\n-------^^^\n" - } -} + +Cannot parse the TIMESTAMP value: 4294967297(line 1, pos 7) + +== SQL == +select timestamp'4294967297' +-------^^^ -- !query @@ -934,12 +869,12 @@ select timestamp'2021-01-01T12:30:4294967297.123456' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot parse the TIMESTAMP value: 2021-01-01T12:30:4294967297.123456(line 1, pos 7)\n\n== SQL ==\nselect timestamp'2021-01-01T12:30:4294967297.123456'\n-------^^^\n" - } -} + +Cannot parse the TIMESTAMP value: 2021-01-01T12:30:4294967297.123456(line 1, pos 7) + +== SQL == +select timestamp'2021-01-01T12:30:4294967297.123456' +-------^^^ -- !query @@ -1084,12 +1019,7 @@ select TIMESTAMP_SECONDS(1230219000123123) struct<> -- !query output java.lang.ArithmeticException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "long overflow" - } -} +long overflow -- !query @@ -1098,12 +1028,7 @@ select TIMESTAMP_SECONDS(-1230219000123123) struct<> -- !query output java.lang.ArithmeticException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "long overflow" - } -} +long overflow -- !query @@ -1112,12 +1037,7 @@ select TIMESTAMP_MILLIS(92233720368547758) struct<> -- !query output java.lang.ArithmeticException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "long overflow" - } -} +long overflow -- !query @@ -1126,12 +1046,7 @@ select 
TIMESTAMP_MILLIS(-92233720368547758) struct<> -- !query output java.lang.ArithmeticException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "long overflow" - } -} +long overflow -- !query @@ -1140,12 +1055,7 @@ select TIMESTAMP_SECONDS(0.1234567) struct<> -- !query output java.lang.ArithmeticException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Rounding necessary" - } -} +Rounding necessary -- !query @@ -1523,12 +1433,7 @@ select timestamp'2011-11-11 11:11:11' - '2011-11-11 11:11:10' struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' - '2011-11-11 11:11:10')' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, ''2011-11-11 11:11:10'' is of string type.; line 1 pos 7" - } -} +cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' - '2011-11-11 11:11:10')' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, ''2011-11-11 11:11:10'' is of string type.; line 1 pos 7 -- !query @@ -1537,12 +1442,7 @@ select '2011-11-11 11:11:11' - timestamp'2011-11-11 11:11:10' struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '('2011-11-11 11:11:11' - TIMESTAMP '2011-11-11 11:11:10')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, ''2011-11-11 11:11:11'' is of string type.; line 1 pos 7" - } -} +cannot resolve '('2011-11-11 11:11:11' - TIMESTAMP '2011-11-11 11:11:10')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, ''2011-11-11 11:11:11'' is of string type.; line 1 pos 7 -- !query @@ -1575,12 +1475,7 @@ select str - timestamp'2011-11-11 11:11:11' from ts_view struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(ts_view.str - TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'ts_view.str' is of string type.; line 1 pos 7" - } -} +cannot resolve '(ts_view.str - TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'ts_view.str' is of string type.; line 1 pos 7 -- !query @@ -1589,12 +1484,7 @@ select timestamp'2011-11-11 11:11:11' - str from ts_view struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' - ts_view.str)' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'ts_view.str' is of string type.; line 1 pos 7" - } -} +cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' - ts_view.str)' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'ts_view.str' is of string type.; line 1 pos 7 -- !query @@ -1603,12 +1493,7 @@ select timestamp'2011-11-11 11:11:11' + '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' + CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(TIMESTAMP 
'2011-11-11 11:11:11' + CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7" - } -} +cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' + CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(TIMESTAMP '2011-11-11 11:11:11' + CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7 -- !query @@ -1617,12 +1502,7 @@ select '1' + timestamp'2011-11-11 11:11:11' struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS DOUBLE) + TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) + TIMESTAMP '2011-11-11 11:11:11')' (double and timestamp).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS DOUBLE) + TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) + TIMESTAMP '2011-11-11 11:11:11')' (double and timestamp).; line 1 pos 7 -- !query @@ -1631,12 +1511,7 @@ select timestamp'2011-11-11 11:11:11' + null struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' + NULL)' due to data type mismatch: differing types in '(TIMESTAMP '2011-11-11 11:11:11' + NULL)' (timestamp and void).; line 1 pos 7" - } -} +cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' + NULL)' due to data type mismatch: differing types in '(TIMESTAMP '2011-11-11 11:11:11' + NULL)' (timestamp and void).; line 1 pos 7 -- !query @@ -1645,12 +1520,7 @@ select null + timestamp'2011-11-11 11:11:11' struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(NULL + TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(NULL + TIMESTAMP '2011-11-11 11:11:11')' (void and timestamp).; line 1 pos 7" - } -} +cannot resolve '(NULL + TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(NULL + TIMESTAMP '2011-11-11 11:11:11')' (void and timestamp).; line 1 pos 7 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out index dd88c8c0507ce..79cf395abbabb 100644 --- a/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out @@ -5,12 +5,7 @@ select to_timestamp('294248', 'y') struct<> -- !query output java.lang.ArithmeticException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "long overflow" - } -} +long overflow -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/describe.sql.out b/sql/core/src/test/resources/sql-tests/results/describe.sql.out index 08ed26cc9f120..5cbc80d932e85 100644 --- a/sql/core/src/test/resources/sql-tests/results/describe.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/describe.sql.out @@ -362,12 +362,9 @@ DESC t PARTITION (c='Us', d=2) struct<> -- !query output org.apache.spark.sql.catalyst.analysis.NoSuchPartitionException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Partition not found in table 't' database 'default':\nc -> Us\nd -> 2" - } -} +Partition not found in table 't' database 'default': +c -> Us +d -> 2 -- !query @@ -376,12 +373,7 @@ DESC t PARTITION (c='Us') struct<> -- !query output 
org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Partition spec is invalid. The spec (c) must match the partition spec (c, d) defined in table '`spark_catalog`.`default`.`t`'" - } -} +Partition spec is invalid. The spec (c) must match the partition spec (c, d) defined in table '`spark_catalog`.`default`.`t`' -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/except-all.sql.out b/sql/core/src/test/resources/sql-tests/results/except-all.sql.out index 4c9b0345bf647..c510ad1d8314d 100644 --- a/sql/core/src/test/resources/sql-tests/results/except-all.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/except-all.sql.out @@ -138,12 +138,7 @@ SELECT array(1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "ExceptAll can only be performed on tables with the compatible column types. The first column of the second table is array type which is not compatible with int at same column of first table" - } -} +ExceptAll can only be performed on tables with the compatible column types. The first column of the second table is array type which is not compatible with int at same column of first table -- !query @@ -215,12 +210,7 @@ SELECT k, v FROM tab4 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "ExceptAll can only be performed on tables with the same number of columns, but the first table has 1 columns and the second table has 2 columns" - } -} +ExceptAll can only be performed on tables with the same number of columns, but the first table has 1 columns and the second table has 2 columns -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/extract.sql.out b/sql/core/src/test/resources/sql-tests/results/extract.sql.out index 5a9511feda72b..890c31f81dbd4 100644 --- a/sql/core/src/test/resources/sql-tests/results/extract.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/extract.sql.out @@ -317,12 +317,7 @@ select extract(not_supported from c) from t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Literals of type 'not_supported' are currently not supported for the string type.; line 1 pos 7" - } -} +Literals of type 'not_supported' are currently not supported for the string type.; line 1 pos 7 -- !query @@ -331,12 +326,7 @@ select extract(not_supported from i) from t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Literals of type 'not_supported' are currently not supported for the interval year to month type.; line 1 pos 7" - } -} +Literals of type 'not_supported' are currently not supported for the interval year to month type.; line 1 pos 7 -- !query @@ -345,12 +335,7 @@ select extract(not_supported from j) from t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Literals of type 'not_supported' are currently not supported for the interval day to second type.; line 1 pos 7" - } -} +Literals of type 'not_supported' are currently not supported for the interval day to second type.; line 1 pos 7 -- !query @@ -663,12 +648,7 @@ select date_part('not_supported', c) from t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - 
"messageParameters" : { - "message" : "Literals of type 'not_supported' are currently not supported for the string type.; line 1 pos 7" - } -} +Literals of type 'not_supported' are currently not supported for the string type.; line 1 pos 7 -- !query @@ -677,12 +657,7 @@ select date_part(c, c) from t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "The 'field' parameter of function 'date_part' needs to be a string literal.; line 1 pos 7" - } -} +The 'field' parameter of function 'date_part' needs to be a string literal.; line 1 pos 7 -- !query @@ -699,12 +674,7 @@ select date_part(i, i) from t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "The 'field' parameter of function 'date_part' needs to be a string literal.; line 1 pos 7" - } -} +The 'field' parameter of function 'date_part' needs to be a string literal.; line 1 pos 7 -- !query @@ -913,12 +883,7 @@ select extract(DAY from interval '2-1' YEAR TO MONTH) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Literals of type 'DAY' are currently not supported for the interval year to month type.; line 1 pos 7" - } -} +Literals of type 'DAY' are currently not supported for the interval year to month type.; line 1 pos 7 -- !query @@ -927,12 +892,7 @@ select date_part('DAY', interval '2-1' YEAR TO MONTH) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Literals of type 'DAY' are currently not supported for the interval year to month type.; line 1 pos 7" - } -} +Literals of type 'DAY' are currently not supported for the interval year to month type.; line 1 pos 7 -- !query @@ -941,12 +901,7 @@ select date_part('not_supported', interval '2-1' YEAR TO MONTH) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Literals of type 'not_supported' are currently not supported for the interval year to month type.; line 1 pos 7" - } -} +Literals of type 'not_supported' are currently not supported for the interval year to month type.; line 1 pos 7 -- !query @@ -1059,12 +1014,7 @@ select extract(MONTH from interval '123 12:34:56.789123123' DAY TO SECOND) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Literals of type 'MONTH' are currently not supported for the interval day to second type.; line 1 pos 7" - } -} +Literals of type 'MONTH' are currently not supported for the interval day to second type.; line 1 pos 7 -- !query @@ -1073,9 +1023,4 @@ select date_part('not_supported', interval '123 12:34:56.789123123' DAY TO SECON struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Literals of type 'not_supported' are currently not supported for the interval day to second type.; line 1 pos 7" - } -} +Literals of type 'not_supported' are currently not supported for the interval day to second type.; line 1 pos 7 diff --git a/sql/core/src/test/resources/sql-tests/results/group-analytics.sql.out b/sql/core/src/test/resources/sql-tests/results/group-analytics.sql.out index 00b044043e081..1090fd27603e4 100644 --- 
a/sql/core/src/test/resources/sql-tests/results/group-analytics.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/group-analytics.sql.out @@ -131,12 +131,12 @@ SELECT course, year, SUM(earnings) FROM courseSales GROUP BY ROLLUP(course, year struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nEmpty set in ROLLUP grouping sets is not supported.(line 1, pos 61)\n\n== SQL ==\nSELECT course, year, SUM(earnings) FROM courseSales GROUP BY ROLLUP(course, year, (course, year), ()) ORDER BY course, year\n-------------------------------------------------------------^^^\n" - } -} + +Empty set in ROLLUP grouping sets is not supported.(line 1, pos 61) + +== SQL == +SELECT course, year, SUM(earnings) FROM courseSales GROUP BY ROLLUP(course, year, (course, year), ()) ORDER BY course, year +-------------------------------------------------------------^^^ -- !query @@ -193,12 +193,12 @@ SELECT course, year, SUM(earnings) FROM courseSales GROUP BY CUBE(course, year, struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nEmpty set in CUBE grouping sets is not supported.(line 1, pos 61)\n\n== SQL ==\nSELECT course, year, SUM(earnings) FROM courseSales GROUP BY CUBE(course, year, (course, year), ()) ORDER BY course, year\n-------------------------------------------------------------^^^\n" - } -} + +Empty set in CUBE grouping sets is not supported.(line 1, pos 61) + +== SQL == +SELECT course, year, SUM(earnings) FROM courseSales GROUP BY CUBE(course, year, (course, year), ()) ORDER BY course, year +-------------------------------------------------------------^^^ -- !query @@ -451,12 +451,7 @@ SELECT course, year, GROUPING(course) FROM courseSales GROUP BY course, year struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "grouping() can only be used with GroupingSets/Cube/Rollup" - } -} +grouping() can only be used with GroupingSets/Cube/Rollup -- !query @@ -465,12 +460,7 @@ SELECT course, year, GROUPING_ID(course, year) FROM courseSales GROUP BY course, struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "grouping_id() can only be used with GroupingSets/Cube/Rollup" - } -} +grouping_id() can only be used with GroupingSets/Cube/Rollup -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/group-by-filter.sql.out b/sql/core/src/test/resources/sql-tests/results/group-by-filter.sql.out index a05dda52caac8..cd47c70ea8591 100644 --- a/sql/core/src/test/resources/sql-tests/results/group-by-filter.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/group-by-filter.sql.out @@ -48,12 +48,7 @@ SELECT a, COUNT(b) FILTER (WHERE a >= 2) FROM testData struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "grouping expressions sequence is empty, and 'testdata.a' is not an aggregate function. Wrap '(count(testdata.b) FILTER (WHERE (testdata.a >= 2)) AS `count(b) FILTER (WHERE (a >= 2))`)' in windowing function(s) or wrap 'testdata.a' in first() (or first_value) if you don't care which value you get." - } -} +grouping expressions sequence is empty, and 'testdata.a' is not an aggregate function. 
Wrap '(count(testdata.b) FILTER (WHERE (testdata.a >= 2)) AS `count(b) FILTER (WHERE (a >= 2))`)' in windowing function(s) or wrap 'testdata.a' in first() (or first_value) if you don't care which value you get. -- !query @@ -233,12 +228,7 @@ SELECT a, COUNT(b) FILTER (WHERE a != 2) FROM testData GROUP BY b struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "expression 'testdata.a' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get." - } -} +expression 'testdata.a' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get. -- !query @@ -718,12 +708,7 @@ SELECT a + 2, COUNT(b) FILTER (WHERE b IN (1, 2)) FROM testData GROUP BY a + 1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "expression 'testdata.a' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get." - } -} +expression 'testdata.a' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get. -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/group-by-ordinal.sql.out b/sql/core/src/test/resources/sql-tests/results/group-by-ordinal.sql.out index 7761cd1ad7369..0e4ec436b3b7c 100644 --- a/sql/core/src/test/resources/sql-tests/results/group-by-ordinal.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/group-by-ordinal.sql.out @@ -92,12 +92,7 @@ select a, b from data group by -1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "GROUP BY position -1 is not in select list (valid range is [1, 2]); line 1 pos 31" - } -} +GROUP BY position -1 is not in select list (valid range is [1, 2]); line 1 pos 31 -- !query @@ -106,12 +101,7 @@ select a, b from data group by 0 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "GROUP BY position 0 is not in select list (valid range is [1, 2]); line 1 pos 31" - } -} +GROUP BY position 0 is not in select list (valid range is [1, 2]); line 1 pos 31 -- !query @@ -120,12 +110,7 @@ select a, b from data group by 3 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "GROUP BY position 3 is not in select list (valid range is [1, 2]); line 1 pos 31" - } -} +GROUP BY position 3 is not in select list (valid range is [1, 2]); line 1 pos 31 -- !query @@ -134,12 +119,7 @@ select a, b, sum(b) from data group by 3 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "GROUP BY 3 refers to an expression that is or contains an aggregate function. Aggregate functions are not allowed in GROUP BY, but got sum(data.b) AS `sum(b)`; line 1 pos 39" - } -} +GROUP BY 3 refers to an expression that is or contains an aggregate function. 
Aggregate functions are not allowed in GROUP BY, but got sum(data.b) AS `sum(b)`; line 1 pos 39 -- !query @@ -148,12 +128,7 @@ select a, b, sum(b) + 2 from data group by 3 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "GROUP BY 3 refers to an expression that is or contains an aggregate function. Aggregate functions are not allowed in GROUP BY, but got (sum(data.b) + CAST(2 AS BIGINT)) AS `(sum(b) + 2)`; line 1 pos 43" - } -} +GROUP BY 3 refers to an expression that is or contains an aggregate function. Aggregate functions are not allowed in GROUP BY, but got (sum(data.b) + CAST(2 AS BIGINT)) AS `(sum(b) + 2)`; line 1 pos 43 -- !query @@ -177,12 +152,7 @@ select * from data group by a, b, 1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Star (*) is not allowed in select list when GROUP BY ordinal position is used" - } -} +Star (*) is not allowed in select list when GROUP BY ordinal position is used -- !query @@ -379,12 +349,7 @@ select a, b, count(1) from data group by a, -1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "GROUP BY position -1 is not in select list (valid range is [1, 3]); line 1 pos 44" - } -} +GROUP BY position -1 is not in select list (valid range is [1, 3]); line 1 pos 44 -- !query @@ -393,12 +358,7 @@ select a, b, count(1) from data group by a, 3 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "GROUP BY 3 refers to an expression that is or contains an aggregate function. Aggregate functions are not allowed in GROUP BY, but got count(1) AS `count(1)`; line 1 pos 44" - } -} +GROUP BY 3 refers to an expression that is or contains an aggregate function. Aggregate functions are not allowed in GROUP BY, but got count(1) AS `count(1)`; line 1 pos 44 -- !query @@ -407,12 +367,7 @@ select a, b, count(1) from data group by cube(-1, 2) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "GROUP BY position -1 is not in select list (valid range is [1, 3]); line 1 pos 46" - } -} +GROUP BY position -1 is not in select list (valid range is [1, 3]); line 1 pos 46 -- !query @@ -421,12 +376,7 @@ select a, b, count(1) from data group by cube(1, 3) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "GROUP BY 3 refers to an expression that is or contains an aggregate function. Aggregate functions are not allowed in GROUP BY, but got count(1) AS `count(1)`; line 1 pos 49" - } -} +GROUP BY 3 refers to an expression that is or contains an aggregate function. 
Aggregate functions are not allowed in GROUP BY, but got count(1) AS `count(1)`; line 1 pos 49 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/group-by.sql.out b/sql/core/src/test/resources/sql-tests/results/group-by.sql.out index 1b53894949056..2b5e8cfcc5095 100644 --- a/sql/core/src/test/resources/sql-tests/results/group-by.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/group-by.sql.out @@ -15,12 +15,7 @@ SELECT a, COUNT(b) FROM testData struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "grouping expressions sequence is empty, and 'testdata.a' is not an aggregate function. Wrap '(count(testdata.b) AS `count(b)`)' in windowing function(s) or wrap 'testdata.a' in first() (or first_value) if you don't care which value you get." - } -} +grouping expressions sequence is empty, and 'testdata.a' is not an aggregate function. Wrap '(count(testdata.b) AS `count(b)`)' in windowing function(s) or wrap 'testdata.a' in first() (or first_value) if you don't care which value you get. -- !query @@ -48,12 +43,7 @@ SELECT a, COUNT(b) FROM testData GROUP BY b struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "expression 'testdata.a' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get." - } -} +expression 'testdata.a' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get. -- !query @@ -117,12 +107,7 @@ SELECT a + 2, COUNT(b) FROM testData GROUP BY a + 1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "expression 'testdata.a' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get." - } -} +expression 'testdata.a' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get. -- !query @@ -195,12 +180,7 @@ SELECT COUNT(b) AS k FROM testData GROUP BY k struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "aggregate functions are not allowed in GROUP BY, but found count(testdata.b)" - } -} +aggregate functions are not allowed in GROUP BY, but found count(testdata.b) -- !query @@ -218,12 +198,7 @@ SELECT k AS a, COUNT(v) FROM testDataHasSameNameWithAlias GROUP BY a struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "expression 'testdatahassamenamewithalias.k' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get." - } -} +expression 'testdatahassamenamewithalias.k' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get. 
 -- !query
@@ -319,12 +294,7 @@ SELECT id FROM range(10) HAVING id > 0
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "grouping expressions sequence is empty, and 'id' is not an aggregate function. Wrap '()' in windowing function(s) or wrap 'id' in first() (or first_value) if you don't care which value you get."
-  }
-}
+grouping expressions sequence is empty, and 'id' is not an aggregate function. Wrap '()' in windowing function(s) or wrap 'id' in first() (or first_value) if you don't care which value you get.


 -- !query
@@ -358,12 +328,10 @@ SELECT 1 FROM range(10) HAVING MAX(id) > 0
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nAggregate/Window/Generate expressions are not valid in where clause of the query.\nExpression in where clause: [(max(id) > CAST(0 AS BIGINT))]\nInvalid expressions: [max(id)]"
-  }
-}
+
+Aggregate/Window/Generate expressions are not valid in where clause of the query.
+Expression in where clause: [(max(id) > CAST(0 AS BIGINT))]
+Invalid expressions: [max(id)]


 -- !query
@@ -493,12 +461,7 @@ SELECT every(1)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve 'every(1)' due to data type mismatch: argument 1 requires boolean type, however, '1' is of int type.; line 1 pos 7"
-  }
-}
+cannot resolve 'every(1)' due to data type mismatch: argument 1 requires boolean type, however, '1' is of int type.; line 1 pos 7


 -- !query
@@ -507,12 +470,7 @@ SELECT some(1S)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve 'some(1S)' due to data type mismatch: argument 1 requires boolean type, however, '1S' is of smallint type.; line 1 pos 7"
-  }
-}
+cannot resolve 'some(1S)' due to data type mismatch: argument 1 requires boolean type, however, '1S' is of smallint type.; line 1 pos 7


 -- !query
@@ -521,12 +479,7 @@ SELECT any(1L)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve 'any(1L)' due to data type mismatch: argument 1 requires boolean type, however, '1L' is of bigint type.; line 1 pos 7"
-  }
-}
+cannot resolve 'any(1L)' due to data type mismatch: argument 1 requires boolean type, however, '1L' is of bigint type.; line 1 pos 7


 -- !query
@@ -535,12 +488,7 @@ SELECT every("true")
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve 'every('true')' due to data type mismatch: argument 1 requires boolean type, however, ''true'' is of string type.; line 1 pos 7"
-  }
-}
+cannot resolve 'every('true')' due to data type mismatch: argument 1 requires boolean type, however, ''true'' is of string type.; line 1 pos 7


 -- !query
@@ -549,12 +497,7 @@ SELECT bool_and(1.0)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve 'bool_and(1.0BD)' due to data type mismatch: argument 1 requires boolean type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7"
-  }
-}
+cannot resolve 'bool_and(1.0BD)' due to data type mismatch: argument 1 requires boolean type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7


 -- !query
@@ -563,12 +506,7 @@ SELECT bool_or(1.0D)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve 'bool_or(1.0D)' due to data type mismatch: argument 1 requires boolean type, however, '1.0D' is of double type.; line 1 pos 7"
-  }
-}
+cannot resolve 'bool_or(1.0D)' due to data type mismatch: argument 1 requires boolean type, however, '1.0D' is of double type.; line 1 pos 7


 -- !query
@@ -688,12 +626,10 @@ SELECT count(*) FROM test_agg WHERE count(*) > 1L
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nAggregate/Window/Generate expressions are not valid in where clause of the query.\nExpression in where clause: [(count(1) > 1L)]\nInvalid expressions: [count(1)]"
-  }
-}
+
+Aggregate/Window/Generate expressions are not valid in where clause of the query.
+Expression in where clause: [(count(1) > 1L)]
+Invalid expressions: [count(1)]


 -- !query
@@ -702,12 +638,10 @@ SELECT count(*) FROM test_agg WHERE count(*) + 1L > 1L
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nAggregate/Window/Generate expressions are not valid in where clause of the query.\nExpression in where clause: [((count(1) + 1L) > 1L)]\nInvalid expressions: [count(1)]"
-  }
-}
+
+Aggregate/Window/Generate expressions are not valid in where clause of the query.
+Expression in where clause: [((count(1) + 1L) > 1L)]
+Invalid expressions: [count(1)]


 -- !query
@@ -716,12 +650,10 @@ SELECT count(*) FROM test_agg WHERE k = 1 or k = 2 or count(*) + 1L > 1L or max(
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nAggregate/Window/Generate expressions are not valid in where clause of the query.\nExpression in where clause: [(((test_agg.k = 1) OR (test_agg.k = 2)) OR (((count(1) + 1L) > 1L) OR (max(test_agg.k) > 1)))]\nInvalid expressions: [count(1), max(test_agg.k)]"
-  }
-}
+
+Aggregate/Window/Generate expressions are not valid in where clause of the query.
+Expression in where clause: [(((test_agg.k = 1) OR (test_agg.k = 2)) OR (((count(1) + 1L) > 1L) OR (max(test_agg.k) > 1)))]
+Invalid expressions: [count(1), max(test_agg.k)]


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/grouping_set.sql.out b/sql/core/src/test/resources/sql-tests/results/grouping_set.sql.out
index f0406f587bf15..ff498383d632c 100644
--- a/sql/core/src/test/resources/sql-tests/results/grouping_set.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/grouping_set.sql.out
@@ -166,12 +166,7 @@ SELECT c1 FROM (values (1,2), (3,2)) t(c1, c2) GROUP BY GROUPING SETS (())
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "expression 't.c1' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get."
-  }
-}
+expression 't.c1' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get.
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/having.sql.out b/sql/core/src/test/resources/sql-tests/results/having.sql.out
index c326758f6a895..e9e24562d1ba4 100644
--- a/sql/core/src/test/resources/sql-tests/results/having.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/having.sql.out
@@ -35,12 +35,7 @@ SELECT count(k) FROM hav GROUP BY v HAVING v = array(1)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(hav.v = array(1))' due to data type mismatch: differing types in '(hav.v = array(1))' (int and array<int>).; line 1 pos 43"
-  }
-}
+cannot resolve '(hav.v = array(1))' due to data type mismatch: differing types in '(hav.v = array(1))' (int and array<int>).; line 1 pos 43


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/higher-order-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/higher-order-functions.sql.out
index 7c22d70e216df..c6bbb4fb7179a 100644
--- a/sql/core/src/test/resources/sql-tests/results/higher-order-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/higher-order-functions.sql.out
@@ -17,12 +17,7 @@ select upper(x -> x) as v
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "A lambda function should only be used in a higher order function. However, its class is org.apache.spark.sql.catalyst.expressions.Upper, which is not a higher order function.; line 1 pos 7"
-  }
-}
+A lambda function should only be used in a higher order function. However, its class is org.apache.spark.sql.catalyst.expressions.Upper, which is not a higher order function.; line 1 pos 7


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/ilike-all.sql.out b/sql/core/src/test/resources/sql-tests/results/ilike-all.sql.out
index d3ea06b098fc8..54dd03d32ea50 100644
--- a/sql/core/src/test/resources/sql-tests/results/ilike-all.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ilike-all.sql.out
@@ -129,9 +129,9 @@ SELECT company FROM ilike_any_table WHERE company ILIKE ALL ()
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nExpected something between '(' and ')'.(line 1, pos 50)\n\n== SQL ==\nSELECT company FROM ilike_any_table WHERE company ILIKE ALL ()\n--------------------------------------------------^^^\n"
-  }
-}
+
+Expected something between '(' and ')'.(line 1, pos 50)
+
+== SQL ==
+SELECT company FROM ilike_any_table WHERE company ILIKE ALL ()
+--------------------------------------------------^^^
diff --git a/sql/core/src/test/resources/sql-tests/results/ilike-any.sql.out b/sql/core/src/test/resources/sql-tests/results/ilike-any.sql.out
index 81fa521b7cb1a..91a2f40386457 100644
--- a/sql/core/src/test/resources/sql-tests/results/ilike-any.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ilike-any.sql.out
@@ -135,9 +135,9 @@ SELECT company FROM ilike_any_table WHERE company ILIKE ANY ()
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nExpected something between '(' and ')'.(line 1, pos 50)\n\n== SQL ==\nSELECT company FROM ilike_any_table WHERE company ILIKE ANY ()\n--------------------------------------------------^^^\n"
-  }
-}
+
+Expected something between '(' and ')'.(line 1, pos 50)
+
+== SQL ==
+SELECT company FROM ilike_any_table WHERE company ILIKE ANY ()
+--------------------------------------------------^^^
diff --git a/sql/core/src/test/resources/sql-tests/results/inline-table.sql.out b/sql/core/src/test/resources/sql-tests/results/inline-table.sql.out
index 09554310f9a69..f2eee23a52cdd 100644
--- a/sql/core/src/test/resources/sql-tests/results/inline-table.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/inline-table.sql.out
@@ -110,12 +110,7 @@ select * from values ("one", rand(5)), ("two", 3.0D) as data(a, b)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot evaluate expression rand(5) in inline table definition; line 1 pos 29"
-  }
-}
+cannot evaluate expression rand(5) in inline table definition; line 1 pos 29


 -- !query
@@ -124,12 +119,7 @@ select * from values ("one", 2.0), ("two") as data(a, b)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "expected 2 columns but found 1 columns in row 1; line 1 pos 14"
-  }
-}
+expected 2 columns but found 1 columns in row 1; line 1 pos 14


 -- !query
@@ -138,12 +128,7 @@ select * from values ("one", array(0, 1)), ("two", struct(1, 2)) as data(a, b)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "incompatible types found in column b for inline table; line 1 pos 14"
-  }
-}
+incompatible types found in column b for inline table; line 1 pos 14


 -- !query
@@ -152,12 +137,7 @@ select * from values ("one"), ("two") as data(a, b)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "expected 2 columns but found 1 columns in row 0; line 1 pos 14"
-  }
-}
+expected 2 columns but found 1 columns in row 0; line 1 pos 14


 -- !query
@@ -166,12 +146,7 @@ select * from values ("one", random_not_exist_func(1)), ("two", 2) as data(a, b)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Undefined function: random_not_exist_func. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.random_not_exist_func.; line 1 pos 29"
-  }
-}
+Undefined function: random_not_exist_func. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.random_not_exist_func.; line 1 pos 29


 -- !query
@@ -180,12 +155,7 @@ select * from values ("one", count(1)), ("two", 2) as data(a, b)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot evaluate expression count(1) in inline table definition; line 1 pos 29"
-  }
-}
+cannot evaluate expression count(1) in inline table definition; line 1 pos 29


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/intersect-all.sql.out b/sql/core/src/test/resources/sql-tests/results/intersect-all.sql.out
index 41ac5b34101a9..062c3761d2513 100644
--- a/sql/core/src/test/resources/sql-tests/results/intersect-all.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/intersect-all.sql.out
@@ -95,12 +95,7 @@ SELECT array(1), 2
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "IntersectAll can only be performed on tables with the compatible column types. The first column of the second table is array<int> type which is not compatible with int at same column of first table"
-  }
-}
+IntersectAll can only be performed on tables with the compatible column types. The first column of the second table is array<int> type which is not compatible with int at same column of first table


 -- !query
@@ -111,12 +106,7 @@ SELECT k, v FROM tab2
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "IntersectAll can only be performed on tables with the same number of columns, but the first table has 1 columns and the second table has 2 columns"
-  }
-}
+IntersectAll can only be performed on tables with the same number of columns, but the first table has 1 columns and the second table has 2 columns


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/interval.sql.out
index e54302e3ba05d..5464c69690c2a 100644
--- a/sql/core/src/test/resources/sql-tests/results/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/interval.sql.out
@@ -13,12 +13,12 @@ select interval 4 month 2 weeks 3 microseconds * 1.5
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nCannot mix year-month and day-time fields: interval 4 month 2 weeks 3 microseconds(line 1, pos 7)\n\n== SQL ==\nselect interval 4 month 2 weeks 3 microseconds * 1.5\n-------^^^\n"
-  }
-}
+
+Cannot mix year-month and day-time fields: interval 4 month 2 weeks 3 microseconds(line 1, pos 7)
+
+== SQL ==
+select interval 4 month 2 weeks 3 microseconds * 1.5
+-------^^^


 -- !query
@@ -51,12 +46,7 @@ select interval 2147483647 month * 2
 struct<>
 -- !query output
 java.lang.ArithmeticException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "integer overflow"
-  }
-}
+integer overflow


 -- !query
@@ -65,12 +60,7 @@ select interval 2147483647 month / 0.5
 struct<>
 -- !query output
 java.lang.ArithmeticException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Overflow"
-  }
-}
+Overflow


 -- !query
@@ -79,12 +69,7 @@ select interval 2147483647 day * 2
 struct<>
 -- !query output
 java.lang.ArithmeticException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "long
overflow" - } -} +long overflow -- !query @@ -93,12 +78,7 @@ select interval 2147483647 day / 0.5 struct<> -- !query output java.lang.ArithmeticException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "long overflow" - } -} +long overflow -- !query @@ -203,12 +183,7 @@ select '2' / interval 2 second struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2' AS DOUBLE) / INTERVAL '02' SECOND)' due to data type mismatch: differing types in '(CAST('2' AS DOUBLE) / INTERVAL '02' SECOND)' (double and interval second).; line 1 pos 7" - } -} +cannot resolve '(CAST('2' AS DOUBLE) / INTERVAL '02' SECOND)' due to data type mismatch: differing types in '(CAST('2' AS DOUBLE) / INTERVAL '02' SECOND)' (double and interval second).; line 1 pos 7 -- !query @@ -217,12 +192,7 @@ select '2' / interval 2 year struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2' AS DOUBLE) / INTERVAL '2' YEAR)' due to data type mismatch: differing types in '(CAST('2' AS DOUBLE) / INTERVAL '2' YEAR)' (double and interval year).; line 1 pos 7" - } -} +cannot resolve '(CAST('2' AS DOUBLE) / INTERVAL '2' YEAR)' due to data type mismatch: differing types in '(CAST('2' AS DOUBLE) / INTERVAL '2' YEAR)' (double and interval year).; line 1 pos 7 -- !query @@ -319,12 +289,7 @@ select 2 / interval '2' year struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(2 / INTERVAL '2' YEAR)' due to data type mismatch: differing types in '(2 / INTERVAL '2' YEAR)' (int and interval year).; line 1 pos 7" - } -} +cannot resolve '(2 / INTERVAL '2' YEAR)' due to data type mismatch: differing types in '(2 / INTERVAL '2' YEAR)' (int and interval year).; line 1 pos 7 -- !query @@ -333,12 +298,7 @@ select 2 / interval '2' hour struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(2 / INTERVAL '02' HOUR)' due to data type mismatch: differing types in '(2 / INTERVAL '02' HOUR)' (int and interval hour).; line 1 pos 7" - } -} +cannot resolve '(2 / INTERVAL '02' HOUR)' due to data type mismatch: differing types in '(2 / INTERVAL '02' HOUR)' (int and interval hour).; line 1 pos 7 -- !query @@ -347,12 +307,7 @@ select null / interval '2' year struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(NULL / INTERVAL '2' YEAR)' due to data type mismatch: differing types in '(NULL / INTERVAL '2' YEAR)' (void and interval year).; line 1 pos 7" - } -} +cannot resolve '(NULL / INTERVAL '2' YEAR)' due to data type mismatch: differing types in '(NULL / INTERVAL '2' YEAR)' (void and interval year).; line 1 pos 7 -- !query @@ -361,12 +316,7 @@ select null / interval '2' hour struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(NULL / INTERVAL '02' HOUR)' due to data type mismatch: differing types in '(NULL / INTERVAL '02' HOUR)' (void and interval hour).; line 1 pos 7" - } -} +cannot resolve '(NULL / INTERVAL '02' HOUR)' due to data type mismatch: differing types in '(NULL / INTERVAL '02' HOUR)' (void and interval hour).; line 1 pos 7 -- !query @@ -375,12 
+325,12 @@ select -interval '-1 month 1 day -1 second' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot mix year-month and day-time fields: interval '-1 month 1 day -1 second'(line 1, pos 8)\n\n== SQL ==\nselect -interval '-1 month 1 day -1 second'\n--------^^^\n" - } -} + +Cannot mix year-month and day-time fields: interval '-1 month 1 day -1 second'(line 1, pos 8) + +== SQL == +select -interval '-1 month 1 day -1 second' +--------^^^ -- !query @@ -405,12 +355,12 @@ select -interval -1 month 1 day -1 second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot mix year-month and day-time fields: interval -1 month 1 day -1 second(line 1, pos 8)\n\n== SQL ==\nselect -interval -1 month 1 day -1 second\n--------^^^\n" - } -} + +Cannot mix year-month and day-time fields: interval -1 month 1 day -1 second(line 1, pos 8) + +== SQL == +select -interval -1 month 1 day -1 second +--------^^^ -- !query @@ -435,12 +385,12 @@ select +interval '-1 month 1 day -1 second' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot mix year-month and day-time fields: interval '-1 month 1 day -1 second'(line 1, pos 8)\n\n== SQL ==\nselect +interval '-1 month 1 day -1 second'\n--------^^^\n" - } -} + +Cannot mix year-month and day-time fields: interval '-1 month 1 day -1 second'(line 1, pos 8) + +== SQL == +select +interval '-1 month 1 day -1 second' +--------^^^ -- !query @@ -465,12 +415,12 @@ select +interval -1 month 1 day -1 second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot mix year-month and day-time fields: interval -1 month 1 day -1 second(line 1, pos 8)\n\n== SQL ==\nselect +interval -1 month 1 day -1 second\n--------^^^\n" - } -} + +Cannot mix year-month and day-time fields: interval -1 month 1 day -1 second(line 1, pos 8) + +== SQL == +select +interval -1 month 1 day -1 second +--------^^^ -- !query @@ -751,12 +701,7 @@ select make_dt_interval(2147483647) struct<> -- !query output java.lang.ArithmeticException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "long overflow" - } -} +long overflow -- !query @@ -797,12 +742,7 @@ select make_ym_interval(178956970, 8) struct<> -- !query output java.lang.ArithmeticException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "integer overflow" - } -} +integer overflow -- !query @@ -819,12 +759,7 @@ select make_ym_interval(-178956970, -9) struct<> -- !query output java.lang.ArithmeticException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "integer overflow" - } -} +integer overflow -- !query @@ -897,12 +832,12 @@ select interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 millisec struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot mix year-month and day-time fields: interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 millisecond 9 microsecond(line 1, pos 7)\n\n== SQL ==\nselect interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 millisecond 9 microsecond\n-------^^^\n" - } -} + +Cannot mix year-month and day-time fields: interval 1 
year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 millisecond 9 microsecond(line 1, pos 7) + +== SQL == +select interval 1 year 2 month 3 week 4 day 5 hour 6 minute 7 seconds 8 millisecond 9 microsecond +-------^^^ -- !query @@ -935,12 +870,12 @@ select interval '30' year '25' month '-100' day '40' hour '80' minute '299.88998 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot mix year-month and day-time fields: interval '30' year '25' month '-100' day '40' hour '80' minute '299.889987299' second(line 1, pos 7)\n\n== SQL ==\nselect interval '30' year '25' month '-100' day '40' hour '80' minute '299.889987299' second\n-------^^^\n" - } -} + +Cannot mix year-month and day-time fields: interval '30' year '25' month '-100' day '40' hour '80' minute '299.889987299' second(line 1, pos 7) + +== SQL == +select interval '30' year '25' month '-100' day '40' hour '80' minute '299.889987299' second +-------^^^ -- !query @@ -1069,12 +1004,12 @@ select interval '20 15:40:32.99899999' day to hour struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nInterval string does not match day-time format of `[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval day to hour: 20 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '20 15:40:32.99899999' day to hour\n----------------^^^\n" - } -} + +Interval string does not match day-time format of `[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval day to hour: 20 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16) + +== SQL == +select interval '20 15:40:32.99899999' day to hour +----------------^^^ -- !query @@ -1083,12 +1018,12 @@ select interval '20 15:40:32.99899999' day to minute struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nInterval string does not match day-time format of `[+|-]d h:m`, `INTERVAL [+|-]'[+|-]d h:m' DAY TO MINUTE` when cast to interval day to minute: 20 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '20 15:40:32.99899999' day to minute\n----------------^^^\n" - } -} + +Interval string does not match day-time format of `[+|-]d h:m`, `INTERVAL [+|-]'[+|-]d h:m' DAY TO MINUTE` when cast to interval day to minute: 20 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16) + +== SQL == +select interval '20 15:40:32.99899999' day to minute +----------------^^^ -- !query @@ -1097,12 +1032,12 @@ select interval '15:40:32.99899999' hour to minute struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nInterval string does not match day-time format of `[+|-]h:m`, `INTERVAL [+|-]'[+|-]h:m' HOUR TO MINUTE` when cast to interval hour to minute: 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '15:40:32.99899999' hour to 
minute\n----------------^^^\n" - } -} + +Interval string does not match day-time format of `[+|-]h:m`, `INTERVAL [+|-]'[+|-]h:m' HOUR TO MINUTE` when cast to interval hour to minute: 15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16) + +== SQL == +select interval '15:40:32.99899999' hour to minute +----------------^^^ -- !query @@ -1111,12 +1046,12 @@ select interval '15:40.99899999' hour to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nInterval string does not match day-time format of `[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 15:40.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '15:40.99899999' hour to second\n----------------^^^\n" - } -} + +Interval string does not match day-time format of `[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 15:40.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16) + +== SQL == +select interval '15:40.99899999' hour to second +----------------^^^ -- !query @@ -1125,12 +1060,12 @@ select interval '15:40' hour to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nInterval string does not match day-time format of `[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 15:40, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '15:40' hour to second\n----------------^^^\n" - } -} + +Interval string does not match day-time format of `[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 15:40, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16) + +== SQL == +select interval '15:40' hour to second +----------------^^^ -- !query @@ -1139,12 +1074,12 @@ select interval '20 40:32.99899999' minute to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nInterval string does not match day-time format of `[+|-]m:s.n`, `INTERVAL [+|-]'[+|-]m:s.n' MINUTE TO SECOND` when cast to interval minute to second: 20 40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '20 40:32.99899999' minute to second\n----------------^^^\n" - } -} + +Interval string does not match day-time format of `[+|-]m:s.n`, `INTERVAL [+|-]'[+|-]m:s.n' MINUTE TO SECOND` when cast to interval minute to second: 20 40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16) + +== SQL == +select interval '20 40:32.99899999' minute to second +----------------^^^ -- !query @@ -1153,12 +1088,12 @@ select interval 10 nanoseconds struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nError parsing ' 10 nanoseconds' to interval, 
invalid unit 'nanoseconds'(line 1, pos 16)\n\n== SQL ==\nselect interval 10 nanoseconds\n----------------^^^\n" - } -} + +Error parsing ' 10 nanoseconds' to interval, invalid unit 'nanoseconds'(line 1, pos 16) + +== SQL == +select interval 10 nanoseconds +----------------^^^ -- !query @@ -1247,12 +1182,12 @@ select interval struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nat least one time unit should be given for interval literal(line 1, pos 7)\n\n== SQL ==\nselect interval\n-------^^^\n" - } -} + +at least one time unit should be given for interval literal(line 1, pos 7) + +== SQL == +select interval +-------^^^ -- !query @@ -1261,12 +1196,12 @@ select interval 1 fake_unit struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nError parsing ' 1 fake_unit' to interval, invalid unit 'fake_unit'(line 1, pos 16)\n\n== SQL ==\nselect interval 1 fake_unit\n----------------^^^\n" - } -} + +Error parsing ' 1 fake_unit' to interval, invalid unit 'fake_unit'(line 1, pos 16) + +== SQL == +select interval 1 fake_unit +----------------^^^ -- !query @@ -1275,12 +1210,12 @@ select interval 1 year to month struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nThe value of from-to unit must be a string(line 1, pos 16)\n\n== SQL ==\nselect interval 1 year to month\n----------------^^^\n" - } -} + +The value of from-to unit must be a string(line 1, pos 16) + +== SQL == +select interval 1 year to month +----------------^^^ -- !query @@ -1289,12 +1224,12 @@ select interval '1' year to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nIntervals FROM year TO second are not supported.(line 1, pos 16)\n\n== SQL ==\nselect interval '1' year to second\n----------------^^^\n" - } -} + +Intervals FROM year TO second are not supported.(line 1, pos 16) + +== SQL == +select interval '1' year to second +----------------^^^ -- !query @@ -1303,12 +1238,12 @@ select interval '10-9' year to month '2-1' year to month struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 37)\n\n== SQL ==\nselect interval '10-9' year to month '2-1' year to month\n-------------------------------------^^^\n" - } -} + +Can only have a single from-to unit in the interval literal syntax(line 1, pos 37) + +== SQL == +select interval '10-9' year to month '2-1' year to month +-------------------------------------^^^ -- !query @@ -1317,12 +1252,12 @@ select interval '10-9' year to month '12:11:10' hour to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 37)\n\n== SQL ==\nselect interval '10-9' year to month '12:11:10' hour to second\n-------------------------------------^^^\n" - } -} + +Can only have a single from-to unit in the interval literal syntax(line 1, pos 37) + +== SQL == +select interval '10-9' year to month '12:11:10' hour to second +-------------------------------------^^^ -- !query @@ 
-1331,12 +1266,12 @@ select interval '1 15:11' day to minute '12:11:10' hour to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 40)\n\n== SQL ==\nselect interval '1 15:11' day to minute '12:11:10' hour to second\n----------------------------------------^^^\n" - } -} + +Can only have a single from-to unit in the interval literal syntax(line 1, pos 40) + +== SQL == +select interval '1 15:11' day to minute '12:11:10' hour to second +----------------------------------------^^^ -- !query @@ -1345,12 +1280,12 @@ select interval 1 year '2-1' year to month struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 23)\n\n== SQL ==\nselect interval 1 year '2-1' year to month\n-----------------------^^^\n" - } -} + +Can only have a single from-to unit in the interval literal syntax(line 1, pos 23) + +== SQL == +select interval 1 year '2-1' year to month +-----------------------^^^ -- !query @@ -1359,12 +1294,12 @@ select interval 1 year '12:11:10' hour to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 23)\n\n== SQL ==\nselect interval 1 year '12:11:10' hour to second\n-----------------------^^^\n" - } -} + +Can only have a single from-to unit in the interval literal syntax(line 1, pos 23) + +== SQL == +select interval 1 year '12:11:10' hour to second +-----------------------^^^ -- !query @@ -1373,12 +1308,12 @@ select interval '10-9' year to month '1' year struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 37)\n\n== SQL ==\nselect interval '10-9' year to month '1' year\n-------------------------------------^^^\n" - } -} + +Can only have a single from-to unit in the interval literal syntax(line 1, pos 37) + +== SQL == +select interval '10-9' year to month '1' year +-------------------------------------^^^ -- !query @@ -1387,12 +1322,12 @@ select interval '12:11:10' hour to second '1' year struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCan only have a single from-to unit in the interval literal syntax(line 1, pos 42)\n\n== SQL ==\nselect interval '12:11:10' hour to second '1' year\n------------------------------------------^^^\n" - } -} + +Can only have a single from-to unit in the interval literal syntax(line 1, pos 42) + +== SQL == +select interval '12:11:10' hour to second '1' year +------------------------------------------^^^ -- !query @@ -1401,12 +1336,7 @@ select interval (-30) day struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7" - } -} +Undefined function: interval. 
This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7 -- !query @@ -1415,12 +1345,7 @@ select interval (a + 1) day struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7" - } -} +Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7 -- !query @@ -1445,12 +1370,7 @@ select interval (-30) days struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7" - } -} +Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7 -- !query @@ -1459,12 +1379,7 @@ select interval (a + 1) days struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7" - } -} +Undefined function: interval. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.interval.; line 1 pos 7 -- !query @@ -1497,12 +1412,12 @@ SELECT INTERVAL '178956970-8' YEAR TO MONTH struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nError parsing interval year-month string: integer overflow(line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '178956970-8' YEAR TO MONTH\n----------------^^^\n" - } -} + +Error parsing interval year-month string: integer overflow(line 1, pos 16) + +== SQL == +SELECT INTERVAL '178956970-8' YEAR TO MONTH +----------------^^^ -- !query @@ -1547,12 +1462,7 @@ select struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(INTERVAL '2' YEAR + CAST('3-3 year to month' AS DOUBLE))' due to data type mismatch: differing types in '(INTERVAL '2' YEAR + CAST('3-3 year to month' AS DOUBLE))' (interval year and double).; line 2 pos 2" - } -} +cannot resolve '(INTERVAL '2' YEAR + CAST('3-3 year to month' AS DOUBLE))' due to data type mismatch: differing types in '(INTERVAL '2' YEAR + CAST('3-3 year to month' AS DOUBLE))' (interval year and double).; line 2 pos 2 -- !query @@ -1577,12 +1487,7 @@ select interval '2' year + '3-3' struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(INTERVAL '2' YEAR + CAST('3-3' AS DOUBLE))' due to data type mismatch: differing types in '(INTERVAL '2' YEAR + CAST('3-3' AS DOUBLE))' (interval year and double).; line 1 pos 7" - } -} +cannot resolve '(INTERVAL '2' YEAR + CAST('3-3' AS DOUBLE))' due to data type mismatch: differing types in '(INTERVAL '2' YEAR 
+ CAST('3-3' AS DOUBLE))' (interval year and double).; line 1 pos 7 -- !query @@ -1591,12 +1496,7 @@ select interval '2' year - '4' struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(INTERVAL '2' YEAR - CAST('4' AS DOUBLE))' due to data type mismatch: differing types in '(INTERVAL '2' YEAR - CAST('4' AS DOUBLE))' (interval year and double).; line 1 pos 7" - } -} +cannot resolve '(INTERVAL '2' YEAR - CAST('4' AS DOUBLE))' due to data type mismatch: differing types in '(INTERVAL '2' YEAR - CAST('4' AS DOUBLE))' (interval year and double).; line 1 pos 7 -- !query @@ -1629,12 +1529,7 @@ select interval '2' year + str from interval_view struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(INTERVAL '2' YEAR + CAST(interval_view.str AS DOUBLE))' due to data type mismatch: differing types in '(INTERVAL '2' YEAR + CAST(interval_view.str AS DOUBLE))' (interval year and double).; line 1 pos 7" - } -} +cannot resolve '(INTERVAL '2' YEAR + CAST(interval_view.str AS DOUBLE))' due to data type mismatch: differing types in '(INTERVAL '2' YEAR + CAST(interval_view.str AS DOUBLE))' (interval year and double).; line 1 pos 7 -- !query @@ -1643,12 +1538,7 @@ select interval '2' year - str from interval_view struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(INTERVAL '2' YEAR - CAST(interval_view.str AS DOUBLE))' due to data type mismatch: differing types in '(INTERVAL '2' YEAR - CAST(interval_view.str AS DOUBLE))' (interval year and double).; line 1 pos 7" - } -} +cannot resolve '(INTERVAL '2' YEAR - CAST(interval_view.str AS DOUBLE))' due to data type mismatch: differing types in '(INTERVAL '2' YEAR - CAST(interval_view.str AS DOUBLE))' (interval year and double).; line 1 pos 7 -- !query @@ -1673,12 +1563,7 @@ select interval '2-2' year to month + interval '3' day struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'INTERVAL '2-2' YEAR TO MONTH + INTERVAL '3' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2-2' YEAR TO MONTH' is of interval year to month type.; line 1 pos 7" - } -} +cannot resolve 'INTERVAL '2-2' YEAR TO MONTH + INTERVAL '3' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2-2' YEAR TO MONTH' is of interval year to month type.; line 1 pos 7 -- !query @@ -1687,12 +1572,7 @@ select interval '3' day + interval '2-2' year to month struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'INTERVAL '2-2' YEAR TO MONTH + INTERVAL '3' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2-2' YEAR TO MONTH' is of interval year to month type.; line 1 pos 7" - } -} +cannot resolve 'INTERVAL '2-2' YEAR TO MONTH + INTERVAL '3' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2-2' YEAR TO MONTH' is of interval year to month type.; line 1 pos 7 -- !query @@ -1701,12 +1581,7 @@ select interval '2-2' year to month - interval '3' day struct<> 
-- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'INTERVAL '2-2' YEAR TO MONTH + (- INTERVAL '3' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2-2' YEAR TO MONTH' is of interval year to month type.; line 1 pos 7" - } -} +cannot resolve 'INTERVAL '2-2' YEAR TO MONTH + (- INTERVAL '3' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2-2' YEAR TO MONTH' is of interval year to month type.; line 1 pos 7 -- !query @@ -1715,12 +1590,7 @@ select interval '3' day - interval '2-2' year to month struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(INTERVAL '3' DAY - INTERVAL '2-2' YEAR TO MONTH)' due to data type mismatch: differing types in '(INTERVAL '3' DAY - INTERVAL '2-2' YEAR TO MONTH)' (interval day and interval year to month).; line 1 pos 7" - } -} +cannot resolve '(INTERVAL '3' DAY - INTERVAL '2-2' YEAR TO MONTH)' due to data type mismatch: differing types in '(INTERVAL '3' DAY - INTERVAL '2-2' YEAR TO MONTH)' (interval day and interval year to month).; line 1 pos 7 -- !query @@ -1729,12 +1599,7 @@ select 1 - interval '2' second struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '1 + (- INTERVAL '02' SECOND)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, '1' is of int type.; line 1 pos 7" - } -} +cannot resolve '1 + (- INTERVAL '02' SECOND)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, '1' is of int type.; line 1 pos 7 -- !query @@ -1743,12 +1608,7 @@ select 1 + interval '2' month struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(1 + INTERVAL '2' MONTH)' due to data type mismatch: differing types in '(1 + INTERVAL '2' MONTH)' (int and interval month).; line 1 pos 7" - } -} +cannot resolve '(1 + INTERVAL '2' MONTH)' due to data type mismatch: differing types in '(1 + INTERVAL '2' MONTH)' (int and interval month).; line 1 pos 7 -- !query @@ -1757,12 +1617,7 @@ select interval '2' second + 1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '1 + INTERVAL '02' SECOND' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, '1' is of int type.; line 1 pos 7" - } -} +cannot resolve '1 + INTERVAL '02' SECOND' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, '1' is of int type.; line 1 pos 7 -- !query @@ -1771,12 +1626,7 @@ select interval '2' month - 1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(INTERVAL '2' MONTH - 1)' due to data type mismatch: differing types in '(INTERVAL '2' MONTH - 1)' (interval month and int).; line 1 pos 7" - } -} +cannot resolve '(INTERVAL '2' MONTH - 1)' due to data type mismatch: differing types in '(INTERVAL '2' MONTH - 1)' (interval month and int).; line 1 pos 7 -- !query @@ -1833,12 +1683,12 @@ select 
interval '-\t2-2\t' year to month struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nInterval string does not match year-month format of `[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval year to month: -\t2-2\t(line 1, pos 16)\n\n== SQL ==\nselect interval '-\\t2-2\\t' year to month\n----------------^^^\n" - } -} + +Interval string does not match year-month format of `[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval year to month: - 2-2 (line 1, pos 16) + +== SQL == +select interval '-\t2-2\t' year to month +----------------^^^ -- !query @@ -1855,12 +1705,13 @@ select interval '\n-\t10\t 12:34:46.789\t' day to second struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nInterval string does not match day-time format of `[+|-]d h:m:s.n`, `INTERVAL [+|-]'[+|-]d h:m:s.n' DAY TO SECOND` when cast to interval day to second: \n-\t10\t 12:34:46.789\t, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nselect interval '\\n-\\t10\\t 12:34:46.789\\t' day to second\n----------------^^^\n" - } -} + +Interval string does not match day-time format of `[+|-]d h:m:s.n`, `INTERVAL [+|-]'[+|-]d h:m:s.n' DAY TO SECOND` when cast to interval day to second: +- 10 12:34:46.789 , set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16) + +== SQL == +select interval '\n-\t10\t 12:34:46.789\t' day to second +----------------^^^ -- !query @@ -1869,12 +1720,12 @@ select interval '中文 interval 1 day' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot parse the INTERVAL value: 中文 interval 1 day(line 1, pos 7)\n\n== SQL ==\nselect interval '中文 interval 1 day'\n-------^^^\n" - } -} + +Cannot parse the INTERVAL value: 中文 interval 1 day(line 1, pos 7) + +== SQL == +select interval '中文 interval 1 day' +-------^^^ -- !query @@ -1883,12 +1734,12 @@ select interval 'interval中文 1 day' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot parse the INTERVAL value: interval中文 1 day(line 1, pos 7)\n\n== SQL ==\nselect interval 'interval中文 1 day'\n-------^^^\n" - } -} + +Cannot parse the INTERVAL value: interval中文 1 day(line 1, pos 7) + +== SQL == +select interval 'interval中文 1 day' +-------^^^ -- !query @@ -1897,12 +1748,12 @@ select interval 'interval 1中文day' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot parse the INTERVAL value: interval 1中文day(line 1, pos 7)\n\n== SQL ==\nselect interval 'interval 1中文day'\n-------^^^\n" - } -} + +Cannot parse the INTERVAL value: interval 1中文day(line 1, pos 7) + +== SQL == +select interval 'interval 1中文day' +-------^^^ -- !query @@ -1959,12 +1810,7 @@ select a * 1.1 from values (interval '-2147483648 months', interval '2147483647 struct<> -- !query output java.lang.ArithmeticException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Overflow" - } -} +Overflow -- !query @@ -1973,12 +1819,7 @@ select a / 0.5 from values (interval '-2147483648 months', interval '2147483647 struct<> -- !query 
output java.lang.ArithmeticException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Overflow" - } -} +Overflow -- !query @@ -2039,12 +1880,12 @@ select interval '+' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot parse the INTERVAL value: +(line 1, pos 7)\n\n== SQL ==\nselect interval '+'\n-------^^^\n" - } -} + +Cannot parse the INTERVAL value: +(line 1, pos 7) + +== SQL == +select interval '+' +-------^^^ -- !query @@ -2053,12 +1894,12 @@ select interval '+.' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot parse the INTERVAL value: +.(line 1, pos 7)\n\n== SQL ==\nselect interval '+.'\n-------^^^\n" - } -} + +Cannot parse the INTERVAL value: +.(line 1, pos 7) + +== SQL == +select interval '+.' +-------^^^ -- !query @@ -2067,12 +1908,12 @@ select interval '1' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot parse the INTERVAL value: 1(line 1, pos 7)\n\n== SQL ==\nselect interval '1'\n-------^^^\n" - } -} + +Cannot parse the INTERVAL value: 1(line 1, pos 7) + +== SQL == +select interval '1' +-------^^^ -- !query @@ -2081,12 +1922,12 @@ select interval '1.2' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot parse the INTERVAL value: 1.2(line 1, pos 7)\n\n== SQL ==\nselect interval '1.2'\n-------^^^\n" - } -} + +Cannot parse the INTERVAL value: 1.2(line 1, pos 7) + +== SQL == +select interval '1.2' +-------^^^ -- !query @@ -2095,12 +1936,12 @@ select interval '- 2' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot parse the INTERVAL value: - 2(line 1, pos 7)\n\n== SQL ==\nselect interval '- 2'\n-------^^^\n" - } -} + +Cannot parse the INTERVAL value: - 2(line 1, pos 7) + +== SQL == +select interval '- 2' +-------^^^ -- !query @@ -2109,12 +1950,12 @@ select interval '1 day -' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot parse the INTERVAL value: 1 day -(line 1, pos 7)\n\n== SQL ==\nselect interval '1 day -'\n-------^^^\n" - } -} + +Cannot parse the INTERVAL value: 1 day -(line 1, pos 7) + +== SQL == +select interval '1 day -' +-------^^^ -- !query @@ -2123,12 +1964,12 @@ select interval '1 day 1' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot parse the INTERVAL value: 1 day 1(line 1, pos 7)\n\n== SQL ==\nselect interval '1 day 1'\n-------^^^\n" - } -} + +Cannot parse the INTERVAL value: 1 day 1(line 1, pos 7) + +== SQL == +select interval '1 day 1' +-------^^^ -- !query @@ -2137,12 +1978,12 @@ select interval '1 day 2' day struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCan only use numbers in the interval value part for multiple unit value pairs interval form, but got invalid value: 1 day 2(line 1, pos 16)\n\n== SQL ==\nselect interval '1 day 2' day\n----------------^^^\n" - } -} + +Can only use numbers in the 
interval value part for multiple unit value pairs interval form, but got invalid value: 1 day 2(line 1, pos 16) + +== SQL == +select interval '1 day 2' day +----------------^^^ -- !query @@ -2151,12 +1992,12 @@ select interval 'interval 1' day struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCan only use numbers in the interval value part for multiple unit value pairs interval form, but got invalid value: interval 1(line 1, pos 16)\n\n== SQL ==\nselect interval 'interval 1' day\n----------------^^^\n" - } -} + +Can only use numbers in the interval value part for multiple unit value pairs interval form, but got invalid value: interval 1(line 1, pos 16) + +== SQL == +select interval 'interval 1' day +----------------^^^ -- !query @@ -2235,12 +2076,7 @@ SELECT (INTERVAL '-178956970-8' YEAR TO MONTH) / -1.0 struct<> -- !query output java.lang.ArithmeticException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Overflow" - } -} +Overflow -- !query @@ -2249,12 +2085,7 @@ SELECT (INTERVAL '-178956970-8' YEAR TO MONTH) / -1.0D struct<> -- !query output java.lang.ArithmeticException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "not in range" - } -} +not in range -- !query @@ -2325,12 +2156,7 @@ SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1.0 struct<> -- !query output java.lang.ArithmeticException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Overflow" - } -} +Overflow -- !query @@ -2339,12 +2165,7 @@ SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1.0D struct<> -- !query output java.lang.ArithmeticException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "not in range" - } -} +not in range -- !query @@ -2449,12 +2270,12 @@ SELECT INTERVAL '106751992 04' DAY TO HOUR struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nrequirement failed: day 106751992 outside range [0, 106751991](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '106751992 04' DAY TO HOUR\n----------------^^^\n" - } -} + +requirement failed: day 106751992 outside range [0, 106751991](line 1, pos 16) + +== SQL == +SELECT INTERVAL '106751992 04' DAY TO HOUR +----------------^^^ -- !query @@ -2463,12 +2284,12 @@ SELECT INTERVAL '-106751992 04' DAY TO HOUR struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nrequirement failed: day 106751992 outside range [0, 106751991](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '-106751992 04' DAY TO HOUR\n----------------^^^\n" - } -} + +requirement failed: day 106751992 outside range [0, 106751991](line 1, pos 16) + +== SQL == +SELECT INTERVAL '-106751992 04' DAY TO HOUR +----------------^^^ -- !query @@ -2477,12 +2298,12 @@ SELECT INTERVAL '2562047789:00' HOUR TO MINUTE struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nrequirement failed: hour 2562047789 outside range [0, 2562047788](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '2562047789:00' HOUR TO MINUTE\n----------------^^^\n" - } -} + +requirement failed: hour 2562047789 outside range [0, 2562047788](line 1, pos 16) + +== SQL == +SELECT INTERVAL '2562047789:00' HOUR TO MINUTE +----------------^^^ -- !query @@ -2491,12 
+2312,12 @@ SELECT INTERVAL '-2562047789:00' HOUR TO MINUTE struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nrequirement failed: hour 2562047789 outside range [0, 2562047788](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '-2562047789:00' HOUR TO MINUTE\n----------------^^^\n" - } -} + +requirement failed: hour 2562047789 outside range [0, 2562047788](line 1, pos 16) + +== SQL == +SELECT INTERVAL '-2562047789:00' HOUR TO MINUTE +----------------^^^ -- !query @@ -2505,12 +2326,12 @@ SELECT INTERVAL '153722867281:54.775808' MINUTE TO SECOND struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nrequirement failed: minute 153722867281 outside range [0, 153722867280](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '153722867281:54.775808' MINUTE TO SECOND\n----------------^^^\n" - } -} + +requirement failed: minute 153722867281 outside range [0, 153722867280](line 1, pos 16) + +== SQL == +SELECT INTERVAL '153722867281:54.775808' MINUTE TO SECOND +----------------^^^ -- !query @@ -2519,12 +2340,12 @@ SELECT INTERVAL '-153722867281:54.775808' MINUTE TO SECOND struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nrequirement failed: minute 153722867281 outside range [0, 153722867280](line 1, pos 16)\n\n== SQL ==\nSELECT INTERVAL '-153722867281:54.775808' MINUTE TO SECOND\n----------------^^^\n" - } -} + +requirement failed: minute 153722867281 outside range [0, 153722867280](line 1, pos 16) + +== SQL == +SELECT INTERVAL '-153722867281:54.775808' MINUTE TO SECOND +----------------^^^ -- !query @@ -2661,12 +2482,7 @@ SELECT INTERVAL 1 MONTH > INTERVAL 20 DAYS struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(INTERVAL '1' MONTH > INTERVAL '20' DAY)' due to data type mismatch: differing types in '(INTERVAL '1' MONTH > INTERVAL '20' DAY)' (interval month and interval day).; line 1 pos 7" - } -} +cannot resolve '(INTERVAL '1' MONTH > INTERVAL '20' DAY)' due to data type mismatch: differing types in '(INTERVAL '1' MONTH > INTERVAL '20' DAY)' (interval month and interval day).; line 1 pos 7 -- !query @@ -2675,12 +2491,7 @@ SELECT INTERVAL '1' DAY < '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(INTERVAL '1' DAY < '1')' due to data type mismatch: differing types in '(INTERVAL '1' DAY < '1')' (interval day and string).; line 1 pos 7" - } -} +cannot resolve '(INTERVAL '1' DAY < '1')' due to data type mismatch: differing types in '(INTERVAL '1' DAY < '1')' (interval day and string).; line 1 pos 7 -- !query @@ -2689,12 +2500,7 @@ SELECT INTERVAL '1' DAY = '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(INTERVAL '1' DAY = '1')' due to data type mismatch: differing types in '(INTERVAL '1' DAY = '1')' (interval day and string).; line 1 pos 7" - } -} +cannot resolve '(INTERVAL '1' DAY = '1')' due to data type mismatch: differing types in '(INTERVAL '1' DAY = '1')' (interval day and string).; line 1 pos 7 -- !query @@ -2703,12 +2509,7 @@ SELECT INTERVAL '1' DAY > '1' struct<> -- !query output 
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(INTERVAL '1' DAY > '1')' due to data type mismatch: differing types in '(INTERVAL '1' DAY > '1')' (interval day and string).; line 1 pos 7"
-  }
-}
+cannot resolve '(INTERVAL '1' DAY > '1')' due to data type mismatch: differing types in '(INTERVAL '1' DAY > '1')' (interval day and string).; line 1 pos 7
 
 
 -- !query
@@ -2717,12 +2518,7 @@ SELECT '1' < INTERVAL '1' DAY
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '('1' < INTERVAL '1' DAY)' due to data type mismatch: differing types in '('1' < INTERVAL '1' DAY)' (string and interval day).; line 1 pos 7"
-  }
-}
+cannot resolve '('1' < INTERVAL '1' DAY)' due to data type mismatch: differing types in '('1' < INTERVAL '1' DAY)' (string and interval day).; line 1 pos 7
 
 
 -- !query
@@ -2731,12 +2527,7 @@ SELECT '1' = INTERVAL '1' DAY
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '('1' = INTERVAL '1' DAY)' due to data type mismatch: differing types in '('1' = INTERVAL '1' DAY)' (string and interval day).; line 1 pos 7"
-  }
-}
+cannot resolve '('1' = INTERVAL '1' DAY)' due to data type mismatch: differing types in '('1' = INTERVAL '1' DAY)' (string and interval day).; line 1 pos 7
 
 
 -- !query
@@ -2745,12 +2536,7 @@ SELECT '1' > INTERVAL '1' DAY
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '('1' > INTERVAL '1' DAY)' due to data type mismatch: differing types in '('1' > INTERVAL '1' DAY)' (string and interval day).; line 1 pos 7"
-  }
-}
+cannot resolve '('1' > INTERVAL '1' DAY)' due to data type mismatch: differing types in '('1' > INTERVAL '1' DAY)' (string and interval day).; line 1 pos 7
 
 
 -- !query
@@ -2759,12 +2545,7 @@ SELECT INTERVAL '1' YEAR < '1'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(INTERVAL '1' YEAR < '1')' due to data type mismatch: differing types in '(INTERVAL '1' YEAR < '1')' (interval year and string).; line 1 pos 7"
-  }
-}
+cannot resolve '(INTERVAL '1' YEAR < '1')' due to data type mismatch: differing types in '(INTERVAL '1' YEAR < '1')' (interval year and string).; line 1 pos 7
 
 
 -- !query
@@ -2773,12 +2554,7 @@ SELECT INTERVAL '1' YEAR = '1'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(INTERVAL '1' YEAR = '1')' due to data type mismatch: differing types in '(INTERVAL '1' YEAR = '1')' (interval year and string).; line 1 pos 7"
-  }
-}
+cannot resolve '(INTERVAL '1' YEAR = '1')' due to data type mismatch: differing types in '(INTERVAL '1' YEAR = '1')' (interval year and string).; line 1 pos 7
 
 
 -- !query
@@ -2787,12 +2563,7 @@ SELECT INTERVAL '1' YEAR > '1'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(INTERVAL '1' YEAR > '1')' due to data type mismatch: differing types in '(INTERVAL '1' YEAR > '1')' (interval year and string).; line 1 pos 7"
-  }
-}
+cannot resolve '(INTERVAL '1' YEAR > '1')' due to data type mismatch: differing types in '(INTERVAL '1' YEAR > '1')' (interval year and string).; line 1 pos 7
 
 
 -- !query
@@ -2801,12 +2572,7 @@ SELECT '1' < INTERVAL '1' YEAR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '('1' < INTERVAL '1' YEAR)' due to data type mismatch: differing types in '('1' < INTERVAL '1' YEAR)' (string and interval year).; line 1 pos 7"
-  }
-}
+cannot resolve '('1' < INTERVAL '1' YEAR)' due to data type mismatch: differing types in '('1' < INTERVAL '1' YEAR)' (string and interval year).; line 1 pos 7
 
 
 -- !query
@@ -2815,12 +2581,7 @@ SELECT '1' = INTERVAL '1' YEAR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '('1' = INTERVAL '1' YEAR)' due to data type mismatch: differing types in '('1' = INTERVAL '1' YEAR)' (string and interval year).; line 1 pos 7"
-  }
-}
+cannot resolve '('1' = INTERVAL '1' YEAR)' due to data type mismatch: differing types in '('1' = INTERVAL '1' YEAR)' (string and interval year).; line 1 pos 7
 
 
 -- !query
@@ -2829,12 +2590,7 @@ SELECT '1' > INTERVAL '1' YEAR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '('1' > INTERVAL '1' YEAR)' due to data type mismatch: differing types in '('1' > INTERVAL '1' YEAR)' (string and interval year).; line 1 pos 7"
-  }
-}
+cannot resolve '('1' > INTERVAL '1' YEAR)' due to data type mismatch: differing types in '('1' > INTERVAL '1' YEAR)' (string and interval year).; line 1 pos 7
 
 
 -- !query
@@ -2859,12 +2615,7 @@ SELECT array(INTERVAL 1 MONTH, INTERVAL 20 DAYS)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve 'array(INTERVAL '1' MONTH, INTERVAL '20' DAY)' due to data type mismatch: input to function array should all be the same type, but it's [interval month, interval day]; line 1 pos 7"
-  }
-}
+cannot resolve 'array(INTERVAL '1' MONTH, INTERVAL '20' DAY)' due to data type mismatch: input to function array should all be the same type, but it's [interval month, interval day]; line 1 pos 7
 
 
 -- !query
@@ -2889,12 +2640,7 @@ SELECT coalesce(INTERVAL 1 MONTH, INTERVAL 20 DAYS)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve 'coalesce(INTERVAL '1' MONTH, INTERVAL '20' DAY)' due to data type mismatch: input to function coalesce should all be the same type, but it's [interval month, interval day]; line 1 pos 7"
-  }
-}
+cannot resolve 'coalesce(INTERVAL '1' MONTH, INTERVAL '20' DAY)' due to data type mismatch: input to function coalesce should all be the same type, but it's [interval month, interval day]; line 1 pos 7
 
 
 -- !query
@@ -2951,12 +2697,7 @@ SELECT div(INTERVAL '1' MONTH, INTERVAL '-1' DAY)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(INTERVAL '1' MONTH div INTERVAL '-1' DAY)' due to data type mismatch: differing types in '(INTERVAL '1' MONTH div INTERVAL '-1' DAY)' (interval month and interval day).; line 1 pos 7"
-  }
-}
+cannot resolve '(INTERVAL '1' MONTH div INTERVAL '-1' DAY)' due to data type mismatch: differing types in '(INTERVAL '1' MONTH div INTERVAL '-1' DAY)' (interval month and interval day).; line 1 pos 7
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/join-lateral.sql.out b/sql/core/src/test/resources/sql-tests/results/join-lateral.sql.out
index 1ec5ec03a7f34..e8e6741279b34 100644
--- a/sql/core/src/test/resources/sql-tests/results/join-lateral.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/join-lateral.sql.out
@@ -114,12 +114,7 @@ SELECT * FROM t1, LATERAL (SELECT t1.*, t2.* FROM t2, LATERAL (SELECT t1.*, t2.*
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve 't1.*' given input columns 'c1, c2'; line 1 pos 70"
-  }
-}
+cannot resolve 't1.*' given input columns 'c1, c2'; line 1 pos 70
 
 
 -- !query
@@ -303,12 +298,11 @@ SELECT * FROM t1, LATERAL (SELECT c1 + c2 + rand(0) AS c3)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Non-deterministic lateral subqueries are not supported when joining with outer relations that produce more than one row\nSubqueryAlias __auto_generated_subquery_name\n+- Project [(cast((outer(c1#x) + outer(c2#x)) as double) + rand(0)) AS c3#x]\n   +- OneRowRelation\n; line 1 pos 9"
-  }
-}
+Non-deterministic lateral subqueries are not supported when joining with outer relations that produce more than one row
+SubqueryAlias __auto_generated_subquery_name
++- Project [(cast((outer(c1#x) + outer(c2#x)) as double) + rand(0)) AS c3#x]
+   +- OneRowRelation
+; line 1 pos 9
 
 
 -- !query
@@ -317,12 +311,14 @@ SELECT * FROM t1, LATERAL (SELECT rand(0) FROM t2)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Non-deterministic lateral subqueries are not supported when joining with outer relations that produce more than one row\nSubqueryAlias __auto_generated_subquery_name\n+- Project [rand(0) AS rand(0)#x]\n   +- SubqueryAlias spark_catalog.default.t2\n      +- View (`spark_catalog`.`default`.`t2`, [c1#x,c2#x])\n         +- Project [cast(col1#x as int) AS c1#x, cast(col2#x as int) AS c2#x]\n            +- LocalRelation [col1#x, col2#x]\n; line 1 pos 9"
-  }
-}
+Non-deterministic lateral subqueries are not supported when joining with outer relations that produce more than one row
+SubqueryAlias __auto_generated_subquery_name
++- Project [rand(0) AS rand(0)#x]
+   +- SubqueryAlias spark_catalog.default.t2
+      +- View (`spark_catalog`.`default`.`t2`, [c1#x,c2#x])
+         +- Project [cast(col1#x as int) AS c1#x, cast(col2#x as int) AS c2#x]
+            +- LocalRelation [col1#x, col2#x]
+; line 1 pos 9
 
 
 -- !query
@@ -331,12 +327,7 @@ SELECT * FROM t1 JOIN LATERAL (SELECT * FROM t2) s ON t1.c1 + rand(0) = s.c1
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Lateral join condition cannot be non-deterministic: ((CAST(spark_catalog.default.t1.c1 AS DOUBLE) + rand(0)) = CAST(s.c1 AS DOUBLE)); line 1 pos 17"
-  }
-}
+Lateral join condition cannot be non-deterministic: ((CAST(spark_catalog.default.t1.c1 AS DOUBLE) + rand(0)) = CAST(s.c1 AS DOUBLE)); line 1 pos 17
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out
index 6929fb464d6ac..ca079e4add048 100644
--- a/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out
@@ -69,12 +69,7 @@ select to_json(named_struct('a', 1, 'b', 2), named_struct('mode', 'PERMISSIVE'))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Must use a map() function for options; line 1 pos 7"
-  }
-}
+Must use a map() function for options; line 1 pos 7
 
 
 -- !query
@@ -83,12 +78,7 @@ select to_json(named_struct('a', 1, 'b', 2), map('mode', 1))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "A type of keys and values in map() must be string, but got map; line 1 pos 7"
-  }
-}
+A type of keys and values in map() must be string, but got map; line 1 pos 7
 
 
 -- !query
@@ -97,12 +87,7 @@ select to_json()
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Invalid number of arguments for function to_json. Expected: one of 1 and 2; Found: 0; line 1 pos 7"
-  }
-}
+Invalid number of arguments for function to_json. Expected: one of 1 and 2; Found: 0; line 1 pos 7
 
 
 -- !query
@@ -127,12 +112,7 @@ select from_json('{"a":1}', 1)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "The expression '1' is not a valid schema string.; line 1 pos 7"
-  }
-}
+The expression '1' is not a valid schema string.; line 1 pos 7
 
 
 -- !query
@@ -141,12 +121,20 @@ select from_json('{"a":1}', 'a InvalidType')
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Cannot parse the data type: \n[PARSE_SYNTAX_ERROR] Syntax error at or near 'InvalidType': extra input 'InvalidType'(line 1, pos 2)\n\n== SQL ==\na InvalidType\n--^^^\n\nFailed fallback parsing: \nDataType invalidtype is not supported.(line 1, pos 2)\n\n== SQL ==\na InvalidType\n--^^^\n; line 1 pos 7"
-  }
-}
+Cannot parse the data type: 
+[PARSE_SYNTAX_ERROR] Syntax error at or near 'InvalidType': extra input 'InvalidType'(line 1, pos 2)
+
+== SQL ==
+a InvalidType
+--^^^
+
+Failed fallback parsing: 
+DataType invalidtype is not supported.(line 1, pos 2)
+
+== SQL ==
+a InvalidType
+--^^^
+; line 1 pos 7
 
 
 -- !query
@@ -155,12 +143,7 @@ select from_json('{"a":1}', 'a INT', named_struct('mode', 'PERMISSIVE'))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Must use a map() function for options; line 1 pos 7"
-  }
-}
+Must use a map() function for options; line 1 pos 7
 
 
 -- !query
@@ -169,12 +152,7 @@ select from_json('{"a":1}', 'a INT', map('mode', 1))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "A type of keys and values in map() must be string, but got map; line 1 pos 7"
-  }
-}
+A type of keys and values in map() must be string, but got map; line 1 pos 7
 
 
 -- !query
@@ -183,12 +161,7 @@ select from_json()
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Invalid number of arguments for function from_json. Expected: one of 2 and 3; Found: 0; line 1 pos 7"
-  }
-}
+Invalid number of arguments for function from_json. Expected: one of 2 and 3; Found: 0; line 1 pos 7
 
 
 -- !query
@@ -430,12 +403,7 @@ select schema_of_json(null)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve 'schema_of_json(NULL)' due to data type mismatch: The input json should be a foldable string expression and not null; however, got NULL.; line 1 pos 7"
-  }
-}
+cannot resolve 'schema_of_json(NULL)' due to data type mismatch: The input json should be a foldable string expression and not null; however, got NULL.; line 1 pos 7
 
 
 -- !query
@@ -452,12 +420,7 @@ SELECT schema_of_json(jsonField) FROM jsonTable
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve 'schema_of_json(jsontable.jsonField)' due to data type mismatch: The input json should be a foldable string expression and not null; however, got jsontable.jsonField.; line 1 pos 7"
-  }
-}
+cannot resolve 'schema_of_json(jsontable.jsonField)' due to data type mismatch: The input json should be a foldable string expression and not null; however, got jsontable.jsonField.; line 1 pos 7
 
 
 -- !query
@@ -474,12 +437,7 @@ select json_array_length(2)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve 'json_array_length(2)' due to data type mismatch: argument 1 requires string type, however, '2' is of int type.; line 1 pos 7"
-  }
-}
+cannot resolve 'json_array_length(2)' due to data type mismatch: argument 1 requires string type, however, '2' is of int type.; line 1 pos 7
 
 
 -- !query
@@ -488,12 +446,7 @@ select json_array_length()
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Invalid number of arguments for function json_array_length. Expected: 1; Found: 0; line 1 pos 7"
-  }
-}
+Invalid number of arguments for function json_array_length. Expected: 1; Found: 0; line 1 pos 7
 
 
 -- !query
@@ -566,12 +519,7 @@ select json_object_keys()
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Invalid number of arguments for function json_object_keys. Expected: 1; Found: 0; line 1 pos 7"
-  }
-}
+Invalid number of arguments for function json_object_keys. Expected: 1; Found: 0; line 1 pos 7
 
 
 -- !query
@@ -588,12 +536,7 @@ select json_object_keys(200)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve 'json_object_keys(200)' due to data type mismatch: argument 1 requires string type, however, '200' is of int type.; line 1 pos 7"
-  }
-}
+cannot resolve 'json_object_keys(200)' due to data type mismatch: argument 1 requires string type, however, '200' is of int type.; line 1 pos 7
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/like-all.sql.out b/sql/core/src/test/resources/sql-tests/results/like-all.sql.out
index e2ce9ad15d6ad..3a252d84ba155 100644
--- a/sql/core/src/test/resources/sql-tests/results/like-all.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/like-all.sql.out
@@ -129,9 +129,9 @@ SELECT company FROM like_all_table WHERE company LIKE ALL ()
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nExpected something between '(' and ')'.(line 1, pos 49)\n\n== SQL ==\nSELECT company FROM like_all_table WHERE company LIKE ALL ()\n-------------------------------------------------^^^\n"
-  }
-}
+
+Expected something between '(' and ')'.(line 1, pos 49)
+
+== SQL ==
+SELECT company FROM like_all_table WHERE company LIKE ALL ()
+-------------------------------------------------^^^
diff --git a/sql/core/src/test/resources/sql-tests/results/like-any.sql.out b/sql/core/src/test/resources/sql-tests/results/like-any.sql.out
index bd31041dc777f..ecf7b6e7bbcc3 100644
--- a/sql/core/src/test/resources/sql-tests/results/like-any.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/like-any.sql.out
@@ -135,9 +135,9 @@ SELECT company FROM like_any_table WHERE company LIKE ANY ()
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nExpected something between '(' and ')'.(line 1, pos 49)\n\n== SQL ==\nSELECT company FROM like_any_table WHERE company LIKE ANY ()\n-------------------------------------------------^^^\n"
-  }
-}
+
+Expected something between '(' and ')'.(line 1, pos 49)
+
+== SQL ==
+SELECT company FROM like_any_table WHERE company LIKE ANY ()
+-------------------------------------------------^^^
diff --git a/sql/core/src/test/resources/sql-tests/results/limit.sql.out b/sql/core/src/test/resources/sql-tests/results/limit.sql.out
index eb7079a9a4adf..7d1c1e2b34dff 100644
--- a/sql/core/src/test/resources/sql-tests/results/limit.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/limit.sql.out
@@ -50,12 +50,7 @@ SELECT * FROM testdata LIMIT -1
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "The limit expression must be equal to or greater than 0, but got -1"
-  }
-}
+The limit expression must be equal to or greater than 0, but got -1
 
 
 -- !query
@@ -64,12 +59,7 @@ SELECT * FROM testData TABLESAMPLE (-1 ROWS)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "The limit expression must be equal to or greater than 0, but got -1"
-  }
-}
+The limit expression must be equal to or greater than 0, but got -1
 
 
 -- !query
@@ -86,12 +76,7 @@ SELECT * FROM testdata LIMIT CAST(NULL AS INT)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "The evaluated limit expression must not be null, but got CAST(NULL AS INT)"
-  }
-}
+The evaluated limit expression must not be null, but got CAST(NULL AS INT)
 
 
 -- !query
@@ -100,12 +85,7 @@ SELECT * FROM testdata LIMIT key > 3
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "The limit expression must evaluate to a constant value, but got (spark_catalog.default.testdata.key > 3)"
-  }
-}
+The limit expression must evaluate to a constant value, but got (spark_catalog.default.testdata.key > 3)
 
 
 -- !query
@@ -114,12 +94,7 @@ SELECT * FROM testdata LIMIT true
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "The limit expression must be integer type, but got boolean"
-  }
-}
+The limit expression must be integer type, but got boolean
 
 
 -- !query
@@ -128,12 +103,7 @@ SELECT * FROM testdata LIMIT 'a'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "The limit expression must be integer type, but got string"
-  }
-}
+The limit expression must be integer type, but got string
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/literals.sql.out b/sql/core/src/test/resources/sql-tests/results/literals.sql.out
index 46c706c48c0ca..168025d911dea 100644
--- a/sql/core/src/test/resources/sql-tests/results/literals.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/literals.sql.out
@@ -37,12 +37,12 @@ select 128Y
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nNumeric literal 128 does not fit in range [-128, 127] for type tinyint(line 1, pos 7)\n\n== SQL ==\nselect 128Y\n-------^^^\n"
-  }
-}
+
+Numeric literal 128 does not fit in range [-128, 127] for type tinyint(line 1, pos 7)
+
+== SQL ==
+select 128Y
+-------^^^
 
 
 -- !query
@@ -67,12 +67,12 @@ select 32768S
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nNumeric literal 32768 does not fit in range [-32768, 32767] for type smallint(line 1, pos 7)\n\n== SQL ==\nselect 32768S\n-------^^^\n"
-  }
-}
+
+Numeric literal 32768 does not fit in range [-32768, 32767] for type smallint(line 1, pos 7)
+
+== SQL ==
+select 32768S
+-------^^^
 
 
 -- !query
@@ -97,12 +97,12 @@ select 9223372036854775808L
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nNumeric literal 9223372036854775808 does not fit in range [-9223372036854775808, 9223372036854775807] for type bigint(line 1, pos 7)\n\n== SQL ==\nselect 9223372036854775808L\n-------^^^\n"
-  }
-}
+
+Numeric literal 9223372036854775808 does not fit in range [-9223372036854775808, 9223372036854775807] for type bigint(line 1, pos 7)
+
+== SQL ==
+select 9223372036854775808L
+-------^^^
 
 
 -- !query
@@ -143,12 +143,10 @@ select 1234567890123456789012345678901234567890
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\ndecimal can only support precision up to 38\n== SQL ==\nselect 1234567890123456789012345678901234567890"
-  }
-}
+
+decimal can only support precision up to 38
+== SQL ==
+select 1234567890123456789012345678901234567890
 
 
 -- !query
@@ -157,12 +155,10 @@ select 1234567890123456789012345678901234567890.0
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\ndecimal can only support precision up to 38\n== SQL ==\nselect 1234567890123456789012345678901234567890.0"
-  }
-}
+
+decimal can only support precision up to 38
+== SQL ==
+select 1234567890123456789012345678901234567890.0
 
 
 -- !query
@@ -187,12 +183,12 @@ select -3.4028235E39f
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nNumeric literal -3.4028235E39 does not fit in range [-3.4028234663852886E+38, 3.4028234663852886E+38] for type float(line 1, pos 7)\n\n== SQL ==\nselect -3.4028235E39f\n-------^^^\n"
-  }
-}
+
+Numeric literal -3.4028235E39 does not fit in range [-3.4028234663852886E+38, 3.4028234663852886E+38] for type float(line 1, pos 7)
+
+== SQL ==
+select -3.4028235E39f
+-------^^^
 
 
 -- !query
@@ -233,12 +229,12 @@ select 1E309, -1E309
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nNumeric literal 1E309 does not fit in range [-1.7976931348623157E+308, 1.7976931348623157E+308] for type double(line 1, pos 7)\n\n== SQL ==\nselect 1E309, -1E309\n-------^^^\n"
-  }
-}
+
+Numeric literal 1E309 does not fit in range [-1.7976931348623157E+308, 1.7976931348623157E+308] for type double(line 1, pos 7)
+
+== SQL ==
+select 1E309, -1E309
+-------^^^
 
 
 -- !query
@@ -337,12 +333,12 @@ select date 'mar 11 2016'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nCannot parse the DATE value: mar 11 2016(line 1, pos 7)\n\n== SQL ==\nselect date 'mar 11 2016'\n-------^^^\n"
-  }
-}
+
+Cannot parse the DATE value: mar 11 2016(line 1, pos 7)
+
+== SQL ==
+select date 'mar 11 2016'
+-------^^^
 
 
 -- !query
@@ -359,12 +355,12 @@ select timestamp '2016-33-11 20:54:00.000'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nCannot parse the TIMESTAMP value: 2016-33-11 20:54:00.000(line 1, pos 7)\n\n== SQL ==\nselect timestamp '2016-33-11 20:54:00.000'\n-------^^^\n"
-  }
-}
+
+Cannot parse the TIMESTAMP value: 2016-33-11 20:54:00.000(line 1, pos 7)
+
+== SQL ==
+select timestamp '2016-33-11 20:54:00.000'
+-------^^^
 
 
 -- !query
@@ -373,12 +369,12 @@ select GEO '(10,-6)'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nLiterals of type 'GEO' are currently not supported.(line 1, pos 7)\n\n== SQL ==\nselect GEO '(10,-6)'\n-------^^^\n"
-  }
-}
+
+Literals of type 'GEO' are currently not supported.(line 1, pos 7)
+
+== SQL ==
+select GEO '(10,-6)'
+-------^^^
 
 
 -- !query
@@ -395,12 +391,12 @@ select 1.20E-38BD
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\ndecimal can only support precision up to 38(line 1, pos 7)\n\n== SQL ==\nselect 1.20E-38BD\n-------^^^\n"
-  }
-}
+
+decimal can only support precision up to 38(line 1, pos 7)
+
+== SQL ==
+select 1.20E-38BD
+-------^^^
 
 
 -- !query
@@ -417,12 +413,12 @@ select X'XuZ'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\ncontains illegal character for hexBinary: 0XuZ(line 1, pos 7)\n\n== SQL ==\nselect X'XuZ'\n-------^^^\n"
-  }
-}
+
+contains illegal character for hexBinary: 0XuZ(line 1, pos 7)
+
+== SQL ==
+select X'XuZ'
+-------^^^
 
 
 -- !query
@@ -439,12 +435,7 @@ select +date '1999-01-01'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(+ DATE '1999-01-01')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'DATE '1999-01-01'' is of date type.; line 1 pos 7"
-  }
-}
+cannot resolve '(+ DATE '1999-01-01')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'DATE '1999-01-01'' is of date type.; line 1 pos 7
 
 
 -- !query
@@ -453,12 +444,7 @@ select +timestamp '1999-01-01'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(+ TIMESTAMP '1999-01-01 00:00:00')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'TIMESTAMP '1999-01-01 00:00:00'' is of timestamp type.; line 1 pos 7"
-  }
-}
+cannot resolve '(+ TIMESTAMP '1999-01-01 00:00:00')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'TIMESTAMP '1999-01-01 00:00:00'' is of timestamp type.; line 1 pos 7
 
 
 -- !query
@@ -475,12 +461,7 @@ select +map(1, 2)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(+ map(1, 2))' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'map(1, 2)' is of map type.; line 1 pos 7"
-  }
-}
+cannot resolve '(+ map(1, 2))' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'map(1, 2)' is of map type.; line 1 pos 7
 
 
 -- !query
@@ -489,12 +470,7 @@ select +array(1,2)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(+ array(1, 2))' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'array(1, 2)' is of array type.; line 1 pos 7"
-  }
-}
+cannot resolve '(+ array(1, 2))' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'array(1, 2)' is of array type.; line 1 pos 7
 
 
 -- !query
@@ -503,12 +479,7 @@ select +named_struct('a', 1, 'b', 'spark')
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(+ named_struct('a', 1, 'b', 'spark'))' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'named_struct('a', 1, 'b', 'spark')' is of struct type.; line 1 pos 7"
-  }
-}
+cannot resolve '(+ named_struct('a', 1, 'b', 'spark'))' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'named_struct('a', 1, 'b', 'spark')' is of struct type.; line 1 pos 7
 
 
 -- !query
@@ -517,12 +488,7 @@ select +X'1'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(+ X'01')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'X'01'' is of binary type.; line 1 pos 7"
-  }
-}
+cannot resolve '(+ X'01')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'X'01'' is of binary type.; line 1 pos 7
 
 
 -- !query
@@ -531,12 +497,7 @@ select -date '1999-01-01'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(- DATE '1999-01-01')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'DATE '1999-01-01'' is of date type.; line 1 pos 7"
-  }
-}
+cannot resolve '(- DATE '1999-01-01')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'DATE '1999-01-01'' is of date type.; line 1 pos 7
 
 
 -- !query
@@ -545,12 +506,7 @@ select -timestamp '1999-01-01'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(- TIMESTAMP '1999-01-01 00:00:00')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'TIMESTAMP '1999-01-01 00:00:00'' is of timestamp type.; line 1 pos 7"
-  }
-}
+cannot resolve '(- TIMESTAMP '1999-01-01 00:00:00')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'TIMESTAMP '1999-01-01 00:00:00'' is of timestamp type.; line 1 pos 7
 
 
 -- !query
@@ -559,9 +515,4 @@ select -x'2379ACFe'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(- X'2379ACFE')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'X'2379ACFE'' is of binary type.; line 1 pos 7"
-  }
-}
+cannot resolve '(- X'2379ACFE')' due to data type mismatch: argument 1 requires (numeric or interval day to second or interval year to month or interval) type, however, 'X'2379ACFE'' is of binary type.; line 1 pos 7
diff --git a/sql/core/src/test/resources/sql-tests/results/map.sql.out b/sql/core/src/test/resources/sql-tests/results/map.sql.out
index 3caa17edbcd55..cd7cf9a60ce37 100644
--- a/sql/core/src/test/resources/sql-tests/results/map.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/map.sql.out
@@ -69,12 +69,7 @@ select map_contains_key(map('1', 'a', '2', 'b'), 1)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve 'map_contains_key(map('1', 'a', '2', 'b'), 1)' due to data type mismatch: Input to function map_contains_key should have been map followed by a value with same key type, but it's [map, int].; line 1 pos 7"
-  }
-}
+cannot resolve 'map_contains_key(map('1', 'a', '2', 'b'), 1)' due to data type mismatch: Input to function map_contains_key should have been map followed by a value with same key type, but it's [map, int].; line 1 pos 7
 
 
 -- !query
@@ -83,9 +78,4 @@ select map_contains_key(map(1, 'a', 2, 'b'), '1')
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve 'map_contains_key(map(1, 'a', 2, 'b'), '1')' due to data type mismatch: Input to function map_contains_key should have been map followed by a value with same key type, but it's [map, string].; line 1 pos 7"
-  }
-}
+cannot resolve 'map_contains_key(map(1, 'a', 2, 'b'), '1')' due to data type mismatch: Input to function map_contains_key should have been map followed by a value with same key type, but it's [map, string].; line 1 pos 7
diff --git a/sql/core/src/test/resources/sql-tests/results/misc-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/misc-functions.sql.out
index fe393d6eaf340..7e9bb2f7acd8a 100644
--- a/sql/core/src/test/resources/sql-tests/results/misc-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/misc-functions.sql.out
@@ -69,12 +69,7 @@ SELECT assert_true(false)
 struct<>
 -- !query output
 java.lang.RuntimeException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "'false' is not true!"
-  }
-}
+'false' is not true!
 
 
 -- !query
@@ -83,12 +78,7 @@ SELECT assert_true(boolean(0))
 struct<>
 -- !query output
 java.lang.RuntimeException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "'cast(0 as boolean)' is not true!"
-  }
-}
+'cast(0 as boolean)' is not true!
 
 
 -- !query
@@ -97,12 +87,7 @@ SELECT assert_true(null)
 struct<>
 -- !query output
 java.lang.RuntimeException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "'null' is not true!"
-  }
-}
+'null' is not true!
 
 
 -- !query
@@ -111,12 +96,7 @@ SELECT assert_true(boolean(null))
 struct<>
 -- !query output
 java.lang.RuntimeException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "'cast(null as boolean)' is not true!"
-  }
-}
+'cast(null as boolean)' is not true!
 
 
 -- !query
@@ -125,12 +105,7 @@ SELECT assert_true(false, 'custom error message')
 struct<>
 -- !query output
 java.lang.RuntimeException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "custom error message"
-  }
-}
+custom error message
 
 
 -- !query
@@ -147,12 +122,7 @@ SELECT raise_error('error message')
 struct<>
 -- !query output
 java.lang.RuntimeException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "error message"
-  }
-}
+error message
 
 
 -- !query
@@ -161,9 +131,4 @@ SELECT if(v > 5, raise_error('too big: ' || v), v + 1) FROM tbl_misc
 struct<>
 -- !query output
 java.lang.RuntimeException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "too big: 8"
-  }
-}
+too big: 8
diff --git a/sql/core/src/test/resources/sql-tests/results/order-by-ordinal.sql.out b/sql/core/src/test/resources/sql-tests/results/order-by-ordinal.sql.out
index 358e19202ec0b..581d7d4ae2d39 100644
--- a/sql/core/src/test/resources/sql-tests/results/order-by-ordinal.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/order-by-ordinal.sql.out
@@ -72,12 +72,7 @@ select * from data order by 0
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "ORDER BY position 0 is not in select list (valid range is [1, 2]); line 1 pos 28"
-  }
-}
+ORDER BY position 0 is not in select list (valid range is [1, 2]); line 1 pos 28
 
 
 -- !query
@@ -86,12 +81,7 @@ select * from data order by -1
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "ORDER BY position -1 is not in select list (valid range is [1, 2]); line 1 pos 28"
-  }
-}
+ORDER BY position -1 is not in select list (valid range is [1, 2]); line 1 pos 28
 
 
 -- !query
@@ -100,12 +90,7 @@ select * from data order by 3
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "ORDER BY position 3 is not in select list (valid range is [1, 2]); line 1 pos 28"
-  }
-}
+ORDER BY position 3 is not in select list (valid range is [1, 2]); line 1 pos 28
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/percentiles.sql.out b/sql/core/src/test/resources/sql-tests/results/percentiles.sql.out
index 4dafdb31fabf6..f124dcc322e12 100644
--- a/sql/core/src/test/resources/sql-tests/results/percentiles.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/percentiles.sql.out
@@ -178,12 +178,7 @@ ORDER BY salary
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Cannot specify order by or frame for 'percentile_cont'."
-  }
-}
+Cannot specify order by or frame for 'percentile_cont'.
 
 
 -- !query
@@ -199,12 +194,7 @@ ORDER BY salary
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Cannot specify order by or frame for 'percentile_disc'."
-  }
-}
+Cannot specify order by or frame for 'percentile_disc'.
 
 
 -- !query
@@ -219,12 +209,7 @@ ORDER BY salary
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Cannot specify order by or frame for 'median'."
-  }
-}
+Cannot specify order by or frame for 'median'.
 
 
 -- !query
@@ -240,12 +225,7 @@ ORDER BY salary
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Cannot specify order by or frame for 'percentile_cont'."
-  }
-}
+Cannot specify order by or frame for 'percentile_cont'.
 
 
 -- !query
@@ -261,12 +241,7 @@ ORDER BY salary
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Cannot specify order by or frame for 'percentile_disc'."
-  }
-}
+Cannot specify order by or frame for 'percentile_disc'.
 
 
 -- !query
@@ -281,12 +256,7 @@ ORDER BY salary
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Cannot specify order by or frame for 'median'."
-  }
-}
+Cannot specify order by or frame for 'median'.
 
 
 -- !query
@@ -366,12 +336,7 @@ ORDER BY salary
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Cannot specify order by or frame for 'percentile_cont'."
-  }
-}
+Cannot specify order by or frame for 'percentile_cont'.
 
 
 -- !query
@@ -388,12 +353,7 @@ ORDER BY salary
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Cannot specify order by or frame for 'percentile_disc'."
-  }
-}
+Cannot specify order by or frame for 'percentile_disc'.
 
 
 -- !query
@@ -409,12 +369,7 @@ ORDER BY salary
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Cannot specify order by or frame for 'median'."
-  }
-}
+Cannot specify order by or frame for 'median'.
 
 
 -- !query
@@ -431,12 +386,7 @@ ORDER BY salary
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Cannot specify order by or frame for 'percentile_cont'."
-  }
-}
+Cannot specify order by or frame for 'percentile_cont'.
 
 
 -- !query
@@ -453,12 +403,7 @@ ORDER BY salary
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Cannot specify order by or frame for 'percentile_disc'."
-  }
-}
+Cannot specify order by or frame for 'percentile_disc'.
 
 
 -- !query
@@ -474,12 +419,7 @@ ORDER BY salary
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Cannot specify order by or frame for 'median'."
-  }
-}
+Cannot specify order by or frame for 'median'.
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/pivot.sql.out b/sql/core/src/test/resources/sql-tests/results/pivot.sql.out
index d9084f0f47f69..239ce7a8eda79 100644
--- a/sql/core/src/test/resources/sql-tests/results/pivot.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/pivot.sql.out
@@ -199,12 +199,7 @@ PIVOT (
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Aggregate expression required for pivot, but 'coursesales.earnings' did not appear in any aggregate function."
-  }
-}
+Aggregate expression required for pivot, but 'coursesales.earnings' did not appear in any aggregate function.
 
 
 -- !query
@@ -219,12 +214,7 @@ PIVOT (
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Aggregate expression required for pivot, but '__auto_generated_subquery_name.year' did not appear in any aggregate function."
-  }
-}
+Aggregate expression required for pivot, but '__auto_generated_subquery_name.year' did not appear in any aggregate function.
 
 
 -- !query
@@ -276,12 +266,7 @@ PIVOT (
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "It is not allowed to use an aggregate function in the argument of another aggregate function. Please use the inner aggregate function in a sub-query."
-  }
-}
+It is not allowed to use an aggregate function in the argument of another aggregate function. Please use the inner aggregate function in a sub-query.
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/aggregates_part1.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/aggregates_part1.sql.out
index aec2bc85b3836..79bfc138c742c 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/aggregates_part1.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/aggregates_part1.sql.out
@@ -484,12 +484,7 @@ having exists (select 1 from onek b
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Found an aggregate function in a correlated predicate that has both outer and local references, which is not supported: sum(DISTINCT (outer(a.four) + b.four))"
-  }
-}
+Found an aggregate function in a correlated predicate that has both outer and local references, which is not supported: sum(DISTINCT (outer(a.four) + b.four))
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/aggregates_part3.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/aggregates_part3.sql.out
index a8c4c70ad9823..450dd5ca743cc 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/aggregates_part3.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/aggregates_part3.sql.out
@@ -5,12 +5,7 @@ select max(min(unique1)) from tenk1
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "It is not allowed to use an aggregate function in the argument of another aggregate function. Please use the inner aggregate function in a sub-query."
-  }
-}
+It is not allowed to use an aggregate function in the argument of another aggregate function. Please use the inner aggregate function in a sub-query.
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/boolean.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/boolean.sql.out
index 9e8ad0028df52..2ef04e1669232 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/boolean.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/boolean.sql.out
@@ -716,12 +716,11 @@ INSERT INTO BOOLTBL2
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "failed to evaluate expression CAST('XXX' AS BOOLEAN): [CAST_INVALID_INPUT] The value 'XXX' of the type \"STRING\" cannot be cast to \"BOOLEAN\" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set \"spark.sql.ansi.enabled\" to \"false\" to bypass this error.\n== SQL(line 2, position 12) ==\n   VALUES (boolean('XXX'))\n           ^^^^^^^^^^^^^^\n; line 2 pos 3"
-  }
-}
+failed to evaluate expression CAST('XXX' AS BOOLEAN): [CAST_INVALID_INPUT] The value 'XXX' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
+== SQL(line 2, position 12) ==
+   VALUES (boolean('XXX'))
+           ^^^^^^^^^^^^^^
+; line 2 pos 3
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/create_view.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/create_view.sql.out
index 72c4016abad8d..6556538304f29 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/create_view.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/create_view.sql.out
@@ -53,12 +53,7 @@ CREATE VIEW key_dependent_view AS
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "expression 'spark_catalog.default.view_base_table.data' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get."
-  }
-}
+expression 'spark_catalog.default.view_base_table.data' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get.
 
 
 -- !query
@@ -269,12 +264,7 @@ CREATE VIEW v1_temp AS SELECT * FROM temp_table
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v1_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW"
-  }
-}
+Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v1_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW
 
 
 -- !query
@@ -330,12 +320,7 @@ CREATE VIEW temp_view_test.v3_temp AS SELECT * FROM temp_table
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v3_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW"
-  }
-}
+Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v3_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW
 
 
 -- !query
@@ -384,12 +369,7 @@ CREATE VIEW v4_temp AS
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v4_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW"
-  }
-}
+Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v4_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW
 
 
 -- !query
@@ -401,12 +381,7 @@ CREATE VIEW v5_temp AS
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v5_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW"
-  }
-}
+Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v5_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW
 
 
 -- !query
@@ -565,12 +540,7 @@ CREATE VIEW v6_temp AS SELECT * FROM base_table WHERE id IN (SELECT id FROM temp
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v6_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW"
-  }
-}
+Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v6_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW
 
 
 -- !query
@@ -579,12 +549,7 @@ CREATE VIEW v7_temp AS SELECT t1.id, t2.a FROM base_table t1, (SELECT * FROM tem
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v7_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW"
-  }
-}
+Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v7_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW
 
 
 -- !query
@@ -593,12 +558,7 @@ CREATE VIEW v8_temp AS SELECT * FROM base_table WHERE EXISTS (SELECT 1 FROM temp
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v8_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW"
-  }
-}
+Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v8_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW
 
 
 -- !query
@@ -607,12 +567,7 @@ CREATE VIEW v9_temp AS SELECT * FROM base_table WHERE NOT EXISTS (SELECT 1 FROM
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v9_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW"
-  }
-}
+Not allowed to create a permanent view `spark_catalog`.`temp_view_test`.`v9_temp` by referencing a temporary view temp_table. Please create a temp view instead by CREATE TEMP VIEW
 
 
 -- !query
@@ -621,12 +576,7 @@ CREATE VIEW v10_temp AS SELECT * FROM v7_temp
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Table or view not found: v7_temp; line 1 pos 38"
-  }
-}
+Table or view not found: v7_temp; line 1 pos 38
 
 
 -- !query
@@ -635,12 +585,7 @@ CREATE VIEW v11_temp AS SELECT t1.id, t2.a FROM base_table t1, v10_temp t2
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Table or view not found: v10_temp; line 1 pos 63"
-  }
-}
+Table or view not found: v10_temp; line 1 pos 63
 
 
 -- !query
@@ -649,12 +594,7 @@ CREATE VIEW v12_temp AS SELECT true FROM v11_temp
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Table or view not found: v11_temp; line 1 pos 41"
-  }
-}
+Table or view not found: v11_temp; line 1 pos 41
 
 
 -- !query
@@ -736,12 +676,7 @@ CREATE VIEW temporal1 AS SELECT * FROM t1 CROSS JOIN tt
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Not allowed to create a permanent view `spark_catalog`.`testviewschm2`.`temporal1` by referencing a temporary view tt. Please create a temp view instead by CREATE TEMP VIEW"
-  }
-}
+Not allowed to create a permanent view `spark_catalog`.`testviewschm2`.`temporal1` by referencing a temporary view tt. Please create a temp view instead by CREATE TEMP VIEW
 
 
 -- !query
@@ -782,12 +717,7 @@ CREATE VIEW temporal2 AS SELECT * FROM t1 INNER JOIN tt ON t1.num = tt.num2
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Not allowed to create a permanent view `spark_catalog`.`testviewschm2`.`temporal2` by referencing a temporary view tt. Please create a temp view instead by CREATE TEMP VIEW"
-  }
-}
+Not allowed to create a permanent view `spark_catalog`.`testviewschm2`.`temporal2` by referencing a temporary view tt. Please create a temp view instead by CREATE TEMP VIEW
 
 
 -- !query
@@ -828,12 +758,7 @@ CREATE VIEW temporal3 AS SELECT * FROM t1 LEFT JOIN tt ON t1.num = tt.num2
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Not allowed to create a permanent view `spark_catalog`.`testviewschm2`.`temporal3` by referencing a temporary view tt. Please create a temp view instead by CREATE TEMP VIEW"
-  }
-}
+Not allowed to create a permanent view `spark_catalog`.`testviewschm2`.`temporal3` by referencing a temporary view tt. Please create a temp view instead by CREATE TEMP VIEW
 
 
 -- !query
@@ -874,12 +799,7 @@ CREATE VIEW temporal4 AS SELECT * FROM t1 LEFT JOIN tt ON t1.num = tt.num2 AND t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Not allowed to create a permanent view `spark_catalog`.`testviewschm2`.`temporal4` by referencing a temporary view tt. Please create a temp view instead by CREATE TEMP VIEW"
-  }
-}
+Not allowed to create a permanent view `spark_catalog`.`testviewschm2`.`temporal4` by referencing a temporary view tt. Please create a temp view instead by CREATE TEMP VIEW
 
 
 -- !query
@@ -888,12 +808,7 @@ CREATE VIEW temporal5 AS SELECT * FROM t1 WHERE num IN (SELECT num FROM t1 WHERE
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Not allowed to create a permanent view `spark_catalog`.`testviewschm2`.`temporal5` by referencing a temporary view tt. Please create a temp view instead by CREATE TEMP VIEW"
-  }
-}
+Not allowed to create a permanent view `spark_catalog`.`testviewschm2`.`temporal5` by referencing a temporary view tt. Please create a temp view instead by CREATE TEMP VIEW
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out
index 618677eb2e663..7edda980be3fd 100755
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/date.sql.out
@@ -198,12 +198,12 @@ SELECT date '1999 Jan 08'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nCannot parse the DATE value: 1999 Jan 08(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 Jan 08'\n-------^^^\n"
-  }
-}
+
+Cannot parse the DATE value: 1999 Jan 08(line 1, pos 7)
+
+== SQL ==
+SELECT date '1999 Jan 08'
+-------^^^
 
 
 -- !query
@@ -212,12 +212,12 @@ SELECT date '1999 08 Jan'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nCannot parse the DATE value: 1999 08 Jan(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 08 Jan'\n-------^^^\n"
-  }
-}
+
+Cannot parse the DATE value: 1999 08 Jan(line 1, pos 7)
+
+== SQL ==
+SELECT date '1999 08 Jan'
+-------^^^
 
 
 -- !query
@@ -242,12 +242,12 @@ SELECT date '1999 01 08'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nCannot parse the DATE value: 1999 01 08(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 01 08'\n-------^^^\n"
-  }
-}
+
+Cannot parse the DATE value: 1999 01 08(line 1, pos 7)
+
+== SQL ==
+SELECT date '1999 01 08'
+-------^^^
 
 
 -- !query
@@ -256,12 +256,12 @@ SELECT date '1999 08 01'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nCannot parse the DATE value: 1999 08 01(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 08 01'\n-------^^^\n"
-  }
-}
+
+Cannot parse the DATE value: 1999 08 01(line 1, pos 7)
+
+== SQL ==
+SELECT date '1999 08 01'
+-------^^^
 
 
 -- !query
@@ -278,12 +278,12 @@ SELECT date '1999 Jan 08'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nCannot parse the DATE value: 1999 Jan 08(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 Jan 08'\n-------^^^\n"
-  }
-}
+
+Cannot parse the DATE value: 1999 Jan 08(line 1, pos 7)
+
+== SQL ==
+SELECT date '1999 Jan 08'
+-------^^^
 
 
 -- !query
@@ -292,12 +292,12 @@ SELECT date '1999 08 Jan'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nCannot parse the DATE value: 1999 08 Jan(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 08 Jan'\n-------^^^\n"
-  }
-}
+
+Cannot parse the DATE value: 1999 08 Jan(line 1, pos 7)
+
+== SQL ==
+SELECT date '1999 08 Jan'
+-------^^^
 
 
 -- !query
@@ -322,12 +322,12 @@ SELECT date '1999 01 08'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nCannot parse the DATE value: 1999 01 08(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 01 08'\n-------^^^\n"
-  }
-}
+
+Cannot parse the DATE value: 1999 01 08(line 1, pos 7)
+
+== SQL ==
+SELECT date '1999 01 08'
+-------^^^
 
 
 -- !query
@@ -336,12 +336,12 @@ SELECT date '1999 08 01'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nCannot parse the DATE value: 1999 08 01(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 08 01'\n-------^^^\n"
-  }
-}
+
+Cannot parse the DATE value: 1999 08 01(line 1, pos 7)
+
+== SQL ==
+SELECT date '1999 08 01'
+-------^^^
 
 
 -- !query
@@ -366,12 +366,12 @@ SELECT date '1999 Jan 08'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nCannot parse the DATE value: 1999 Jan 08(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 Jan 08'\n-------^^^\n"
-  }
-}
+
+Cannot parse the DATE value: 1999 Jan 08(line 1, pos 7)
+
+== SQL ==
+SELECT date '1999 Jan 08'
+-------^^^
 
 
 -- !query
@@ -380,12 +380,12 @@ SELECT date '1999 08 Jan'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nCannot parse the DATE value: 1999 08 Jan(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 08 Jan'\n-------^^^\n"
-  }
-}
+
+Cannot parse the DATE value: 1999 08 Jan(line 1, pos 7)
+
+== SQL ==
+SELECT date '1999 08 Jan'
+-------^^^
 
 
 -- !query
@@ -410,12 +410,12 @@ SELECT date '1999 01 08'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nCannot parse the DATE value: 1999 01 08(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 01 08'\n-------^^^\n"
-  }
-}
+
+Cannot parse the DATE value: 1999 01 08(line 1, pos 7)
+
+== SQL ==
+SELECT date '1999 01 08'
+-------^^^
 
 
 -- !query
@@ -424,12 +424,12 @@ SELECT date '1999 08 01'
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nCannot parse the DATE value: 1999 08 01(line 1, pos 7)\n\n== SQL ==\nSELECT date '1999 08 01'\n-------^^^\n"
-  }
-}
+
+Cannot parse the DATE value: 1999 08 01(line 1, pos 7)
+
+== SQL ==
+SELECT date '1999 08 01'
+-------^^^
 
 
 -- !query
@@ -578,12 +578,7 @@ select make_date(2013, 2, 30)
 struct<>
 -- !query output
 java.time.DateTimeException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Invalid date 'FEBRUARY 30'. If necessary set spark.sql.ansi.enabled to false to bypass this error."
-  }
-}
+Invalid date 'FEBRUARY 30'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -592,12 +587,7 @@ select make_date(2013, 13, 1)
 struct<>
 -- !query output
 java.time.DateTimeException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Invalid value for MonthOfYear (valid values 1 - 12): 13. If necessary set spark.sql.ansi.enabled to false to bypass this error."
-  }
-}
+Invalid value for MonthOfYear (valid values 1 - 12): 13. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -606,12 +596,7 @@ select make_date(2013, 11, -1)
 struct<>
 -- !query output
 java.time.DateTimeException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Invalid value for DayOfMonth (valid values 1 - 28/31): -1. If necessary set spark.sql.ansi.enabled to false to bypass this error."
-  }
-}
+Invalid value for DayOfMonth (valid values 1 - 28/31): -1. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out
index 1145389b706f6..e4056ac2e6c8c 100755
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out
@@ -852,12 +852,12 @@ SELECT * FROM range(bigint('+4567890123456789'), bigint('+4567890123456799'), 0)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "Table-valued function range with alternatives: \n  range(start: long, end: long, step: long, numSlices: integer)\n  range(start: long, end: long, step: long)\n  range(start: long, end: long)\n  range(end: long)\ncannot be applied to (long, long, integer): requirement failed: step (0) cannot be 0; line 1 pos 14"
-  }
-}
+Table-valued function range with alternatives: 
+  range(start: long, end: long, step: long, numSlices: integer)
+  range(start: long, end: long, step: long)
+  range(start: long, end: long)
+  range(end: long)
+cannot be applied to (long, long, integer): requirement failed: step (0) cannot be 0; line 1 pos 14
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out
index 0f7c6ca49f946..04df07bff577b 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out
@@ -101,12 +101,12 @@ SELECT interval '1 2:03' day to hour
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nInterval string does not match day-time format of `[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval day to hour: 1 2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nSELECT interval '1 2:03' day to hour\n----------------^^^\n"
-  }
-}
+
+Interval string does not match day-time format of `[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval day to hour: 1 2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)
+
+== SQL ==
+SELECT interval '1 2:03' day to hour
+----------------^^^
 
 
 -- !query
@@ -115,12 +115,12 @@ SELECT interval '1 2:03:04' day to hour
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nInterval string does not match day-time format of `[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval day to hour: 1 2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nSELECT interval '1 2:03:04' day to hour\n----------------^^^\n"
-  }
-}
+
+Interval string does not match day-time format of `[+|-]d h`, `INTERVAL [+|-]'[+|-]d h' DAY TO HOUR` when cast to interval day to hour: 1 2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)
+
+== SQL ==
+SELECT interval '1 2:03:04' day to hour
+----------------^^^
 
 
 -- !query
@@ -137,12 +137,12 @@ SELECT interval '1 2:03:04' day to minute
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nInterval string does not match day-time format of `[+|-]d h:m`, `INTERVAL [+|-]'[+|-]d h:m' DAY TO MINUTE` when cast to interval day to minute: 1 2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nSELECT interval '1 2:03:04' day to minute\n----------------^^^\n"
-  }
-}
+
+Interval string does not match day-time format of `[+|-]d h:m`, `INTERVAL [+|-]'[+|-]d h:m' DAY TO MINUTE` when cast to interval day to minute: 1 2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)
+
+== SQL ==
+SELECT interval '1 2:03:04' day to minute
+----------------^^^
 
 
 -- !query
@@ -151,12 +151,12 @@ SELECT interval '1 2:03' day to second
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nInterval string does not match day-time format of `[+|-]d h:m:s.n`, `INTERVAL [+|-]'[+|-]d h:m:s.n' DAY TO SECOND` when cast to interval day to second: 1 2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nSELECT interval '1 2:03' day to second\n----------------^^^\n"
-  }
-}
+
+Interval string does not match day-time format of `[+|-]d h:m:s.n`, `INTERVAL [+|-]'[+|-]d h:m:s.n' DAY TO SECOND` when cast to interval day to second: 1 2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)
+
+== SQL ==
+SELECT interval '1 2:03' day to second
+----------------^^^
 
 
 -- !query
@@ -173,12 +173,12 @@ SELECT interval '1 2:03' hour to minute
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nInterval string does not match day-time format of `[+|-]h:m`, `INTERVAL [+|-]'[+|-]h:m' HOUR TO MINUTE` when cast to interval hour to minute: 1 2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nSELECT interval '1 2:03' hour to minute\n----------------^^^\n"
-  }
-}
+
+Interval string does not match day-time format of `[+|-]h:m`, `INTERVAL [+|-]'[+|-]h:m' HOUR TO MINUTE` when cast to interval hour to minute: 1 2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)
+
+== SQL ==
+SELECT interval '1 2:03' hour to minute
+----------------^^^
 
 
 -- !query
@@ -187,12 +187,12 @@ SELECT interval '1 2:03:04' hour to minute
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nInterval string does not match day-time format of `[+|-]h:m`, `INTERVAL [+|-]'[+|-]h:m' HOUR TO MINUTE` when cast to interval hour to minute: 1 2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nSELECT interval '1 2:03:04' hour to minute\n----------------^^^\n"
-  }
-}
+
+Interval string does not match day-time format of `[+|-]h:m`, `INTERVAL [+|-]'[+|-]h:m' HOUR TO MINUTE` when cast to interval hour to minute: 1 2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)
+
+== SQL ==
+SELECT interval '1 2:03:04' hour to minute
+----------------^^^
 
 
 -- !query
@@ -201,12 +201,12 @@ SELECT interval '1 2:03' hour to second
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nInterval string does not match day-time format of `[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 1 2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nSELECT interval '1 2:03' hour to second\n----------------^^^\n"
-  }
-}
+
+Interval string does not match day-time format of `[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 1 2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)
+
+== SQL ==
+SELECT interval '1 2:03' hour to second
+----------------^^^
 
 
 -- !query
@@ -215,12 +215,12 @@ SELECT interval '1 2:03:04' hour to second
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nInterval string does not match day-time format of `[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 1 2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nSELECT interval '1 2:03:04' hour to second\n----------------^^^\n"
-  }
-}
+
+Interval string does not match day-time format of `[+|-]h:m:s.n`, `INTERVAL [+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 1 2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)
+
+== SQL ==
+SELECT interval '1 2:03:04' hour to second
+----------------^^^
 
 
 -- !query
@@ -229,12 +229,12 @@ SELECT interval '1 2:03' minute to second
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nInterval string does not match day-time format of `[+|-]m:s.n`, `INTERVAL [+|-]'[+|-]m:s.n' MINUTE TO SECOND` when cast to interval minute to second: 1 2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nSELECT interval '1 2:03' minute to second\n----------------^^^\n"
-  }
-}
+
+Interval string does not match day-time format of `[+|-]m:s.n`, `INTERVAL [+|-]'[+|-]m:s.n' MINUTE TO SECOND` when cast to interval minute to second: 1 2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)
+
+== SQL ==
+SELECT interval '1 2:03' minute to second
+----------------^^^
 
 
 -- !query
@@ -243,9 +243,9 @@ SELECT interval '1 2:03:04' minute to second
 struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "\nInterval string does not match day-time format of `[+|-]m:s.n`, `INTERVAL [+|-]'[+|-]m:s.n'
MINUTE TO SECOND` when cast to interval minute to second: 1 2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16)\n\n== SQL ==\nSELECT interval '1 2:03:04' minute to second\n----------------^^^\n" - } -} + +Interval string does not match day-time format of `[+|-]m:s.n`, `INTERVAL [+|-]'[+|-]m:s.n' MINUTE TO SECOND` when cast to interval minute to second: 1 2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior before Spark 3.0.(line 1, pos 16) + +== SQL == +SELECT interval '1 2:03:04' minute to second +----------------^^^ diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/join.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/join.sql.out index ee0e46e1171f2..4b2013ba20677 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/join.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/join.sql.out @@ -546,12 +546,7 @@ SELECT '' AS `xxx`, i, k, t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Reference 'i' is ambiguous, could be: spark_catalog.default.j1_tbl.i, spark_catalog.default.j2_tbl.i.; line 1 pos 20" - } -} +Reference 'i' is ambiguous, could be: spark_catalog.default.j1_tbl.i, spark_catalog.default.j2_tbl.i.; line 1 pos 20 -- !query @@ -3240,12 +3235,7 @@ select * from struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Reference 'f1' is ambiguous, could be: j.f1, j.f1.; line 2 pos 63" - } -} +Reference 'f1' is ambiguous, could be: j.f1, j.f1.; line 2 pos 63 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/limit.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/limit.sql.out index feb5d089d8211..ece34bf3f1c66 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/limit.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/limit.sql.out @@ -131,12 +131,7 @@ select * from int8_tbl limit (case when random() < 0.5 then bigint(null) end) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "The limit expression must evaluate to a constant value, but got CASE WHEN (_nondeterministic < CAST(0.5BD AS DOUBLE)) THEN CAST(NULL AS BIGINT) END" - } -} +The limit expression must evaluate to a constant value, but got CASE WHEN (_nondeterministic < CAST(0.5BD AS DOUBLE)) THEN CAST(NULL AS BIGINT) END -- !query @@ -145,12 +140,7 @@ select * from int8_tbl offset (case when random() < 0.5 then bigint(null) end) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "The offset expression must evaluate to a constant value, but got CASE WHEN (_nondeterministic < CAST(0.5BD AS DOUBLE)) THEN CAST(NULL AS BIGINT) END" - } -} +The offset expression must evaluate to a constant value, but got CASE WHEN (_nondeterministic < CAST(0.5BD AS DOUBLE)) THEN CAST(NULL AS BIGINT) END -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/numeric.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/numeric.sql.out index 07b4bb39ab587..c4c24d5ed8615 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/numeric.sql.out +++ 
b/sql/core/src/test/resources/sql-tests/results/postgreSQL/numeric.sql.out @@ -3581,12 +3581,10 @@ INSERT INTO num_exp_power_10_ln VALUES (7,1716699575118597095.423308199106402476 struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\ndecimal can only support precision up to 38\n== SQL ==\nINSERT INTO num_exp_power_10_ln VALUES (7,1716699575118597095.42330819910640247627)" - } -} + +decimal can only support precision up to 38 +== SQL == +INSERT INTO num_exp_power_10_ln VALUES (7,1716699575118597095.42330819910640247627) -- !query @@ -3829,12 +3827,7 @@ INSERT INTO num_result SELECT t1.id, t2.id, t1.val, t2.val, t1.val * t2.val struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "`spark_catalog`.`default`.`num_result` requires that the data to be inserted have the same number of columns as the target table: target table has 3 column(s) but the inserted data has 5 column(s), including 0 partition column(s) having constant value(s)." - } -} +`spark_catalog`.`default`.`num_result` requires that the data to be inserted have the same number of columns as the target table: target table has 3 column(s) but the inserted data has 5 column(s), including 0 partition column(s) having constant value(s). -- !query @@ -4691,12 +4684,7 @@ SELECT '' AS to_number_1, to_number('-34,338,492', '99G999G999') struct<> -- !query output java.lang.IllegalArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "The input string '-34,338,492' does not match the given number format: '99G999G999'" - } -} +The input string '-34,338,492' does not match the given number format: '99G999G999' -- !query @@ -4705,12 +4693,7 @@ SELECT '' AS to_number_2, to_number('-34,338,492.654,878', '99G999G999D999G999' struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'to_number('-34,338,492.654,878', '99G999G999D999G999')' due to data type mismatch: Thousands separators (, or G) may not appear after the decimal point in the number format: '99G999G999D999G999'; line 1 pos 27" - } -} +cannot resolve 'to_number('-34,338,492.654,878', '99G999G999D999G999')' due to data type mismatch: Thousands separators (, or G) may not appear after the decimal point in the number format: '99G999G999D999G999'; line 1 pos 27 -- !query @@ -4759,12 +4742,7 @@ SELECT '' AS to_number_15, to_number('123,000','999G') struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'to_number('123,000', '999G')' due to data type mismatch: Thousands separators (, or G) must have digits in between them in the number format: '999G'; line 1 pos 27" - } -} +cannot resolve 'to_number('123,000', '999G')' due to data type mismatch: Thousands separators (, or G) must have digits in between them in the number format: '999G'; line 1 pos 27 -- !query @@ -4773,12 +4751,7 @@ SELECT '' AS to_number_16, to_number('123456','999G999') struct<> -- !query output java.lang.IllegalArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "The input string '123456' does not match the given number format: '999G999'" - } -} +The input string '123456' does not match the given number format: '999G999' -- !query diff --git 
a/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_having.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_having.sql.out index 518fece9bb04f..4ab2e877902f9 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_having.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_having.sql.out @@ -140,12 +140,7 @@ SELECT a FROM test_having HAVING min(a) < max(a) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "grouping expressions sequence is empty, and 'spark_catalog.default.test_having.a' is not an aggregate function. Wrap '(min(spark_catalog.default.test_having.a) AS `min(a#x)`, max(spark_catalog.default.test_having.a) AS `max(a#x)`)' in windowing function(s) or wrap 'spark_catalog.default.test_having.a' in first() (or first_value) if you don't care which value you get." - } -} +grouping expressions sequence is empty, and 'spark_catalog.default.test_having.a' is not an aggregate function. Wrap '(min(spark_catalog.default.test_having.a) AS `min(a#x)`, max(spark_catalog.default.test_having.a) AS `max(a#x)`)' in windowing function(s) or wrap 'spark_catalog.default.test_having.a' in first() (or first_value) if you don't care which value you get. -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_implicit.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_implicit.sql.out index 29899bd4fcb7b..d39f6101ac7ed 100755 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_implicit.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_implicit.sql.out @@ -207,12 +207,7 @@ SELECT c, count(*) FROM test_missing_target GROUP BY 3 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "GROUP BY position 3 is not in select list (valid range is [1, 2]); line 1 pos 53" - } -} +GROUP BY position 3 is not in select list (valid range is [1, 2]); line 1 pos 53 -- !query @@ -223,12 +218,7 @@ SELECT count(*) FROM test_missing_target x, test_missing_target y struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Reference 'b' is ambiguous, could be: x.b, y.b.; line 3 pos 10" - } -} +Reference 'b' is ambiguous, could be: x.b, y.b.; line 3 pos 10 -- !query @@ -411,12 +401,7 @@ SELECT count(x.a) FROM test_missing_target x, test_missing_target y struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Reference 'b' is ambiguous, could be: x.b, y.b.; line 3 pos 10" - } -} +Reference 'b' is ambiguous, could be: x.b, y.b.; line 3 pos 10 -- !query @@ -440,12 +425,7 @@ SELECT count(b) FROM test_missing_target x, test_missing_target y struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Reference 'b' is ambiguous, could be: x.b, y.b.; line 1 pos 13" - } -} +Reference 'b' is ambiguous, could be: x.b, y.b.; line 1 pos 13 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/strings.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/strings.sql.out index 3a5e3190edbf2..81f964b4bea07 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/strings.sql.out +++ 
b/sql/core/src/test/resources/sql-tests/results/postgreSQL/strings.sql.out @@ -443,12 +443,7 @@ SELECT 'maca' LIKE 'm%aca' ESCAPE '%' AS `true` struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "the pattern 'm%aca' is invalid, the escape character is not allowed to precede 'a'" - } -} +the pattern 'm%aca' is invalid, the escape character is not allowed to precede 'a' -- !query @@ -457,12 +452,7 @@ SELECT 'maca' NOT LIKE 'm%aca' ESCAPE '%' AS `false` struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "the pattern 'm%aca' is invalid, the escape character is not allowed to precede 'a'" - } -} +the pattern 'm%aca' is invalid, the escape character is not allowed to precede 'a' -- !query @@ -471,12 +461,7 @@ SELECT 'ma%a' LIKE 'm%a%%a' ESCAPE '%' AS `true` struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "the pattern 'm%a%%a' is invalid, the escape character is not allowed to precede 'a'" - } -} +the pattern 'm%a%%a' is invalid, the escape character is not allowed to precede 'a' -- !query @@ -485,12 +470,7 @@ SELECT 'ma%a' NOT LIKE 'm%a%%a' ESCAPE '%' AS `false` struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "the pattern 'm%a%%a' is invalid, the escape character is not allowed to precede 'a'" - } -} +the pattern 'm%a%%a' is invalid, the escape character is not allowed to precede 'a' -- !query @@ -499,12 +479,7 @@ SELECT 'bear' LIKE 'b_ear' ESCAPE '_' AS `true` struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "the pattern 'b_ear' is invalid, the escape character is not allowed to precede 'e'" - } -} +the pattern 'b_ear' is invalid, the escape character is not allowed to precede 'e' -- !query @@ -513,12 +488,7 @@ SELECT 'bear' NOT LIKE 'b_ear' ESCAPE '_' AS `false` struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "the pattern 'b_ear' is invalid, the escape character is not allowed to precede 'e'" - } -} +the pattern 'b_ear' is invalid, the escape character is not allowed to precede 'e' -- !query @@ -527,12 +497,7 @@ SELECT 'be_r' LIKE 'b_e__r' ESCAPE '_' AS `true` struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "the pattern 'b_e__r' is invalid, the escape character is not allowed to precede 'e'" - } -} +the pattern 'b_e__r' is invalid, the escape character is not allowed to precede 'e' -- !query @@ -541,12 +506,7 @@ SELECT 'be_r' NOT LIKE 'b_e__r' ESCAPE '_' AS `false` struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "the pattern 'b_e__r' is invalid, the escape character is not allowed to precede 'e'" - } -} +the pattern 'b_e__r' is invalid, the escape character is not allowed to precede 'e' -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out index 2ed9b43b81ba4..906b47d62a023 100755 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out +++ 
b/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out @@ -248,12 +248,7 @@ select format_string('Hello %s %s', 'World') struct<> -- !query output java.util.MissingFormatArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Format specifier '%s'" - } -} +Format specifier '%s' -- !query @@ -262,12 +257,7 @@ select format_string('Hello %s') struct<> -- !query output java.util.MissingFormatArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Format specifier '%s'" - } -} +Format specifier '%s' -- !query @@ -300,12 +290,7 @@ select format_string('%1$s %4$s', 1, 2, 3) struct<> -- !query output java.util.MissingFormatArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Format specifier '%4$s'" - } -} +Format specifier '%4$s' -- !query @@ -314,12 +299,7 @@ select format_string('%1$s %13$s', 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12) struct<> -- !query output java.util.MissingFormatArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Format specifier '%13$s'" - } -} +Format specifier '%13$s' -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part3.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part3.sql.out index 3ef9fb18f4e1e..1d6e249ff8692 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part3.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part3.sql.out @@ -69,12 +69,11 @@ insert into datetimes values struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "failed to evaluate expression CAST('11:00 BST' AS TIMESTAMP): [CAST_INVALID_INPUT] The value '11:00 BST' of the type \"STRING\" cannot be cast to \"TIMESTAMP\" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set \"spark.sql.ansi.enabled\" to \"false\" to bypass this error.\n== SQL(line 2, position 24) ==\n(1, timestamp '11:00', cast ('11:00 BST' as timestamp), cast ('1 year' as timestamp), ...\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n; line 1 pos 22" - } -} +failed to evaluate expression CAST('11:00 BST' AS TIMESTAMP): [CAST_INVALID_INPUT] The value '11:00 BST' of the type "STRING" cannot be cast to "TIMESTAMP" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +== SQL(line 2, position 24) == +(1, timestamp '11:00', cast ('11:00 BST' as timestamp), cast ('1 year' as timestamp), ... 
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +; line 1 pos 22 -- !query @@ -247,12 +246,7 @@ from t1 where f1 = f2 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(PARTITION BY spark_catalog.default.t1.f1 RANGE BETWEEN 1 PRECEDING AND 1 FOLLOWING)' due to data type mismatch: A range window frame cannot be used in an unordered window specification.; line 1 pos 24" - } -} +cannot resolve '(PARTITION BY spark_catalog.default.t1.f1 RANGE BETWEEN 1 PRECEDING AND 1 FOLLOWING)' due to data type mismatch: A range window frame cannot be used in an unordered window specification.; line 1 pos 24 -- !query @@ -302,12 +296,7 @@ SELECT * FROM empsalary WHERE row_number() OVER (ORDER BY salary) < 10 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "It is not allowed to use window functions inside WHERE clause" - } -} +It is not allowed to use window functions inside WHERE clause -- !query @@ -316,12 +305,10 @@ SELECT * FROM empsalary INNER JOIN tenk1 ON row_number() OVER (ORDER BY salary) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nThe query operator `Join` contains one or more unsupported\nexpression types Aggregate, Window or Generate.\nInvalid expressions: [row_number() OVER (ORDER BY spark_catalog.default.empsalary.salary ASC NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)]" - } -} + +The query operator `Join` contains one or more unsupported +expression types Aggregate, Window or Generate. +Invalid expressions: [row_number() OVER (ORDER BY spark_catalog.default.empsalary.salary ASC NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)] -- !query @@ -330,12 +317,10 @@ SELECT rank() OVER (ORDER BY 1), count(*) FROM empsalary GROUP BY 1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nThe query operator `Aggregate` contains one or more unsupported\nexpression types Aggregate, Window or Generate.\nInvalid expressions: [RANK() OVER (ORDER BY 1 ASC NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)]" - } -} + +The query operator `Aggregate` contains one or more unsupported +expression types Aggregate, Window or Generate. +Invalid expressions: [RANK() OVER (ORDER BY 1 ASC NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)] -- !query @@ -360,12 +345,7 @@ SELECT * FROM empsalary WHERE (rank() OVER (ORDER BY random())) > 10 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "It is not allowed to use window functions inside WHERE clause" - } -} +It is not allowed to use window functions inside WHERE clause -- !query @@ -374,12 +354,7 @@ SELECT * FROM empsalary WHERE rank() OVER (ORDER BY random()) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "It is not allowed to use window functions inside WHERE clause" - } -} +It is not allowed to use window functions inside WHERE clause -- !query @@ -404,12 +379,7 @@ SELECT range(1, 100) OVER () FROM empsalary struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Undefined function: range. 
This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.range.; line 1 pos 7" - } -} +Undefined function: range. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.range.; line 1 pos 7 -- !query @@ -418,12 +388,7 @@ SELECT ntile(0) OVER (ORDER BY ten), ten, four FROM tenk1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'ntile(0)' due to data type mismatch: Buckets expression must be positive, but got: 0; line 1 pos 7" - } -} +cannot resolve 'ntile(0)' due to data type mismatch: Buckets expression must be positive, but got: 0; line 1 pos 7 -- !query @@ -432,12 +397,7 @@ SELECT nth_value(four, 0) OVER (ORDER BY ten), ten, four FROM tenk1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'nth_value(spark_catalog.default.tenk1.four, 0)' due to data type mismatch: The 'offset' argument of nth_value must be greater than zero but it is 0.; line 1 pos 7" - } -} +cannot resolve 'nth_value(spark_catalog.default.tenk1.four, 0)' due to data type mismatch: The 'offset' argument of nth_value must be greater than zero but it is 0.; line 1 pos 7 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part4.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part4.sql.out index 81d909acd721d..60806a97b5bc8 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part4.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part4.sql.out @@ -498,9 +498,8 @@ FROM (VALUES(1,1),(2,2),(3,(cast('nan' as int))),(4,3),(5,4)) t(a,b) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "failed to evaluate expression CAST('nan' AS INT): [CAST_INVALID_INPUT] The value 'nan' of the type \"STRING\" cannot be cast to \"INT\" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set \"spark.sql.ansi.enabled\" to \"false\" to bypass this error.\n== SQL(line 3, position 29) ==\nFROM (VALUES(1,1),(2,2),(3,(cast('nan' as int))),(4,3),(5,4)) t(a,b)\n ^^^^^^^^^^^^^^^^^^\n; line 3 pos 6" - } -} +failed to evaluate expression CAST('nan' AS INT): [CAST_INVALID_INPUT] The value 'nan' of the type "STRING" cannot be cast to "INT" because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. 
+== SQL(line 3, position 29) == +FROM (VALUES(1,1),(2,2),(3,(cast('nan' as int))),(4,3),(5,4)) t(a,b) + ^^^^^^^^^^^^^^^^^^ +; line 3 pos 6 diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/with.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/with.sql.out index 6e57966745562..0a4e2f179d8c2 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/with.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/with.sql.out @@ -216,12 +216,7 @@ SELECT * FROM outermost ORDER BY 1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Table or view not found: outermost; line 4 pos 23" - } -} +Table or view not found: outermost; line 4 pos 23 -- !query @@ -351,12 +346,12 @@ create table foo (with baz) struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nDataType baz is not supported.(line 1, pos 23)\n\n== SQL ==\ncreate table foo (with baz)\n-----------------------^^^\n" - } -} + +DataType baz is not supported.(line 1, pos 23) + +== SQL == +create table foo (with baz) +-----------------------^^^ -- !query @@ -365,12 +360,12 @@ create table foo (with ordinality) struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nDataType ordinality is not supported.(line 1, pos 23)\n\n== SQL ==\ncreate table foo (with ordinality)\n-----------------------^^^\n" - } -} + +DataType ordinality is not supported.(line 1, pos 23) + +== SQL == +create table foo (with ordinality) +-----------------------^^^ -- !query @@ -387,12 +382,7 @@ WITH test AS (SELECT 42) INSERT INTO test VALUES (1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Table not found: test; line 1 pos 37" - } -} +Table not found: test; line 1 pos 37 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/random.sql.out b/sql/core/src/test/resources/sql-tests/results/random.sql.out index 27729cfb900e2..e3edddb26d90c 100644 --- a/sql/core/src/test/resources/sql-tests/results/random.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/random.sql.out @@ -37,12 +37,7 @@ SELECT rand(1.0) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'rand(1.0BD)' due to data type mismatch: argument 1 requires (int or bigint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7" - } -} +cannot resolve 'rand(1.0BD)' due to data type mismatch: argument 1 requires (int or bigint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7 -- !query @@ -83,9 +78,4 @@ SELECT rand('1') struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'rand('1')' due to data type mismatch: argument 1 requires (int or bigint) type, however, ''1'' is of string type.; line 1 pos 7" - } -} +cannot resolve 'rand('1')' due to data type mismatch: argument 1 requires (int or bigint) type, however, ''1'' is of string type.; line 1 pos 7 diff --git a/sql/core/src/test/resources/sql-tests/results/regexp-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/regexp-functions.sql.out index 0b4af39ca449c..65e1e31ae7cf3 100644 --- 
a/sql/core/src/test/resources/sql-tests/results/regexp-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/regexp-functions.sql.out @@ -5,12 +5,7 @@ SELECT regexp_extract('1a 2b 14m', '\\d+') struct<> -- !query output java.lang.IllegalArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Regex group count is 0, but the specified group index is 1" - } -} +Regex group count is 0, but the specified group index is 1 -- !query @@ -27,12 +22,7 @@ SELECT regexp_extract('1a 2b 14m', '\\d+', 1) struct<> -- !query output java.lang.IllegalArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Regex group count is 0, but the specified group index is 1" - } -} +Regex group count is 0, but the specified group index is 1 -- !query @@ -41,12 +31,7 @@ SELECT regexp_extract('1a 2b 14m', '\\d+', 2) struct<> -- !query output java.lang.IllegalArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Regex group count is 0, but the specified group index is 2" - } -} +Regex group count is 0, but the specified group index is 2 -- !query @@ -55,12 +40,7 @@ SELECT regexp_extract('1a 2b 14m', '\\d+', -1) struct<> -- !query output java.lang.IllegalArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "The specified group index cannot be less than zero" - } -} +The specified group index cannot be less than zero -- !query @@ -117,12 +97,7 @@ SELECT regexp_extract('1a 2b 14m', '(\\d+)([a-z]+)', 3) struct<> -- !query output java.lang.IllegalArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Regex group count is 2, but the specified group index is 3" - } -} +Regex group count is 2, but the specified group index is 3 -- !query @@ -131,12 +106,7 @@ SELECT regexp_extract('1a 2b 14m', '(\\d+)([a-z]+)', -1) struct<> -- !query output java.lang.IllegalArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "The specified group index cannot be less than zero" - } -} +The specified group index cannot be less than zero -- !query @@ -178,12 +148,7 @@ SELECT regexp_extract_all('1a 2b 14m', '\\d+') struct<> -- !query output java.lang.IllegalArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Regex group count is 0, but the specified group index is 1" - } -} +Regex group count is 0, but the specified group index is 1 -- !query @@ -200,12 +165,7 @@ SELECT regexp_extract_all('1a 2b 14m', '\\d+', 1) struct<> -- !query output java.lang.IllegalArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Regex group count is 0, but the specified group index is 1" - } -} +Regex group count is 0, but the specified group index is 1 -- !query @@ -214,12 +174,7 @@ SELECT regexp_extract_all('1a 2b 14m', '\\d+', 2) struct<> -- !query output java.lang.IllegalArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Regex group count is 0, but the specified group index is 2" - } -} +Regex group count is 0, but the specified group index is 2 -- !query @@ -228,12 +183,7 @@ SELECT regexp_extract_all('1a 2b 14m', '\\d+', -1) struct<> -- !query output java.lang.IllegalArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "The specified group index cannot be less than zero" - } -} +The specified group index cannot be less than zero -- !query @@ -290,12 +240,7 @@ SELECT regexp_extract_all('1a 2b 14m', 
'(\\d+)([a-z]+)', 3) struct<> -- !query output java.lang.IllegalArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Regex group count is 2, but the specified group index is 3" - } -} +Regex group count is 2, but the specified group index is 3 -- !query @@ -304,12 +249,7 @@ SELECT regexp_extract_all('1a 2b 14m', '(\\d+)([a-z]+)', -1) struct<> -- !query output java.lang.IllegalArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "The specified group index cannot be less than zero" - } -} +The specified group index cannot be less than zero -- !query @@ -359,12 +299,7 @@ SELECT regexp_replace('healthy, wealthy, and wise', '\\w+thy', 'something', -2) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'regexp_replace('healthy, wealthy, and wise', '\\\\w+thy', 'something', -2)' due to data type mismatch: Position expression must be positive, but got: -2; line 1 pos 7" - } -} +cannot resolve 'regexp_replace('healthy, wealthy, and wise', '\\w+thy', 'something', -2)' due to data type mismatch: Position expression must be positive, but got: -2; line 1 pos 7 -- !query @@ -373,12 +308,7 @@ SELECT regexp_replace('healthy, wealthy, and wise', '\\w+thy', 'something', 0) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'regexp_replace('healthy, wealthy, and wise', '\\\\w+thy', 'something', 0)' due to data type mismatch: Position expression must be positive, but got: 0; line 1 pos 7" - } -} +cannot resolve 'regexp_replace('healthy, wealthy, and wise', '\\w+thy', 'something', 0)' due to data type mismatch: Position expression must be positive, but got: 0; line 1 pos 7 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out b/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out index 8508d0713b1e0..181d5854badb4 100644 --- a/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out @@ -209,12 +209,7 @@ SHOW TABLE EXTENDED LIKE 'show_t*' PARTITION(c='Us', d=1) struct<> -- !query output org.apache.spark.sql.catalyst.analysis.NoSuchTableException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Table or view 'show_t*' not found in database 'showdb'" - } -} +Table or view 'show_t*' not found in database 'showdb' -- !query @@ -223,12 +218,7 @@ SHOW TABLE EXTENDED LIKE 'show_t1' PARTITION(c='Us') struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Partition spec is invalid. The spec (c) must match the partition spec (c, d) defined in table '`spark_catalog`.`showdb`.`show_t1`'" - } -} +Partition spec is invalid. The spec (c) must match the partition spec (c, d) defined in table '`spark_catalog`.`showdb`.`show_t1`' -- !query @@ -237,12 +227,7 @@ SHOW TABLE EXTENDED LIKE 'show_t1' PARTITION(a='Us', d=1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "a is not a valid partition column in table `spark_catalog`.`showdb`.`show_t1`." - } -} +a is not a valid partition column in table `spark_catalog`.`showdb`.`show_t1`. 
-- !query @@ -251,12 +236,9 @@ SHOW TABLE EXTENDED LIKE 'show_t1' PARTITION(c='Ch', d=1) struct<> -- !query output org.apache.spark.sql.catalyst.analysis.NoSuchPartitionException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Partition not found in table 'show_t1' database 'showdb':\nc -> Ch\nd -> 1" - } -} +Partition not found in table 'show_t1' database 'showdb': +c -> Ch +d -> 1 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/show-views.sql.out b/sql/core/src/test/resources/sql-tests/results/show-views.sql.out index 9ce27620a14ca..8dc2f663ba067 100644 --- a/sql/core/src/test/resources/sql-tests/results/show-views.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/show-views.sql.out @@ -139,12 +139,7 @@ SHOW VIEWS IN wrongdb LIKE 'view_*' struct<> -- !query output org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Database 'wrongdb' not found" - } -} +Database 'wrongdb' not found -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out b/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out index 9269f093c52c7..8a858343b240e 100644 --- a/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/show_columns.sql.out @@ -91,12 +91,7 @@ SHOW COLUMNS IN badtable FROM showdb struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Table or view not found: showdb.badtable; line 1 pos 16" - } -} +Table or view not found: showdb.badtable; line 1 pos 16 -- !query @@ -114,12 +109,7 @@ SHOW COLUMNS IN showdb.showcolumn1 FROM baddb struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "SHOW COLUMNS with conflicting databases: 'baddb' != 'showdb'" - } -} +SHOW COLUMNS with conflicting databases: 'baddb' != 'showdb' -- !query @@ -137,12 +127,7 @@ SHOW COLUMNS IN showdb.showcolumn3 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Table or view not found: showdb.showcolumn3; line 1 pos 16" - } -} +Table or view not found: showdb.showcolumn3; line 1 pos 16 -- !query @@ -151,12 +136,7 @@ SHOW COLUMNS IN showcolumn3 FROM showdb struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Table or view not found: showdb.showcolumn3; line 1 pos 16" - } -} +Table or view not found: showdb.showcolumn3; line 1 pos 16 -- !query @@ -165,12 +145,7 @@ SHOW COLUMNS IN showcolumn4 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Table or view not found: showcolumn4; line 1 pos 16" - } -} +Table or view not found: showcolumn4; line 1 pos 16 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/sql-compatibility-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/sql-compatibility-functions.sql.out index 3e090558ff88c..9521cbf0bf2b8 100644 --- a/sql/core/src/test/resources/sql-tests/results/sql-compatibility-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/sql-compatibility-functions.sql.out @@ -93,12 +93,7 @@ SELECT string(1, 2) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - 
"messageParameters" : { - "message" : "Function string accepts only one argument; line 1 pos 7" - } -} +Function string accepts only one argument; line 1 pos 7 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out index 1f70d88d87837..a02b27142ff21 100644 --- a/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out @@ -5,12 +5,7 @@ select concat_ws() struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "requirement failed: concat_ws requires at least one argument.; line 1 pos 7" - } -} +requirement failed: concat_ws requires at least one argument.; line 1 pos 7 -- !query @@ -19,12 +14,7 @@ select format_string() struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "requirement failed: format_string() should take at least 1 argument; line 1 pos 7" - } -} +requirement failed: format_string() should take at least 1 argument; line 1 pos 7 -- !query @@ -661,12 +651,7 @@ select decode() struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Invalid number of arguments for function decode. Expected: 2; Found: 0; line 1 pos 7" - } -} +Invalid number of arguments for function decode. Expected: 2; Found: 0; line 1 pos 7 -- !query @@ -675,12 +660,7 @@ select decode(encode('abc', 'utf-8')) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Invalid number of arguments for function decode. Expected: 2; Found: 1; line 1 pos 7" - } -} +Invalid number of arguments for function decode. Expected: 2; Found: 1; line 1 pos 7 -- !query @@ -1145,12 +1125,7 @@ select to_binary(null, cast(null as int)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "The 'format' parameter of function 'to_binary' needs to be a string literal.; line 1 pos 7" - } -} +The 'format' parameter of function 'to_binary' needs to be a string literal.; line 1 pos 7 -- !query @@ -1159,12 +1134,7 @@ select to_binary('abc', 1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "The 'format' parameter of function 'to_binary' needs to be a string literal.; line 1 pos 7" - } -} +The 'format' parameter of function 'to_binary' needs to be a string literal.; line 1 pos 7 -- !query @@ -1173,12 +1143,7 @@ select to_binary('abc', 'invalidFormat') struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Invalid value for the 'format' parameter of function 'to_binary': invalidformat. The value has to be a case-insensitive string literal of 'hex', 'utf-8', or 'base64'." - } -} +Invalid value for the 'format' parameter of function 'to_binary': invalidformat. The value has to be a case-insensitive string literal of 'hex', 'utf-8', or 'base64'. 
-- !query @@ -1187,9 +1152,4 @@ select to_binary('a!', 'base64') struct<> -- !query output java.lang.IllegalArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Last unit does not have enough valid bits" - } -} +Last unit does not have enough valid bits diff --git a/sql/core/src/test/resources/sql-tests/results/subquery/in-subquery/in-basic.sql.out b/sql/core/src/test/resources/sql-tests/results/subquery/in-subquery/in-basic.sql.out index dbe048da301dd..6962aea42206a 100644 --- a/sql/core/src/test/resources/sql-tests/results/subquery/in-subquery/in-basic.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/subquery/in-subquery/in-basic.sql.out @@ -38,12 +38,15 @@ select 1 from tab_a where (a1, b1) not in (select (a2, b2) from tab_b) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(named_struct('a1', tab_a.a1, 'b1', tab_a.b1) IN (listquery()))' due to data type mismatch: \nThe number of columns in the left hand side of an IN subquery does not match the\nnumber of columns in the output of subquery.\n#columns in left hand side: 2.\n#columns in right hand side: 1.\nLeft side columns:\n[tab_a.a1, tab_a.b1].\nRight side columns:\n[`named_struct(a2, a2, b2, b2)`].; line 1 pos 35" - } -} +cannot resolve '(named_struct('a1', tab_a.a1, 'b1', tab_a.b1) IN (listquery()))' due to data type mismatch: +The number of columns in the left hand side of an IN subquery does not match the +number of columns in the output of subquery. +#columns in left hand side: 2. +#columns in right hand side: 1. +Left side columns: +[tab_a.a1, tab_a.b1]. +Right side columns: +[`named_struct(a2, a2, b2, b2)`].; line 1 pos 35 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/subquery/negative-cases/invalid-correlation.sql.out b/sql/core/src/test/resources/sql-tests/results/subquery/negative-cases/invalid-correlation.sql.out index 63ef903e7ccdd..9ae9778b10646 100644 --- a/sql/core/src/test/resources/sql-tests/results/subquery/negative-cases/invalid-correlation.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/subquery/negative-cases/invalid-correlation.sql.out @@ -43,12 +43,7 @@ AND t2b = (SELECT max(avg) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "grouping expressions sequence is empty, and 't2.t2b' is not an aggregate function. Wrap '(avg(t2.t2b) AS avg)' in windowing function(s) or wrap 't2.t2b' in first() (or first_value) if you don't care which value you get." - } -} +grouping expressions sequence is empty, and 't2.t2b' is not an aggregate function. Wrap '(avg(t2.t2b) AS avg)' in windowing function(s) or wrap 't2.t2b' in first() (or first_value) if you don't care which value you get. -- !query @@ -65,12 +60,7 @@ WHERE t1a IN (SELECT min(t2a) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Resolved attribute(s) t2b#x missing from min(t2a)#x,t2c#x in operator !Filter t2c#x IN (list#x [t2b#x])." - } -} +Resolved attribute(s) t2b#x missing from min(t2a)#x,t2c#x in operator !Filter t2c#x IN (list#x [t2b#x]). 
-- !query @@ -85,12 +75,7 @@ HAVING EXISTS (SELECT t2a struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Found an aggregate function in a correlated predicate that has both outer and local references, which is not supported: min((outer(t1.t1a) + t2.t2a))" - } -} +Found an aggregate function in a correlated predicate that has both outer and local references, which is not supported: min((outer(t1.t1a) + t2.t2a)) -- !query @@ -106,12 +91,7 @@ WHERE t1a IN (SELECT t2a struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Found an aggregate function in a correlated predicate that has both outer and local references, which is not supported: min((outer(t2.t2a) + t3.t3a))" - } -} +Found an aggregate function in a correlated predicate that has both outer and local references, which is not supported: min((outer(t2.t2a) + t3.t3a)) -- !query @@ -125,12 +105,14 @@ WHERE t1a IN (SELECT t2a struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Expressions referencing the outer query are not supported outside of WHERE/HAVING clauses:\nAggregate [min(outer(t2a#x)) AS min(outer(t2.t2a))#x]\n+- SubqueryAlias t3\n +- View (`t3`, [t3a#x,t3b#x,t3c#x])\n +- Project [cast(t3a#x as int) AS t3a#x, cast(t3b#x as int) AS t3b#x, cast(t3c#x as int) AS t3c#x]\n +- Project [t3a#x, t3b#x, t3c#x]\n +- SubqueryAlias t3\n +- LocalRelation [t3a#x, t3b#x, t3c#x]\n" - } -} +Expressions referencing the outer query are not supported outside of WHERE/HAVING clauses: +Aggregate [min(outer(t2a#x)) AS min(outer(t2.t2a))#x] ++- SubqueryAlias t3 + +- View (`t3`, [t3a#x,t3b#x,t3c#x]) + +- Project [cast(t3a#x as int) AS t3a#x, cast(t3b#x as int) AS t3b#x, cast(t3c#x as int) AS t3c#x] + +- Project [t3a#x, t3b#x, t3c#x] + +- SubqueryAlias t3 + +- LocalRelation [t3a#x, t3b#x, t3c#x] -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/subquery/negative-cases/subq-input-typecheck.sql.out b/sql/core/src/test/resources/sql-tests/results/subquery/negative-cases/subq-input-typecheck.sql.out index 65f475fbb9fb9..8a8334fb57d68 100644 --- a/sql/core/src/test/resources/sql-tests/results/subquery/negative-cases/subq-input-typecheck.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/subquery/negative-cases/subq-input-typecheck.sql.out @@ -61,12 +61,7 @@ FROM t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Scalar subquery must return only one column, but got 2" - } -} +Scalar subquery must return only one column, but got 2 -- !query @@ -81,12 +76,7 @@ FROM t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Scalar subquery must return only one column, but got 2" - } -} +Scalar subquery must return only one column, but got 2 -- !query @@ -99,12 +89,15 @@ t1a IN (SELECT t2a, t2b struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(t1.t1a IN (listquery(t1.t1a)))' due to data type mismatch: \nThe number of columns in the left hand side of an IN subquery does not match the\nnumber of columns in the output of subquery.\n#columns in left hand side: 1.\n#columns in right hand side: 2.\nLeft side 
columns:\n[t1.t1a].\nRight side columns:\n[t2.t2a, t2.t2b].; line 3 pos 4" - } -} +cannot resolve '(t1.t1a IN (listquery(t1.t1a)))' due to data type mismatch: +The number of columns in the left hand side of an IN subquery does not match the +number of columns in the output of subquery. +#columns in left hand side: 1. +#columns in right hand side: 2. +Left side columns: +[t1.t1a]. +Right side columns: +[t2.t2a, t2.t2b].; line 3 pos 4 -- !query @@ -117,12 +110,15 @@ WHERE struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(named_struct('t1a', t1.t1a, 't1b', t1.t1b) IN (listquery(t1.t1a)))' due to data type mismatch: \nThe number of columns in the left hand side of an IN subquery does not match the\nnumber of columns in the output of subquery.\n#columns in left hand side: 2.\n#columns in right hand side: 1.\nLeft side columns:\n[t1.t1a, t1.t1b].\nRight side columns:\n[t2.t2a].; line 3 pos 11" - } -} +cannot resolve '(named_struct('t1a', t1.t1a, 't1b', t1.t1b) IN (listquery(t1.t1a)))' due to data type mismatch: +The number of columns in the left hand side of an IN subquery does not match the +number of columns in the output of subquery. +#columns in left hand side: 2. +#columns in right hand side: 1. +Left side columns: +[t1.t1a, t1.t1b]. +Right side columns: +[t2.t2a].; line 3 pos 11 -- !query @@ -136,9 +132,12 @@ WHERE struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(named_struct('t4a', t4.t4a, 't4b', t4.t4b, 't4c', t4.t4c) IN (listquery()))' due to data type mismatch: \nThe data type of one or more elements in the left hand side of an IN subquery\nis not compatible with the data type of the output of the subquery\nMismatched columns:\n[(t4.t4a:double, t5.t5a:timestamp), (t4.t4c:string, t5.t5c:bigint)]\nLeft side:\n[double, string, string].\nRight side:\n[timestamp, string, bigint].; line 3 pos 16" - } -} +cannot resolve '(named_struct('t4a', t4.t4a, 't4b', t4.t4b, 't4c', t4.t4c) IN (listquery()))' due to data type mismatch: +The data type of one or more elements in the left hand side of an IN subquery +is not compatible with the data type of the output of the subquery +Mismatched columns: +[(t4.t4a:double, t5.t5a:timestamp), (t4.t4c:string, t5.t5c:bigint)] +Left side: +[double, string, string]. +Right side: +[timestamp, string, bigint].; line 3 pos 16 diff --git a/sql/core/src/test/resources/sql-tests/results/table-aliases.sql.out b/sql/core/src/test/resources/sql-tests/results/table-aliases.sql.out index 0e1913abac66b..d1e3357a9cc50 100644 --- a/sql/core/src/test/resources/sql-tests/results/table-aliases.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/table-aliases.sql.out @@ -39,12 +39,7 @@ SELECT * FROM testData AS t(col1, col2, col3) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Number of column aliases does not match number of columns. Number of column aliases: 3; number of columns: 2.; line 1 pos 14" - } -} +Number of column aliases does not match number of columns. Number of column aliases: 3; number of columns: 2.; line 1 pos 14 -- !query @@ -53,12 +48,7 @@ SELECT * FROM testData AS t(col1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Number of column aliases does not match number of columns. 
Number of column aliases: 1; number of columns: 2.; line 1 pos 14" - } -} +Number of column aliases does not match number of columns. Number of column aliases: 1; number of columns: 2.; line 1 pos 14 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/table-valued-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/table-valued-functions.sql.out index b2619db48d0a1..1ce6fbbdbc84b 100644 --- a/sql/core/src/test/resources/sql-tests/results/table-valued-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/table-valued-functions.sql.out @@ -5,12 +5,7 @@ select * from dummy(3) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "could not resolve `dummy` to a table-valued function; line 1 pos 14" - } -} +could not resolve `dummy` to a table-valued function; line 1 pos 14 -- !query @@ -72,12 +67,12 @@ select * from range(1, 1, 1, 1, 1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Table-valued function range with alternatives: \n range(start: long, end: long, step: long, numSlices: integer)\n range(start: long, end: long, step: long)\n range(start: long, end: long)\n range(end: long)\ncannot be applied to (integer, integer, integer, integer, integer): Invalid number of arguments for function range. Expected: one of 1, 2, 3 and 4; Found: 5; line 1 pos 14" - } -} +Table-valued function range with alternatives: + range(start: long, end: long, step: long, numSlices: integer) + range(start: long, end: long, step: long) + range(start: long, end: long) + range(end: long) +cannot be applied to (integer, integer, integer, integer, integer): Invalid number of arguments for function range. Expected: one of 1, 2, 3 and 4; Found: 5; line 1 pos 14 -- !query @@ -86,12 +81,12 @@ select * from range(1, null) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Table-valued function range with alternatives: \n range(start: long, end: long, step: long, numSlices: integer)\n range(start: long, end: long, step: long)\n range(start: long, end: long)\n range(end: long)\ncannot be applied to (integer, void): Incompatible input data type. Expected: long; Found: void; line 1 pos 14" - } -} +Table-valued function range with alternatives: + range(start: long, end: long, step: long, numSlices: integer) + range(start: long, end: long, step: long) + range(start: long, end: long) + range(end: long) +cannot be applied to (integer, void): Incompatible input data type. Expected: long; Found: void; line 1 pos 14 -- !query @@ -100,12 +95,12 @@ select * from range(array(1, 2, 3)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Table-valued function range with alternatives: \n range(start: long, end: long, step: long, numSlices: integer)\n range(start: long, end: long, step: long)\n range(start: long, end: long)\n range(end: long)\ncannot be applied to (array): Incompatible input data type. Expected: long; Found: array; line 1 pos 14" - } -} +Table-valued function range with alternatives: + range(start: long, end: long, step: long, numSlices: integer) + range(start: long, end: long, step: long) + range(start: long, end: long) + range(end: long) +cannot be applied to (array): Incompatible input data type. 
Expected: long; Found: array; line 1 pos 14 -- !query @@ -114,12 +109,12 @@ select * from range(0, 5, 0) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Table-valued function range with alternatives: \n range(start: long, end: long, step: long, numSlices: integer)\n range(start: long, end: long, step: long)\n range(start: long, end: long)\n range(end: long)\ncannot be applied to (integer, integer, integer): requirement failed: step (0) cannot be 0; line 1 pos 14" - } -} +Table-valued function range with alternatives: + range(start: long, end: long, step: long, numSlices: integer) + range(start: long, end: long, step: long) + range(start: long, end: long) + range(end: long) +cannot be applied to (integer, integer, integer): requirement failed: step (0) cannot be 0; line 1 pos 14 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/tablesample-negative.sql.out b/sql/core/src/test/resources/sql-tests/results/tablesample-negative.sql.out index 89153c27462cd..9624cdfe7b9d9 100644 --- a/sql/core/src/test/resources/sql-tests/results/tablesample-negative.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/tablesample-negative.sql.out @@ -29,12 +29,12 @@ SELECT mydb1.t1 FROM t1 TABLESAMPLE (-1 PERCENT) struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nSampling fraction (-0.01) must be on interval [0, 1](line 1, pos 24)\n\n== SQL ==\nSELECT mydb1.t1 FROM t1 TABLESAMPLE (-1 PERCENT)\n------------------------^^^\n" - } -} + +Sampling fraction (-0.01) must be on interval [0, 1](line 1, pos 24) + +== SQL == +SELECT mydb1.t1 FROM t1 TABLESAMPLE (-1 PERCENT) +------------------------^^^ -- !query @@ -43,12 +43,12 @@ SELECT mydb1.t1 FROM t1 TABLESAMPLE (101 PERCENT) struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nSampling fraction (1.01) must be on interval [0, 1](line 1, pos 24)\n\n== SQL ==\nSELECT mydb1.t1 FROM t1 TABLESAMPLE (101 PERCENT)\n------------------------^^^\n" - } -} + +Sampling fraction (1.01) must be on interval [0, 1](line 1, pos 24) + +== SQL == +SELECT mydb1.t1 FROM t1 TABLESAMPLE (101 PERCENT) +------------------------^^^ -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/timestamp-ntz.sql.out b/sql/core/src/test/resources/sql-tests/results/timestamp-ntz.sql.out index a75597406c719..acdc65a23f4b9 100644 --- a/sql/core/src/test/resources/sql-tests/results/timestamp-ntz.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/timestamp-ntz.sql.out @@ -45,12 +45,7 @@ SELECT make_timestamp_ntz(2021, 07, 11, 6, 30, 45.678, 'CET') struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Invalid number of arguments for function make_timestamp_ntz. Expected: 6; Found: 7; line 1 pos 7" - } -} +Invalid number of arguments for function make_timestamp_ntz. 
Expected: 6; Found: 7; line 1 pos 7 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out index 3698663eb231e..3e15bdf15e1e0 100644 --- a/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out @@ -13,12 +13,12 @@ select timestamp '2019-01-01中文' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot parse the TIMESTAMP value: 2019-01-01中文(line 1, pos 7)\n\n== SQL ==\nselect timestamp '2019-01-01中文'\n-------^^^\n" - } -} + +Cannot parse the TIMESTAMP value: 2019-01-01中文(line 1, pos 7) + +== SQL == +select timestamp '2019-01-01中文' +-------^^^ -- !query @@ -27,12 +27,12 @@ select timestamp'4294967297' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot parse the TIMESTAMP value: 4294967297(line 1, pos 7)\n\n== SQL ==\nselect timestamp'4294967297'\n-------^^^\n" - } -} + +Cannot parse the TIMESTAMP value: 4294967297(line 1, pos 7) + +== SQL == +select timestamp'4294967297' +-------^^^ -- !query @@ -41,12 +41,12 @@ select timestamp'2021-01-01T12:30:4294967297.123456' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot parse the TIMESTAMP value: 2021-01-01T12:30:4294967297.123456(line 1, pos 7)\n\n== SQL ==\nselect timestamp'2021-01-01T12:30:4294967297.123456'\n-------^^^\n" - } -} + +Cannot parse the TIMESTAMP value: 2021-01-01T12:30:4294967297.123456(line 1, pos 7) + +== SQL == +select timestamp'2021-01-01T12:30:4294967297.123456' +-------^^^ -- !query @@ -191,12 +191,7 @@ select TIMESTAMP_SECONDS(1230219000123123) struct<> -- !query output java.lang.ArithmeticException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "long overflow" - } -} +long overflow -- !query @@ -205,12 +200,7 @@ select TIMESTAMP_SECONDS(-1230219000123123) struct<> -- !query output java.lang.ArithmeticException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "long overflow" - } -} +long overflow -- !query @@ -219,12 +209,7 @@ select TIMESTAMP_MILLIS(92233720368547758) struct<> -- !query output java.lang.ArithmeticException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "long overflow" - } -} +long overflow -- !query @@ -233,12 +218,7 @@ select TIMESTAMP_MILLIS(-92233720368547758) struct<> -- !query output java.lang.ArithmeticException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "long overflow" - } -} +long overflow -- !query @@ -247,12 +227,7 @@ select TIMESTAMP_SECONDS(0.1234567) struct<> -- !query output java.lang.ArithmeticException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Rounding necessary" - } -} +Rounding necessary -- !query @@ -630,12 +605,7 @@ select timestamp'2011-11-11 11:11:11' - '2011-11-11 11:11:10' struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' - '2011-11-11 11:11:10')' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, ''2011-11-11 11:11:10'' is of string type.; line 1 pos 7" - } -} +cannot resolve '(TIMESTAMP 
'2011-11-11 11:11:11' - '2011-11-11 11:11:10')' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, ''2011-11-11 11:11:10'' is of string type.; line 1 pos 7 -- !query @@ -644,12 +614,7 @@ select '2011-11-11 11:11:11' - timestamp'2011-11-11 11:11:10' struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '('2011-11-11 11:11:11' - TIMESTAMP '2011-11-11 11:11:10')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, ''2011-11-11 11:11:11'' is of string type.; line 1 pos 7" - } -} +cannot resolve '('2011-11-11 11:11:11' - TIMESTAMP '2011-11-11 11:11:10')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, ''2011-11-11 11:11:11'' is of string type.; line 1 pos 7 -- !query @@ -682,12 +647,7 @@ select str - timestamp'2011-11-11 11:11:11' from ts_view struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(ts_view.str - TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'ts_view.str' is of string type.; line 1 pos 7" - } -} +cannot resolve '(ts_view.str - TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'ts_view.str' is of string type.; line 1 pos 7 -- !query @@ -696,12 +656,7 @@ select timestamp'2011-11-11 11:11:11' - str from ts_view struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' - ts_view.str)' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'ts_view.str' is of string type.; line 1 pos 7" - } -} +cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' - ts_view.str)' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'ts_view.str' is of string type.; line 1 pos 7 -- !query @@ -710,12 +665,7 @@ select timestamp'2011-11-11 11:11:11' + '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' + CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(TIMESTAMP '2011-11-11 11:11:11' + CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7" - } -} +cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' + CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(TIMESTAMP '2011-11-11 11:11:11' + CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7 -- !query @@ -724,12 +674,7 @@ select '1' + timestamp'2011-11-11 11:11:11' struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS DOUBLE) + TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) + TIMESTAMP '2011-11-11 11:11:11')' (double and timestamp).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS DOUBLE) + TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) + TIMESTAMP '2011-11-11 11:11:11')' (double and timestamp).; line 1 pos 
7 -- !query @@ -738,12 +683,7 @@ select timestamp'2011-11-11 11:11:11' + null struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' + NULL)' due to data type mismatch: differing types in '(TIMESTAMP '2011-11-11 11:11:11' + NULL)' (timestamp and void).; line 1 pos 7" - } -} +cannot resolve '(TIMESTAMP '2011-11-11 11:11:11' + NULL)' due to data type mismatch: differing types in '(TIMESTAMP '2011-11-11 11:11:11' + NULL)' (timestamp and void).; line 1 pos 7 -- !query @@ -752,12 +692,7 @@ select null + timestamp'2011-11-11 11:11:11' struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(NULL + TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(NULL + TIMESTAMP '2011-11-11 11:11:11')' (void and timestamp).; line 1 pos 7" - } -} +cannot resolve '(NULL + TIMESTAMP '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(NULL + TIMESTAMP '2011-11-11 11:11:11')' (void and timestamp).; line 1 pos 7 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out index 4971b41a0f820..6494f530076ea 100644 --- a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out @@ -13,12 +13,12 @@ select timestamp '2019-01-01中文' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot parse the TIMESTAMP value: 2019-01-01中文(line 1, pos 7)\n\n== SQL ==\nselect timestamp '2019-01-01中文'\n-------^^^\n" - } -} + +Cannot parse the TIMESTAMP value: 2019-01-01中文(line 1, pos 7) + +== SQL == +select timestamp '2019-01-01中文' +-------^^^ -- !query @@ -27,12 +27,12 @@ select timestamp'4294967297' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot parse the TIMESTAMP value: 4294967297(line 1, pos 7)\n\n== SQL ==\nselect timestamp'4294967297'\n-------^^^\n" - } -} + +Cannot parse the TIMESTAMP value: 4294967297(line 1, pos 7) + +== SQL == +select timestamp'4294967297' +-------^^^ -- !query @@ -41,12 +41,12 @@ select timestamp'2021-01-01T12:30:4294967297.123456' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot parse the TIMESTAMP value: 2021-01-01T12:30:4294967297.123456(line 1, pos 7)\n\n== SQL ==\nselect timestamp'2021-01-01T12:30:4294967297.123456'\n-------^^^\n" - } -} + +Cannot parse the TIMESTAMP value: 2021-01-01T12:30:4294967297.123456(line 1, pos 7) + +== SQL == +select timestamp'2021-01-01T12:30:4294967297.123456' +-------^^^ -- !query @@ -126,12 +126,7 @@ SELECT make_timestamp(1, 1, 1, 1, 1, 61) struct<> -- !query output java.time.DateTimeException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 61. If necessary set spark.sql.ansi.enabled to false to bypass this error." - } -} +Invalid value for SecondOfMinute (valid values 0 - 59): 61. If necessary set spark.sql.ansi.enabled to false to bypass this error. 
-- !query @@ -156,12 +151,7 @@ SELECT make_timestamp(1, 1, 1, 1, 1, 99.999999) struct<> -- !query output java.time.DateTimeException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 99. If necessary set spark.sql.ansi.enabled to false to bypass this error." - } -} +Invalid value for SecondOfMinute (valid values 0 - 59): 99. If necessary set spark.sql.ansi.enabled to false to bypass this error. -- !query @@ -170,12 +160,7 @@ SELECT make_timestamp(1, 1, 1, 1, 1, 999.999999) struct<> -- !query output java.time.DateTimeException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Invalid value for SecondOfMinute (valid values 0 - 59): 999. If necessary set spark.sql.ansi.enabled to false to bypass this error." - } -} +Invalid value for SecondOfMinute (valid values 0 - 59): 999. If necessary set spark.sql.ansi.enabled to false to bypass this error. -- !query @@ -216,12 +201,7 @@ select TIMESTAMP_SECONDS(1230219000123123) struct<> -- !query output java.lang.ArithmeticException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "long overflow" - } -} +long overflow -- !query @@ -230,12 +210,7 @@ select TIMESTAMP_SECONDS(-1230219000123123) struct<> -- !query output java.lang.ArithmeticException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "long overflow" - } -} +long overflow -- !query @@ -244,12 +219,7 @@ select TIMESTAMP_MILLIS(92233720368547758) struct<> -- !query output java.lang.ArithmeticException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "long overflow" - } -} +long overflow -- !query @@ -258,12 +228,7 @@ select TIMESTAMP_MILLIS(-92233720368547758) struct<> -- !query output java.lang.ArithmeticException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "long overflow" - } -} +long overflow -- !query @@ -272,12 +237,7 @@ select TIMESTAMP_SECONDS(0.1234567) struct<> -- !query output java.lang.ArithmeticException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Rounding necessary" - } -} +Rounding necessary -- !query @@ -774,12 +734,7 @@ select timestamp'2011-11-11 11:11:11' + '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + CAST('1' AS TIMESTAMP_NTZ))' due to data type mismatch: '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + CAST('1' AS TIMESTAMP_NTZ))' requires (numeric or interval day to second or interval year to month or interval) type, not timestamp_ntz; line 1 pos 7" - } -} +cannot resolve '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + CAST('1' AS TIMESTAMP_NTZ))' due to data type mismatch: '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + CAST('1' AS TIMESTAMP_NTZ))' requires (numeric or interval day to second or interval year to month or interval) type, not timestamp_ntz; line 1 pos 7 -- !query @@ -788,12 +743,7 @@ select '1' + timestamp'2011-11-11 11:11:11' struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS TIMESTAMP_NTZ) + TIMESTAMP_NTZ '2011-11-11 11:11:11')' due to data type mismatch: '(CAST('1' AS TIMESTAMP_NTZ) + TIMESTAMP_NTZ '2011-11-11 11:11:11')' requires (numeric or interval day to second or interval year to month or interval) type, not timestamp_ntz; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS TIMESTAMP_NTZ) + TIMESTAMP_NTZ 
'2011-11-11 11:11:11')' due to data type mismatch: '(CAST('1' AS TIMESTAMP_NTZ) + TIMESTAMP_NTZ '2011-11-11 11:11:11')' requires (numeric or interval day to second or interval year to month or interval) type, not timestamp_ntz; line 1 pos 7 -- !query @@ -802,12 +752,7 @@ select timestamp'2011-11-11 11:11:11' + null struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + NULL)' due to data type mismatch: differing types in '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + NULL)' (timestamp_ntz and void).; line 1 pos 7" - } -} +cannot resolve '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + NULL)' due to data type mismatch: differing types in '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + NULL)' (timestamp_ntz and void).; line 1 pos 7 -- !query @@ -816,12 +761,7 @@ select null + timestamp'2011-11-11 11:11:11' struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(NULL + TIMESTAMP_NTZ '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(NULL + TIMESTAMP_NTZ '2011-11-11 11:11:11')' (void and timestamp_ntz).; line 1 pos 7" - } -} +cannot resolve '(NULL + TIMESTAMP_NTZ '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(NULL + TIMESTAMP_NTZ '2011-11-11 11:11:11')' (void and timestamp_ntz).; line 1 pos 7 -- !query @@ -853,12 +793,7 @@ select to_timestamp('2019-10-06 A', 'yyyy-MM-dd GGGGG') struct<> -- !query output java.lang.RuntimeException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Fail to recognize 'yyyy-MM-dd GGGGG' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html" - } -} +Fail to recognize 'yyyy-MM-dd GGGGG' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html -- !query @@ -867,12 +802,7 @@ select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEEE') struct<> -- !query output java.lang.RuntimeException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Fail to recognize 'dd MM yyyy EEEEEE' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html" - } -} +Fail to recognize 'dd MM yyyy EEEEEE' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html -- !query @@ -881,12 +811,7 @@ select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE') struct<> -- !query output java.lang.RuntimeException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html" - } -} +Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 
You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out index 03160ba11a253..78afee51ed1d4 100644 --- a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out @@ -13,12 +13,12 @@ select timestamp '2019-01-01中文' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot parse the TIMESTAMP value: 2019-01-01中文(line 1, pos 7)\n\n== SQL ==\nselect timestamp '2019-01-01中文'\n-------^^^\n" - } -} + +Cannot parse the TIMESTAMP value: 2019-01-01中文(line 1, pos 7) + +== SQL == +select timestamp '2019-01-01中文' +-------^^^ -- !query @@ -27,12 +27,12 @@ select timestamp'4294967297' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot parse the TIMESTAMP value: 4294967297(line 1, pos 7)\n\n== SQL ==\nselect timestamp'4294967297'\n-------^^^\n" - } -} + +Cannot parse the TIMESTAMP value: 4294967297(line 1, pos 7) + +== SQL == +select timestamp'4294967297' +-------^^^ -- !query @@ -41,12 +41,12 @@ select timestamp'2021-01-01T12:30:4294967297.123456' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nCannot parse the TIMESTAMP value: 2021-01-01T12:30:4294967297.123456(line 1, pos 7)\n\n== SQL ==\nselect timestamp'2021-01-01T12:30:4294967297.123456'\n-------^^^\n" - } -} + +Cannot parse the TIMESTAMP value: 2021-01-01T12:30:4294967297.123456(line 1, pos 7) + +== SQL == +select timestamp'2021-01-01T12:30:4294967297.123456' +-------^^^ -- !query @@ -191,12 +191,7 @@ select TIMESTAMP_SECONDS(1230219000123123) struct<> -- !query output java.lang.ArithmeticException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "long overflow" - } -} +long overflow -- !query @@ -205,12 +200,7 @@ select TIMESTAMP_SECONDS(-1230219000123123) struct<> -- !query output java.lang.ArithmeticException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "long overflow" - } -} +long overflow -- !query @@ -219,12 +209,7 @@ select TIMESTAMP_MILLIS(92233720368547758) struct<> -- !query output java.lang.ArithmeticException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "long overflow" - } -} +long overflow -- !query @@ -233,12 +218,7 @@ select TIMESTAMP_MILLIS(-92233720368547758) struct<> -- !query output java.lang.ArithmeticException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "long overflow" - } -} +long overflow -- !query @@ -247,12 +227,7 @@ select TIMESTAMP_SECONDS(0.1234567) struct<> -- !query output java.lang.ArithmeticException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Rounding necessary" - } -} +Rounding necessary -- !query @@ -630,12 +605,7 @@ select timestamp'2011-11-11 11:11:11' - '2011-11-11 11:11:10' struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(TIMESTAMP_NTZ '2011-11-11 11:11:11' - '2011-11-11 11:11:10')' due to data type mismatch: argument 2 requires (timestamp 
or timestamp without time zone) type, however, ''2011-11-11 11:11:10'' is of string type.; line 1 pos 7" - } -} +cannot resolve '(TIMESTAMP_NTZ '2011-11-11 11:11:11' - '2011-11-11 11:11:10')' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, ''2011-11-11 11:11:10'' is of string type.; line 1 pos 7 -- !query @@ -644,12 +614,7 @@ select '2011-11-11 11:11:11' - timestamp'2011-11-11 11:11:10' struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '('2011-11-11 11:11:11' - TIMESTAMP_NTZ '2011-11-11 11:11:10')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, ''2011-11-11 11:11:11'' is of string type.; line 1 pos 7" - } -} +cannot resolve '('2011-11-11 11:11:11' - TIMESTAMP_NTZ '2011-11-11 11:11:10')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, ''2011-11-11 11:11:11'' is of string type.; line 1 pos 7 -- !query @@ -682,12 +647,7 @@ select str - timestamp'2011-11-11 11:11:11' from ts_view struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(ts_view.str - TIMESTAMP_NTZ '2011-11-11 11:11:11')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'ts_view.str' is of string type.; line 1 pos 7" - } -} +cannot resolve '(ts_view.str - TIMESTAMP_NTZ '2011-11-11 11:11:11')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'ts_view.str' is of string type.; line 1 pos 7 -- !query @@ -696,12 +656,7 @@ select timestamp'2011-11-11 11:11:11' - str from ts_view struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(TIMESTAMP_NTZ '2011-11-11 11:11:11' - ts_view.str)' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'ts_view.str' is of string type.; line 1 pos 7" - } -} +cannot resolve '(TIMESTAMP_NTZ '2011-11-11 11:11:11' - ts_view.str)' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'ts_view.str' is of string type.; line 1 pos 7 -- !query @@ -710,12 +665,7 @@ select timestamp'2011-11-11 11:11:11' + '1' struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + CAST('1' AS DOUBLE))' (timestamp_ntz and double).; line 1 pos 7" - } -} +cannot resolve '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + CAST('1' AS DOUBLE))' (timestamp_ntz and double).; line 1 pos 7 -- !query @@ -724,12 +674,7 @@ select '1' + timestamp'2011-11-11 11:11:11' struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS DOUBLE) + TIMESTAMP_NTZ '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) + TIMESTAMP_NTZ '2011-11-11 11:11:11')' (double and timestamp_ntz).; line 1 pos 7" - } -} +cannot 
resolve '(CAST('1' AS DOUBLE) + TIMESTAMP_NTZ '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) + TIMESTAMP_NTZ '2011-11-11 11:11:11')' (double and timestamp_ntz).; line 1 pos 7 -- !query @@ -738,12 +683,7 @@ select timestamp'2011-11-11 11:11:11' + null struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + NULL)' due to data type mismatch: differing types in '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + NULL)' (timestamp_ntz and void).; line 1 pos 7" - } -} +cannot resolve '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + NULL)' due to data type mismatch: differing types in '(TIMESTAMP_NTZ '2011-11-11 11:11:11' + NULL)' (timestamp_ntz and void).; line 1 pos 7 -- !query @@ -752,12 +692,7 @@ select null + timestamp'2011-11-11 11:11:11' struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(NULL + TIMESTAMP_NTZ '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(NULL + TIMESTAMP_NTZ '2011-11-11 11:11:11')' (void and timestamp_ntz).; line 1 pos 7" - } -} +cannot resolve '(NULL + TIMESTAMP_NTZ '2011-11-11 11:11:11')' due to data type mismatch: differing types in '(NULL + TIMESTAMP_NTZ '2011-11-11 11:11:11')' (void and timestamp_ntz).; line 1 pos 7 -- !query @@ -789,12 +724,7 @@ select to_timestamp('2019-10-06 A', 'yyyy-MM-dd GGGGG') struct<> -- !query output java.lang.RuntimeException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Fail to recognize 'yyyy-MM-dd GGGGG' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html" - } -} +Fail to recognize 'yyyy-MM-dd GGGGG' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html -- !query @@ -803,12 +733,7 @@ select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEEE') struct<> -- !query output java.lang.RuntimeException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Fail to recognize 'dd MM yyyy EEEEEE' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html" - } -} +Fail to recognize 'dd MM yyyy EEEEEE' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html -- !query @@ -817,12 +742,7 @@ select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE') struct<> -- !query output java.lang.RuntimeException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html" - } -} +Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 
You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/timezone.sql.out b/sql/core/src/test/resources/sql-tests/results/timezone.sql.out index a41a7f769b66a..7dbdb174be09a 100644 --- a/sql/core/src/test/resources/sql-tests/results/timezone.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/timezone.sql.out @@ -45,12 +45,12 @@ SET TIME ZONE struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nInvalid time zone displacement value(line 1, pos 0)\n\n== SQL ==\nSET TIME ZONE\n^^^\n" - } -} + +Invalid time zone displacement value(line 1, pos 0) + +== SQL == +SET TIME ZONE +^^^ -- !query @@ -59,12 +59,7 @@ SET TIME ZONE 'invalid/zone' struct<> -- !query output java.lang.IllegalArgumentException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "'invalid/zone' in spark.sql.session.timeZone is invalid. Cannot resolve the given timezone with ZoneId.of(_, ZoneId.SHORT_IDS)" - } -} +'invalid/zone' in spark.sql.session.timeZone is invalid. Cannot resolve the given timezone with ZoneId.of(_, ZoneId.SHORT_IDS) -- !query @@ -73,12 +68,12 @@ SET TIME ZONE INTERVAL 3 DAYS struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nThe interval value must be in the range of [-18, +18] hours with second precision(line 1, pos 14)\n\n== SQL ==\nSET TIME ZONE INTERVAL 3 DAYS\n--------------^^^\n" - } -} + +The interval value must be in the range of [-18, +18] hours with second precision(line 1, pos 14) + +== SQL == +SET TIME ZONE INTERVAL 3 DAYS +--------------^^^ -- !query @@ -87,12 +82,12 @@ SET TIME ZONE INTERVAL 24 HOURS struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nThe interval value must be in the range of [-18, +18] hours with second precision(line 1, pos 14)\n\n== SQL ==\nSET TIME ZONE INTERVAL 24 HOURS\n--------------^^^\n" - } -} + +The interval value must be in the range of [-18, +18] hours with second precision(line 1, pos 14) + +== SQL == +SET TIME ZONE INTERVAL 24 HOURS +--------------^^^ -- !query @@ -101,12 +96,12 @@ SET TIME ZONE INTERVAL '19:40:32' HOUR TO SECOND struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nThe interval value must be in the range of [-18, +18] hours with second precision(line 1, pos 14)\n\n== SQL ==\nSET TIME ZONE INTERVAL '19:40:32' HOUR TO SECOND\n--------------^^^\n" - } -} + +The interval value must be in the range of [-18, +18] hours with second precision(line 1, pos 14) + +== SQL == +SET TIME ZONE INTERVAL '19:40:32' HOUR TO SECOND +--------------^^^ -- !query @@ -115,12 +110,12 @@ SET TIME ZONE INTERVAL 10 HOURS 'GMT+1' struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nInvalid time zone displacement value(line 1, pos 0)\n\n== SQL ==\nSET TIME ZONE INTERVAL 10 HOURS 'GMT+1'\n^^^\n" - } -} + +Invalid time zone displacement value(line 1, pos 0) + +== SQL == +SET TIME ZONE INTERVAL 10 HOURS 'GMT+1' +^^^ -- !query @@ -129,9 +124,9 @@ SET TIME ZONE INTERVAL 10 HOURS 1 MILLISECOND struct<> -- !query output 
org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nThe interval value must be in the range of [-18, +18] hours with second precision(line 1, pos 14)\n\n== SQL ==\nSET TIME ZONE INTERVAL 10 HOURS 1 MILLISECOND\n--------------^^^\n" - } -} + +The interval value must be in the range of [-18, +18] hours with second precision(line 1, pos 14) + +== SQL == +SET TIME ZONE INTERVAL 10 HOURS 1 MILLISECOND +--------------^^^ diff --git a/sql/core/src/test/resources/sql-tests/results/transform.sql.out b/sql/core/src/test/resources/sql-tests/results/transform.sql.out index f886976494f19..c4e70b23fab4b 100644 --- a/sql/core/src/test/resources/sql-tests/results/transform.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/transform.sql.out @@ -400,12 +400,36 @@ SELECT a, b, decode(c, 'UTF-8'), d, e, f, g, h, i, j, k, l FROM ( struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nLINES TERMINATED BY only supports newline '\\n' right now: @(line 3, pos 4)\n\n== SQL ==\nSELECT a, b, decode(c, 'UTF-8'), d, e, f, g, h, i, j, k, l FROM (\n SELECT TRANSFORM(a, b, c, d, e, f, g, h, i, j, k, l)\n ROW FORMAT DELIMITED\n----^^^\n FIELDS TERMINATED BY ','\n LINES TERMINATED BY '@'\n NULL DEFINED AS 'NULL'\n USING 'cat' AS (\n a string,\n b string,\n c string,\n d string,\n e string,\n f string,\n g string,\n h string,\n i string,\n j string,\n k string,\n l string)\n ROW FORMAT DELIMITED\n FIELDS TERMINATED BY ','\n LINES TERMINATED BY '@'\n NULL DEFINED AS 'NULL'\n FROM t\n) tmp\n" - } -} + +LINES TERMINATED BY only supports newline '\n' right now: @(line 3, pos 4) + +== SQL == +SELECT a, b, decode(c, 'UTF-8'), d, e, f, g, h, i, j, k, l FROM ( + SELECT TRANSFORM(a, b, c, d, e, f, g, h, i, j, k, l) + ROW FORMAT DELIMITED +----^^^ + FIELDS TERMINATED BY ',' + LINES TERMINATED BY '@' + NULL DEFINED AS 'NULL' + USING 'cat' AS ( + a string, + b string, + c string, + d string, + e string, + f string, + g string, + h string, + i string, + j string, + k string, + l string) + ROW FORMAT DELIMITED + FIELDS TERMINATED BY ',' + LINES TERMINATED BY '@' + NULL DEFINED AS 'NULL' + FROM t +) tmp -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/try_arithmetic.sql.out b/sql/core/src/test/resources/sql-tests/results/try_arithmetic.sql.out index c392aaf19c853..8622b97a20502 100644 --- a/sql/core/src/test/resources/sql-tests/results/try_arithmetic.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/try_arithmetic.sql.out @@ -141,12 +141,7 @@ SELECT try_add(interval 2 year, interval 2 second) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'INTERVAL '2' YEAR + INTERVAL '02' SECOND' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2' YEAR' is of interval year type.; line 1 pos 7" - } -} +cannot resolve 'INTERVAL '2' YEAR + INTERVAL '02' SECOND' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'INTERVAL '2' YEAR' is of interval year type.; line 1 pos 7 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/booleanEquality.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/booleanEquality.sql.out index 6a67f793ec989..75a8baea6aa32 100644 --- 
a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/booleanEquality.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/booleanEquality.sql.out @@ -77,12 +77,7 @@ SELECT true = cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(true = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(true = CAST('1' AS BINARY))' (boolean and binary).; line 1 pos 7" - } -} +cannot resolve '(true = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(true = CAST('1' AS BINARY))' (boolean and binary).; line 1 pos 7 -- !query @@ -99,12 +94,7 @@ SELECT true = cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(true = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(true = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (boolean and timestamp).; line 1 pos 7" - } -} +cannot resolve '(true = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(true = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (boolean and timestamp).; line 1 pos 7 -- !query @@ -113,12 +103,7 @@ SELECT true = cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(true = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(true = CAST('2017-12-11 09:30:00' AS DATE))' (boolean and date).; line 1 pos 7" - } -} +cannot resolve '(true = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(true = CAST('2017-12-11 09:30:00' AS DATE))' (boolean and date).; line 1 pos 7 -- !query @@ -191,12 +176,7 @@ SELECT true <=> cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(true <=> CAST('1' AS BINARY))' due to data type mismatch: differing types in '(true <=> CAST('1' AS BINARY))' (boolean and binary).; line 1 pos 7" - } -} +cannot resolve '(true <=> CAST('1' AS BINARY))' due to data type mismatch: differing types in '(true <=> CAST('1' AS BINARY))' (boolean and binary).; line 1 pos 7 -- !query @@ -213,12 +193,7 @@ SELECT true <=> cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(true <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(true <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (boolean and timestamp).; line 1 pos 7" - } -} +cannot resolve '(true <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(true <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (boolean and timestamp).; line 1 pos 7 -- !query @@ -227,12 +202,7 @@ SELECT true <=> cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(true <=> CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(true <=> CAST('2017-12-11 
09:30:00' AS DATE))' (boolean and date).; line 1 pos 7" - } -} +cannot resolve '(true <=> CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(true <=> CAST('2017-12-11 09:30:00' AS DATE))' (boolean and date).; line 1 pos 7 -- !query @@ -305,12 +275,7 @@ SELECT cast('1' as binary) = true FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) = true)' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = true)' (binary and boolean).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) = true)' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = true)' (binary and boolean).; line 1 pos 7 -- !query @@ -327,12 +292,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) = true FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = true)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = true)' (timestamp and boolean).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = true)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = true)' (timestamp and boolean).; line 1 pos 7 -- !query @@ -341,12 +301,7 @@ SELECT cast('2017-12-11 09:30:00' as date) = true FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = true)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = true)' (date and boolean).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = true)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = true)' (date and boolean).; line 1 pos 7 -- !query @@ -419,12 +374,7 @@ SELECT cast('1' as binary) <=> true FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) <=> true)' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <=> true)' (binary and boolean).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) <=> true)' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <=> true)' (binary and boolean).; line 1 pos 7 -- !query @@ -441,12 +391,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <=> true FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> true)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> true)' (timestamp and boolean).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> true)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> true)' (timestamp and boolean).; line 1 pos 7 -- !query @@ -455,12 +400,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <=> true FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <=> 
true)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <=> true)' (date and boolean).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <=> true)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <=> true)' (date and boolean).; line 1 pos 7 -- !query @@ -533,12 +473,7 @@ SELECT false = cast('0' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(false = CAST('0' AS BINARY))' due to data type mismatch: differing types in '(false = CAST('0' AS BINARY))' (boolean and binary).; line 1 pos 7" - } -} +cannot resolve '(false = CAST('0' AS BINARY))' due to data type mismatch: differing types in '(false = CAST('0' AS BINARY))' (boolean and binary).; line 1 pos 7 -- !query @@ -555,12 +490,7 @@ SELECT false = cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(false = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(false = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (boolean and timestamp).; line 1 pos 7" - } -} +cannot resolve '(false = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(false = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (boolean and timestamp).; line 1 pos 7 -- !query @@ -569,12 +499,7 @@ SELECT false = cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(false = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(false = CAST('2017-12-11 09:30:00' AS DATE))' (boolean and date).; line 1 pos 7" - } -} +cannot resolve '(false = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(false = CAST('2017-12-11 09:30:00' AS DATE))' (boolean and date).; line 1 pos 7 -- !query @@ -647,12 +572,7 @@ SELECT false <=> cast('0' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(false <=> CAST('0' AS BINARY))' due to data type mismatch: differing types in '(false <=> CAST('0' AS BINARY))' (boolean and binary).; line 1 pos 7" - } -} +cannot resolve '(false <=> CAST('0' AS BINARY))' due to data type mismatch: differing types in '(false <=> CAST('0' AS BINARY))' (boolean and binary).; line 1 pos 7 -- !query @@ -669,12 +589,7 @@ SELECT false <=> cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(false <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(false <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (boolean and timestamp).; line 1 pos 7" - } -} +cannot resolve '(false <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(false <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (boolean and timestamp).; line 1 pos 7 -- !query @@ -683,12 +598,7 @@ SELECT false <=> cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : 
"LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(false <=> CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(false <=> CAST('2017-12-11 09:30:00' AS DATE))' (boolean and date).; line 1 pos 7" - } -} +cannot resolve '(false <=> CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(false <=> CAST('2017-12-11 09:30:00' AS DATE))' (boolean and date).; line 1 pos 7 -- !query @@ -761,12 +671,7 @@ SELECT cast('0' as binary) = false FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('0' AS BINARY) = false)' due to data type mismatch: differing types in '(CAST('0' AS BINARY) = false)' (binary and boolean).; line 1 pos 7" - } -} +cannot resolve '(CAST('0' AS BINARY) = false)' due to data type mismatch: differing types in '(CAST('0' AS BINARY) = false)' (binary and boolean).; line 1 pos 7 -- !query @@ -783,12 +688,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) = false FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = false)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = false)' (timestamp and boolean).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = false)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = false)' (timestamp and boolean).; line 1 pos 7 -- !query @@ -797,12 +697,7 @@ SELECT cast('2017-12-11 09:30:00' as date) = false FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = false)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = false)' (date and boolean).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = false)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = false)' (date and boolean).; line 1 pos 7 -- !query @@ -875,12 +770,7 @@ SELECT cast('0' as binary) <=> false FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('0' AS BINARY) <=> false)' due to data type mismatch: differing types in '(CAST('0' AS BINARY) <=> false)' (binary and boolean).; line 1 pos 7" - } -} +cannot resolve '(CAST('0' AS BINARY) <=> false)' due to data type mismatch: differing types in '(CAST('0' AS BINARY) <=> false)' (binary and boolean).; line 1 pos 7 -- !query @@ -897,12 +787,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <=> false FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> false)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> false)' (timestamp and boolean).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> false)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> false)' (timestamp and boolean).; line 1 pos 7 -- !query @@ -911,9 +796,4 @@ SELECT cast('2017-12-11 09:30:00' as 
date) <=> false FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <=> false)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <=> false)' (date and boolean).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <=> false)' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <=> false)' (date and boolean).; line 1 pos 7 diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/caseWhenCoercion.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/caseWhenCoercion.sql.out index da4ceef61bbe6..11725df995189 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/caseWhenCoercion.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/caseWhenCoercion.sql.out @@ -77,12 +77,7 @@ SELECT CASE WHEN true THEN cast(1 as tinyint) ELSE cast('2' as binary) END FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST(1 AS TINYINT) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN tinyint ELSE binary END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST(1 AS TINYINT) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN tinyint ELSE binary END; line 1 pos 7 -- !query @@ -91,12 +86,7 @@ SELECT CASE WHEN true THEN cast(1 as tinyint) ELSE cast(2 as boolean) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST(1 AS TINYINT) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN tinyint ELSE boolean END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST(1 AS TINYINT) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN tinyint ELSE boolean END; line 1 pos 7 -- !query @@ -105,12 +95,7 @@ SELECT CASE WHEN true THEN cast(1 as tinyint) ELSE cast('2017-12-11 09:30:00.0' struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST(1 AS TINYINT) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN tinyint ELSE timestamp END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST(1 AS TINYINT) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN tinyint ELSE timestamp END; line 1 pos 7 -- !query @@ -119,12 +104,7 @@ SELECT CASE WHEN true THEN cast(1 as tinyint) ELSE cast('2017-12-11 09:30:00' as struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST(1 AS TINYINT) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN tinyint ELSE date END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST(1 AS TINYINT) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN tinyint ELSE date END; line 1 pos 7 -- !query @@ -197,12 +177,7 @@ SELECT CASE WHEN true THEN cast(1 as smallint) ELSE cast('2' as binary) END FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST(1 AS SMALLINT) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN smallint ELSE binary END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST(1 AS SMALLINT) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN smallint ELSE binary END; line 1 pos 7 -- !query @@ -211,12 +186,7 @@ SELECT CASE WHEN true THEN cast(1 as smallint) ELSE cast(2 as boolean) END FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST(1 AS SMALLINT) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN smallint ELSE boolean END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST(1 AS SMALLINT) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN smallint ELSE boolean END; line 1 pos 7 -- !query @@ -225,12 +195,7 @@ SELECT CASE WHEN true THEN cast(1 as smallint) ELSE cast('2017-12-11 09:30:00.0' struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST(1 AS SMALLINT) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN smallint ELSE timestamp END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST(1 AS SMALLINT) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN smallint ELSE timestamp END; line 1 pos 7 -- !query @@ -239,12 +204,7 @@ SELECT CASE WHEN true THEN cast(1 as smallint) ELSE cast('2017-12-11 09:30:00' a struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST(1 AS SMALLINT) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN smallint ELSE date END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST(1 AS SMALLINT) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN smallint ELSE date END; line 1 pos 7 -- !query @@ -317,12 +277,7 @@ SELECT CASE WHEN true THEN cast(1 as int) ELSE cast('2' as binary) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST(1 AS INT) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN int ELSE binary END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST(1 AS INT) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN int ELSE binary END; line 1 pos 7 -- !query @@ -331,12 +286,7 @@ SELECT CASE WHEN true THEN cast(1 as int) ELSE cast(2 as boolean) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST(1 AS INT) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN int ELSE boolean END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST(1 AS INT) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN int ELSE boolean END; line 1 pos 7 -- !query @@ -345,12 +295,7 @@ SELECT CASE WHEN true THEN cast(1 as int) ELSE cast('2017-12-11 09:30:00.0' as t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST(1 AS INT) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN int ELSE timestamp END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST(1 AS INT) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN int ELSE timestamp END; line 1 pos 7 -- !query @@ -359,12 +304,7 @@ SELECT CASE WHEN true THEN cast(1 as int) ELSE cast('2017-12-11 09:30:00' as dat struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST(1 AS INT) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN int ELSE date END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST(1 AS INT) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN int ELSE date END; line 1 pos 7 -- !query @@ -437,12 +377,7 @@ SELECT CASE WHEN true THEN cast(1 as bigint) ELSE cast('2' as binary) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST(1 AS BIGINT) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN bigint ELSE binary END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST(1 AS BIGINT) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN bigint ELSE binary END; line 1 pos 7 -- !query @@ -451,12 +386,7 @@ SELECT CASE WHEN true THEN cast(1 as bigint) ELSE cast(2 as boolean) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST(1 AS BIGINT) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN bigint ELSE boolean END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST(1 AS BIGINT) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN bigint ELSE boolean END; line 1 pos 7 -- !query @@ -465,12 +395,7 @@ SELECT CASE WHEN true THEN cast(1 as bigint) ELSE cast('2017-12-11 09:30:00.0' a struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST(1 AS BIGINT) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN bigint ELSE timestamp END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST(1 AS BIGINT) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN bigint ELSE timestamp END; line 1 pos 7 -- !query @@ -479,12 +404,7 @@ SELECT CASE WHEN true THEN cast(1 as bigint) ELSE cast('2017-12-11 09:30:00' as struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST(1 AS BIGINT) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN bigint ELSE date END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST(1 AS BIGINT) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN bigint ELSE date END; line 1 pos 7 -- !query @@ -557,12 +477,7 @@ SELECT CASE WHEN true THEN cast(1 as float) ELSE cast('2' as binary) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST(1 AS FLOAT) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN float ELSE binary END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST(1 AS FLOAT) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN float ELSE binary END; line 1 pos 7 -- !query @@ -571,12 +486,7 @@ SELECT CASE WHEN true THEN cast(1 as float) ELSE cast(2 as boolean) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST(1 AS FLOAT) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN float ELSE boolean END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST(1 AS FLOAT) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN float ELSE boolean END; line 1 pos 7 -- !query @@ -585,12 +495,7 @@ SELECT CASE WHEN true THEN cast(1 as float) ELSE cast('2017-12-11 09:30:00.0' as struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST(1 AS FLOAT) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN float ELSE timestamp END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST(1 AS FLOAT) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN float ELSE timestamp END; line 1 pos 7 -- !query @@ -599,12 +504,7 @@ SELECT CASE WHEN true THEN cast(1 as float) ELSE cast('2017-12-11 09:30:00' as d struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST(1 AS FLOAT) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN float ELSE date END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST(1 AS FLOAT) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN float ELSE date END; line 1 pos 7 -- !query @@ -677,12 +577,7 @@ SELECT CASE WHEN true THEN cast(1 as double) ELSE cast('2' as binary) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST(1 AS DOUBLE) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN double ELSE binary END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST(1 AS DOUBLE) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN double ELSE binary END; line 1 pos 7 -- !query @@ -691,12 +586,7 @@ SELECT CASE WHEN true THEN cast(1 as double) ELSE cast(2 as boolean) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST(1 AS DOUBLE) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN double ELSE boolean END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST(1 AS DOUBLE) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN double ELSE boolean END; line 1 pos 7 -- !query @@ -705,12 +595,7 @@ SELECT CASE WHEN true THEN cast(1 as double) ELSE cast('2017-12-11 09:30:00.0' a struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST(1 AS DOUBLE) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN double ELSE timestamp END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST(1 AS DOUBLE) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN double ELSE timestamp END; line 1 pos 7 -- !query @@ -719,12 +604,7 @@ SELECT CASE WHEN true THEN cast(1 as double) ELSE cast('2017-12-11 09:30:00' as struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST(1 AS DOUBLE) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN double ELSE date END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST(1 AS DOUBLE) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN double ELSE date END; line 1 pos 7 -- !query @@ -797,12 +677,7 @@ SELECT CASE WHEN true THEN cast(1 as decimal(10, 0)) ELSE cast('2' as binary) EN struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST(1 AS DECIMAL(10,0)) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN decimal(10,0) ELSE binary END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST(1 AS DECIMAL(10,0)) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN decimal(10,0) ELSE binary END; line 1 pos 7 -- !query @@ -811,12 +686,7 @@ SELECT CASE WHEN true THEN cast(1 as decimal(10, 0)) ELSE cast(2 as boolean) END struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST(1 AS DECIMAL(10,0)) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN decimal(10,0) ELSE boolean END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST(1 AS DECIMAL(10,0)) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN decimal(10,0) ELSE boolean END; line 1 pos 7 -- !query @@ -825,12 +695,7 @@ SELECT CASE WHEN true THEN cast(1 as decimal(10, 0)) ELSE cast('2017-12-11 09:30 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST(1 AS DECIMAL(10,0)) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN decimal(10,0) ELSE timestamp END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST(1 AS DECIMAL(10,0)) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN decimal(10,0) ELSE timestamp END; line 1 pos 7 -- !query @@ -839,12 +704,7 @@ SELECT CASE WHEN true THEN cast(1 as decimal(10, 0)) ELSE cast('2017-12-11 09:30 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST(1 AS DECIMAL(10,0)) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN decimal(10,0) ELSE date END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST(1 AS DECIMAL(10,0)) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN decimal(10,0) ELSE date END; line 1 pos 7 -- !query @@ -917,12 +777,7 @@ SELECT CASE WHEN true THEN cast(1 as string) ELSE cast('2' as binary) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST(1 AS STRING) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN string ELSE binary END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST(1 AS STRING) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN string ELSE binary END; line 1 pos 7 -- !query @@ -931,12 +786,7 @@ SELECT CASE WHEN true THEN cast(1 as string) ELSE cast(2 as boolean) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST(1 AS STRING) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN string ELSE boolean END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST(1 AS STRING) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN string ELSE boolean END; line 1 pos 7 -- !query @@ -961,12 +811,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast(2 as tinyint) END FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS TINYINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE tinyint END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS TINYINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN binary ELSE tinyint END; line 1 pos 7 -- !query @@ -975,12 +820,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast(2 as smallint) END FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS SMALLINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE smallint END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS SMALLINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE smallint END; line 1 pos 7 -- !query @@ -989,12 +829,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast(2 as int) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS INT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE int END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS INT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE int END; line 1 pos 7 -- !query @@ -1003,12 +838,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast(2 as bigint) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS BIGINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE bigint END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS BIGINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE bigint END; line 1 pos 7 -- !query @@ -1017,12 +847,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast(2 as float) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS FLOAT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE float END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS FLOAT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE float END; line 1 pos 7 -- !query @@ -1031,12 +856,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast(2 as double) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS DOUBLE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN binary ELSE double END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS DOUBLE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE double END; line 1 pos 7 -- !query @@ -1045,12 +865,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast(2 as decimal(10, 0)) EN struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS DECIMAL(10,0)) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE decimal(10,0) END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS DECIMAL(10,0)) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE decimal(10,0) END; line 1 pos 7 -- !query @@ -1059,12 +874,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast(2 as string) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS STRING) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE string END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS STRING) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE string END; line 1 pos 7 -- !query @@ -1081,12 +891,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast(2 as boolean) END FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE boolean END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE boolean END; line 1 pos 7 -- !query @@ -1095,12 +900,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast('2017-12-11 09:30:00.0' struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE timestamp END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN binary ELSE timestamp END; line 1 pos 7 -- !query @@ -1109,12 +909,7 @@ SELECT CASE WHEN true THEN cast('1' as binary) ELSE cast('2017-12-11 09:30:00' a struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE date END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST('1' AS BINARY) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN binary ELSE date END; line 1 pos 7 -- !query @@ -1123,12 +918,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast(2 as tinyint) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS TINYINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE tinyint END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS TINYINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE tinyint END; line 1 pos 7 -- !query @@ -1137,12 +927,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast(2 as smallint) END FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS SMALLINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE smallint END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS SMALLINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE smallint END; line 1 pos 7 -- !query @@ -1151,12 +936,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast(2 as int) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS INT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE int END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS INT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN boolean ELSE int END; line 1 pos 7 -- !query @@ -1165,12 +945,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast(2 as bigint) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS BIGINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE bigint END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS BIGINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE bigint END; line 1 pos 7 -- !query @@ -1179,12 +954,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast(2 as float) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS FLOAT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE float END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS FLOAT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE float END; line 1 pos 7 -- !query @@ -1193,12 +963,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast(2 as double) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS DOUBLE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE double END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS DOUBLE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE double END; line 1 pos 7 -- !query @@ -1207,12 +972,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast(2 as decimal(10, 0)) END struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS DECIMAL(10,0)) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE decimal(10,0) END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS DECIMAL(10,0)) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN boolean ELSE decimal(10,0) END; line 1 pos 7 -- !query @@ -1221,12 +981,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast(2 as string) END FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS STRING) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE string END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST(2 AS STRING) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE string END; line 1 pos 7 -- !query @@ -1235,12 +990,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast('2' as binary) END FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE binary END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE binary END; line 1 pos 7 -- !query @@ -1257,12 +1007,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast('2017-12-11 09:30:00.0' struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE timestamp END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE timestamp END; line 1 pos 7 -- !query @@ -1271,12 +1016,7 @@ SELECT CASE WHEN true THEN cast(1 as boolean) ELSE cast('2017-12-11 09:30:00' as struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN boolean ELSE date END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST(1 AS BOOLEAN) ELSE CAST('2017-12-11 09:30:00' AS DATE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN boolean ELSE date END; line 1 pos 7 -- !query @@ -1285,12 +1025,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00.0' as timestamp) ELSE cast( struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS TINYINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE tinyint END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS TINYINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE tinyint END; line 1 pos 7 -- !query @@ -1299,12 +1034,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00.0' as timestamp) ELSE cast( struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS SMALLINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE smallint END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS SMALLINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE smallint END; line 1 pos 7 -- !query @@ -1313,12 +1043,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00.0' as timestamp) ELSE cast( struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS INT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE int END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS INT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE int END; line 1 pos 7 -- !query @@ -1327,12 +1052,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00.0' as timestamp) ELSE cast( struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS BIGINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE bigint END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS BIGINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN timestamp ELSE bigint END; line 1 pos 7 -- !query @@ -1341,12 +1061,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00.0' as timestamp) ELSE cast( struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS FLOAT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE float END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS FLOAT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE float END; line 1 pos 7 -- !query @@ -1355,12 +1070,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00.0' as timestamp) ELSE cast( struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS DOUBLE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE double END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS DOUBLE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE double END; line 1 pos 7 -- !query @@ -1369,12 +1079,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00.0' as timestamp) ELSE cast( struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS DECIMAL(10,0)) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE decimal(10,0) END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS DECIMAL(10,0)) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE decimal(10,0) END; line 1 pos 7 -- !query @@ -1391,12 +1096,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00.0' as timestamp) ELSE cast( struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE binary END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN timestamp ELSE binary END; line 1 pos 7 -- !query @@ -1405,12 +1105,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00.0' as timestamp) ELSE cast( struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE boolean END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN timestamp ELSE boolean END; line 1 pos 7 -- !query @@ -1435,12 +1130,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00' as date) ELSE cast(2 as ti struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS TINYINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE tinyint END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS TINYINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE tinyint END; line 1 pos 7 -- !query @@ -1449,12 +1139,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00' as date) ELSE cast(2 as sm struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS SMALLINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE smallint END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS SMALLINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE smallint END; line 1 pos 7 -- !query @@ -1463,12 +1148,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00' as date) ELSE cast(2 as in struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS INT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE int END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS INT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN date ELSE int END; line 1 pos 7 -- !query @@ -1477,12 +1157,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00' as date) ELSE cast(2 as bi struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS BIGINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE bigint END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS BIGINT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE bigint END; line 1 pos 7 -- !query @@ -1491,12 +1166,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00' as date) ELSE cast(2 as fl struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS FLOAT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE float END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS FLOAT) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE float END; line 1 pos 7 -- !query @@ -1505,12 +1175,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00' as date) ELSE cast(2 as do struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS DOUBLE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE double END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS DOUBLE) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE double END; line 1 pos 7 -- !query @@ -1519,12 +1184,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00' as date) ELSE cast(2 as de struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS DECIMAL(10,0)) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE decimal(10,0) END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS DECIMAL(10,0)) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... 
THEN date ELSE decimal(10,0) END; line 1 pos 7 -- !query @@ -1541,12 +1201,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00' as date) ELSE cast('2' as struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE binary END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST('2' AS BINARY) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE binary END; line 1 pos 7 -- !query @@ -1555,12 +1210,7 @@ SELECT CASE WHEN true THEN cast('2017-12-12 09:30:00' as date) ELSE cast(2 as bo struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE boolean END; line 1 pos 7" - } -} +cannot resolve 'CASE WHEN true THEN CAST('2017-12-12 09:30:00' AS DATE) ELSE CAST(2 AS BOOLEAN) END' due to data type mismatch: THEN and ELSE expressions should all be same type or coercible to a common type, got CASE WHEN ... THEN date ELSE boolean END; line 1 pos 7 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/dateTimeOperations.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/dateTimeOperations.sql.out index efb00d93e02e2..94422bd2089f5 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/dateTimeOperations.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/dateTimeOperations.sql.out @@ -13,12 +13,7 @@ select cast(1 as tinyint) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CAST(1 AS TINYINT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS TINYINT)' is of tinyint type.; line 1 pos 7" - } -} +cannot resolve 'CAST(1 AS TINYINT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS TINYINT)' is of tinyint type.; line 1 pos 7 -- !query @@ -27,12 +22,7 @@ select cast(1 as smallint) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CAST(1 AS SMALLINT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS SMALLINT)' is of smallint type.; line 1 pos 7" - } -} +cannot resolve 'CAST(1 AS SMALLINT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS SMALLINT)' is of smallint type.; line 1 pos 7 -- !query @@ -41,12 +31,7 @@ select cast(1 as int) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - 
"messageParameters" : { - "message" : "cannot resolve 'CAST(1 AS INT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS INT)' is of int type.; line 1 pos 7" - } -} +cannot resolve 'CAST(1 AS INT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS INT)' is of int type.; line 1 pos 7 -- !query @@ -55,12 +40,7 @@ select cast(1 as bigint) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CAST(1 AS BIGINT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS BIGINT)' is of bigint type.; line 1 pos 7" - } -} +cannot resolve 'CAST(1 AS BIGINT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS BIGINT)' is of bigint type.; line 1 pos 7 -- !query @@ -69,12 +49,7 @@ select cast(1 as float) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CAST(1 AS FLOAT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS FLOAT)' is of float type.; line 1 pos 7" - } -} +cannot resolve 'CAST(1 AS FLOAT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS FLOAT)' is of float type.; line 1 pos 7 -- !query @@ -83,12 +58,7 @@ select cast(1 as double) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CAST(1 AS DOUBLE) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DOUBLE)' is of double type.; line 1 pos 7" - } -} +cannot resolve 'CAST(1 AS DOUBLE) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DOUBLE)' is of double type.; line 1 pos 7 -- !query @@ -97,12 +67,7 @@ select cast(1 as decimal(10, 0)) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CAST(1 AS DECIMAL(10,0)) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7" - } -} +cannot resolve 'CAST(1 AS DECIMAL(10,0)) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7 -- !query @@ -127,12 +92,7 @@ select cast('1' as binary) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CAST('1' AS BINARY) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST('1' AS BINARY)' is of binary type.; line 1 pos 7" - } -} +cannot resolve 'CAST('1' AS BINARY) + INTERVAL '2' DAY' due to 
data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST('1' AS BINARY)' is of binary type.; line 1 pos 7 -- !query @@ -141,12 +101,7 @@ select cast(1 as boolean) + interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CAST(1 AS BOOLEAN) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS BOOLEAN)' is of boolean type.; line 1 pos 7" - } -} +cannot resolve 'CAST(1 AS BOOLEAN) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS BOOLEAN)' is of boolean type.; line 1 pos 7 -- !query @@ -171,12 +126,7 @@ select interval 2 day + cast(1 as tinyint) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CAST(1 AS TINYINT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS TINYINT)' is of tinyint type.; line 1 pos 7" - } -} +cannot resolve 'CAST(1 AS TINYINT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS TINYINT)' is of tinyint type.; line 1 pos 7 -- !query @@ -185,12 +135,7 @@ select interval 2 day + cast(1 as smallint) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CAST(1 AS SMALLINT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS SMALLINT)' is of smallint type.; line 1 pos 7" - } -} +cannot resolve 'CAST(1 AS SMALLINT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS SMALLINT)' is of smallint type.; line 1 pos 7 -- !query @@ -199,12 +144,7 @@ select interval 2 day + cast(1 as int) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CAST(1 AS INT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS INT)' is of int type.; line 1 pos 7" - } -} +cannot resolve 'CAST(1 AS INT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS INT)' is of int type.; line 1 pos 7 -- !query @@ -213,12 +153,7 @@ select interval 2 day + cast(1 as bigint) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CAST(1 AS BIGINT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS BIGINT)' is of bigint type.; line 1 pos 7" - } -} +cannot resolve 'CAST(1 AS BIGINT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS BIGINT)' is of bigint type.; line 1 pos 7 -- !query @@ -227,12 +162,7 @@ select interval 2 day + cast(1 as float) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - 
"messageParameters" : { - "message" : "cannot resolve 'CAST(1 AS FLOAT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS FLOAT)' is of float type.; line 1 pos 7" - } -} +cannot resolve 'CAST(1 AS FLOAT) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS FLOAT)' is of float type.; line 1 pos 7 -- !query @@ -241,12 +171,7 @@ select interval 2 day + cast(1 as double) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CAST(1 AS DOUBLE) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DOUBLE)' is of double type.; line 1 pos 7" - } -} +cannot resolve 'CAST(1 AS DOUBLE) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DOUBLE)' is of double type.; line 1 pos 7 -- !query @@ -255,12 +180,7 @@ select interval 2 day + cast(1 as decimal(10, 0)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CAST(1 AS DECIMAL(10,0)) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7" - } -} +cannot resolve 'CAST(1 AS DECIMAL(10,0)) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7 -- !query @@ -285,12 +205,7 @@ select interval 2 day + cast('1' as binary) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CAST('1' AS BINARY) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST('1' AS BINARY)' is of binary type.; line 1 pos 7" - } -} +cannot resolve 'CAST('1' AS BINARY) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST('1' AS BINARY)' is of binary type.; line 1 pos 7 -- !query @@ -299,12 +214,7 @@ select interval 2 day + cast(1 as boolean) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CAST(1 AS BOOLEAN) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS BOOLEAN)' is of boolean type.; line 1 pos 7" - } -} +cannot resolve 'CAST(1 AS BOOLEAN) + INTERVAL '2' DAY' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS BOOLEAN)' is of boolean type.; line 1 pos 7 -- !query @@ -329,12 +239,7 @@ select cast(1 as tinyint) - interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CAST(1 AS TINYINT) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS TINYINT)' is of tinyint type.; line 1 pos 7" - } -} +cannot resolve 
'CAST(1 AS TINYINT) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS TINYINT)' is of tinyint type.; line 1 pos 7 -- !query @@ -343,12 +248,7 @@ select cast(1 as smallint) - interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CAST(1 AS SMALLINT) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS SMALLINT)' is of smallint type.; line 1 pos 7" - } -} +cannot resolve 'CAST(1 AS SMALLINT) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS SMALLINT)' is of smallint type.; line 1 pos 7 -- !query @@ -357,12 +257,7 @@ select cast(1 as int) - interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CAST(1 AS INT) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS INT)' is of int type.; line 1 pos 7" - } -} +cannot resolve 'CAST(1 AS INT) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS INT)' is of int type.; line 1 pos 7 -- !query @@ -371,12 +266,7 @@ select cast(1 as bigint) - interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CAST(1 AS BIGINT) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS BIGINT)' is of bigint type.; line 1 pos 7" - } -} +cannot resolve 'CAST(1 AS BIGINT) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS BIGINT)' is of bigint type.; line 1 pos 7 -- !query @@ -385,12 +275,7 @@ select cast(1 as float) - interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CAST(1 AS FLOAT) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS FLOAT)' is of float type.; line 1 pos 7" - } -} +cannot resolve 'CAST(1 AS FLOAT) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS FLOAT)' is of float type.; line 1 pos 7 -- !query @@ -399,12 +284,7 @@ select cast(1 as double) - interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CAST(1 AS DOUBLE) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DOUBLE)' is of double type.; line 1 pos 7" - } -} +cannot resolve 'CAST(1 AS DOUBLE) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DOUBLE)' is of double type.; line 1 pos 7 -- !query @@ -413,12 +293,7 @@ select cast(1 as decimal(10, 0)) - interval 2 day struct<> -- !query output 
org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CAST(1 AS DECIMAL(10,0)) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7" - } -} +cannot resolve 'CAST(1 AS DECIMAL(10,0)) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7 -- !query @@ -443,12 +318,7 @@ select cast('1' as binary) - interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CAST('1' AS BINARY) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST('1' AS BINARY)' is of binary type.; line 1 pos 7" - } -} +cannot resolve 'CAST('1' AS BINARY) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST('1' AS BINARY)' is of binary type.; line 1 pos 7 -- !query @@ -457,12 +327,7 @@ select cast(1 as boolean) - interval 2 day struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'CAST(1 AS BOOLEAN) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS BOOLEAN)' is of boolean type.; line 1 pos 7" - } -} +cannot resolve 'CAST(1 AS BOOLEAN) + (- INTERVAL '2' DAY)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS BOOLEAN)' is of boolean type.; line 1 pos 7 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/decimalPrecision.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/decimalPrecision.sql.out index 4315da0083bdd..2dbe93f312d24 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/decimalPrecision.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/decimalPrecision.sql.out @@ -237,12 +237,7 @@ SELECT cast('1' as binary) + cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7 -- !query @@ -251,12 +246,7 @@ SELECT cast('1' as binary) + cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS 
BINARY) + CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7 -- !query @@ -265,12 +255,7 @@ SELECT cast('1' as binary) + cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7 -- !query @@ -279,12 +264,7 @@ SELECT cast('1' as binary) + cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) + CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7 -- !query @@ -293,12 +273,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) + cast(1 as decimal(3, 0)) FRO struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7 -- !query @@ -307,12 +282,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) + cast(1 as decimal(5, 0)) FRO struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7 -- !query @@ -321,12 +291,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) + cast(1 as decimal(10, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in 
'(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7 -- !query @@ -335,12 +300,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) + cast(1 as decimal(20, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7 -- !query @@ -349,12 +309,7 @@ SELECT cast('2017-12-11 09:30:00' as date) + cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7" - } -} +cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7 -- !query @@ -363,12 +318,7 @@ SELECT cast('2017-12-11 09:30:00' as date) + cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7" - } -} +cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7 -- !query @@ -377,12 +327,7 @@ SELECT cast('2017-12-11 09:30:00' as date) + cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7" - } -} +cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7 -- !query @@ -391,12 +336,7 @@ SELECT cast('2017-12-11 09:30:00' as date) + cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS 
DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7" - } -} +cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7 -- !query @@ -661,12 +601,7 @@ SELECT cast(1 as decimal(3, 0)) + cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) + CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) + CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(3,0)) + CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) + CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7 -- !query @@ -675,12 +610,7 @@ SELECT cast(1 as decimal(5, 0)) + cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) + CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) + CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(5,0)) + CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) + CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7 -- !query @@ -689,12 +619,7 @@ SELECT cast(1 as decimal(10, 0)) + cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) + CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) + CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(10,0)) + CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) + CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7 -- !query @@ -703,12 +628,7 @@ SELECT cast(1 as decimal(20, 0)) + cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) + CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) + CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(20,0)) + CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) + CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7 -- !query @@ -717,12 +637,7 @@ SELECT cast(1 as decimal(3, 0)) + cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) + CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) + CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(3,0)) + CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) + CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7 -- !query @@ -731,12 
+646,7 @@ SELECT cast(1 as decimal(5, 0)) + cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) + CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) + CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(5,0)) + CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) + CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7 -- !query @@ -745,12 +655,7 @@ SELECT cast(1 as decimal(10, 0)) + cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) + CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) + CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(10,0)) + CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) + CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7 -- !query @@ -759,12 +664,7 @@ SELECT cast(1 as decimal(20, 0)) + cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) + CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) + CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(20,0)) + CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) + CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7 -- !query @@ -773,12 +673,7 @@ SELECT cast(1 as decimal(3, 0)) + cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(3,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7 -- !query @@ -787,12 +682,7 @@ SELECT cast(1 as decimal(5, 0)) + cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(5,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7 -- !query @@ -801,12 +691,7 @@ SELECT cast(1 as decimal(10, 0)) + cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query 
output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(10,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7 -- !query @@ -815,12 +700,7 @@ SELECT cast(1 as decimal(20, 0)) + cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(20,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7 -- !query @@ -829,12 +709,7 @@ SELECT cast(1 as decimal(3, 0)) + cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7" - } -} +cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7 -- !query @@ -843,12 +718,7 @@ SELECT cast(1 as decimal(5, 0)) + cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7" - } -} +cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7 -- !query @@ -857,12 +727,7 @@ SELECT cast(1 as decimal(10, 0)) + cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7" - } -} +cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, 
however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7 -- !query @@ -871,12 +736,7 @@ SELECT cast(1 as decimal(20, 0)) + cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7" - } -} +cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7 -- !query @@ -1109,12 +969,7 @@ SELECT cast('1' as binary) - cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7 -- !query @@ -1123,12 +978,7 @@ SELECT cast('1' as binary) - cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7 -- !query @@ -1137,12 +987,7 @@ SELECT cast('1' as binary) - cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7 -- !query @@ -1151,12 +996,7 @@ SELECT cast('1' as binary) - cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) - CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7 -- !query @@ -1165,12 +1005,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) - cast(1 as decimal(3, 0)) FRO struct<> -- !query output 
org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7 -- !query @@ -1179,12 +1014,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) - cast(1 as decimal(5, 0)) FRO struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7 -- !query @@ -1193,12 +1023,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) - cast(1 as decimal(10, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7 -- !query @@ -1207,12 +1032,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) - cast(1 as decimal(20, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7 -- !query @@ -1221,12 +1041,7 @@ SELECT cast('2017-12-11 09:30:00' as date) - cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'date_sub(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7" - } -} +cannot resolve 'date_sub(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS 
DECIMAL(3,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7 -- !query @@ -1235,12 +1050,7 @@ SELECT cast('2017-12-11 09:30:00' as date) - cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'date_sub(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7" - } -} +cannot resolve 'date_sub(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7 -- !query @@ -1249,12 +1059,7 @@ SELECT cast('2017-12-11 09:30:00' as date) - cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'date_sub(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7" - } -} +cannot resolve 'date_sub(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7 -- !query @@ -1263,12 +1068,7 @@ SELECT cast('2017-12-11 09:30:00' as date) - cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'date_sub(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7" - } -} +cannot resolve 'date_sub(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7 -- !query @@ -1533,12 +1333,7 @@ SELECT cast(1 as decimal(3, 0)) - cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) - CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) - CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(3,0)) - CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) - CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7 -- !query @@ -1547,12 +1342,7 @@ SELECT cast(1 as decimal(5, 0)) - cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) - CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) - CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(5,0)) - CAST('1' AS 
BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) - CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7 -- !query @@ -1561,12 +1351,7 @@ SELECT cast(1 as decimal(10, 0)) - cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) - CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) - CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(10,0)) - CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) - CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7 -- !query @@ -1575,12 +1360,7 @@ SELECT cast(1 as decimal(20, 0)) - cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) - CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) - CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(20,0)) - CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) - CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7 -- !query @@ -1589,12 +1369,7 @@ SELECT cast(1 as decimal(3, 0)) - cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) - CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) - CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(3,0)) - CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) - CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7 -- !query @@ -1603,12 +1378,7 @@ SELECT cast(1 as decimal(5, 0)) - cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) - CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) - CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(5,0)) - CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) - CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7 -- !query @@ -1617,12 +1387,7 @@ SELECT cast(1 as decimal(10, 0)) - cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) - CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) - CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(10,0)) - CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) - CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7 -- !query @@ -1631,12 +1396,7 @@ SELECT cast(1 as decimal(20, 0)) - cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : 
"LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) - CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) - CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(20,0)) - CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) - CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7 -- !query @@ -1645,12 +1405,7 @@ SELECT cast(1 as decimal(3, 0)) - cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(3,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7 -- !query @@ -1659,12 +1414,7 @@ SELECT cast(1 as decimal(5, 0)) - cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(5,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7 -- !query @@ -1673,12 +1423,7 @@ SELECT cast(1 as decimal(10, 0)) - cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(10,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7 -- !query @@ -1687,12 +1432,7 @@ SELECT cast(1 as decimal(20, 0)) - cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(20,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'CAST(1 AS DECIMAL(20,0))' is of 
decimal(20,0) type.; line 1 pos 7 -- !query @@ -1701,12 +1441,7 @@ SELECT cast(1 as decimal(3, 0)) - cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) - CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: argument 1 requires date type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(3,0)) - CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: argument 1 requires date type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7 -- !query @@ -1715,12 +1450,7 @@ SELECT cast(1 as decimal(5, 0)) - cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) - CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: argument 1 requires date type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(5,0)) - CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: argument 1 requires date type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7 -- !query @@ -1729,12 +1459,7 @@ SELECT cast(1 as decimal(10, 0)) - cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) - CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: argument 1 requires date type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(10,0)) - CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: argument 1 requires date type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7 -- !query @@ -1743,12 +1468,7 @@ SELECT cast(1 as decimal(20, 0)) - cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) - CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: argument 1 requires date type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(20,0)) - CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: argument 1 requires date type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7 -- !query @@ -1981,12 +1701,7 @@ SELECT cast('1' as binary) * cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7 -- !query @@ -1995,12 +1710,7 @@ SELECT cast('1' as binary) * cast(1 as decimal(5, 0)) FROM t struct<> -- !query output 
org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7 -- !query @@ -2009,12 +1719,7 @@ SELECT cast('1' as binary) * cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7 -- !query @@ -2023,12 +1728,7 @@ SELECT cast('1' as binary) * cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) * CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7 -- !query @@ -2037,12 +1737,7 @@ SELECT cast('2017*12*11 09:30:00.0' as timestamp) * cast(1 as decimal(3, 0)) FRO struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7 -- !query @@ -2051,12 +1746,7 @@ SELECT cast('2017*12*11 09:30:00.0' as timestamp) * cast(1 as decimal(5, 0)) FRO struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7 -- !query @@ -2065,12 +1755,7 @@ SELECT cast('2017*12*11 09:30:00.0' as timestamp) * cast(1 as decimal(10, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", 
- "messageParameters" : { - "message" : "cannot resolve '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7 -- !query @@ -2079,12 +1764,7 @@ SELECT cast('2017*12*11 09:30:00.0' as timestamp) * cast(1 as decimal(20, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00.0' AS TIMESTAMP) * CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7 -- !query @@ -2093,12 +1773,7 @@ SELECT cast('2017*12*11 09:30:00' as date) * cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7 -- !query @@ -2107,12 +1782,7 @@ SELECT cast('2017*12*11 09:30:00' as date) * cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7 -- !query @@ -2121,12 +1791,7 @@ SELECT cast('2017*12*11 09:30:00' as date) * cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7 -- !query @@ -2135,12 +1800,7 @@ SELECT cast('2017*12*11 
09:30:00' as date) * cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017*12*11 09:30:00' AS DATE) * CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7 -- !query @@ -2405,12 +2065,7 @@ SELECT cast(1 as decimal(3, 0)) * cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) * CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) * CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(3,0)) * CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) * CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7 -- !query @@ -2419,12 +2074,7 @@ SELECT cast(1 as decimal(5, 0)) * cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) * CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) * CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(5,0)) * CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) * CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7 -- !query @@ -2433,12 +2083,7 @@ SELECT cast(1 as decimal(10, 0)) * cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) * CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) * CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(10,0)) * CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) * CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7 -- !query @@ -2447,12 +2092,7 @@ SELECT cast(1 as decimal(20, 0)) * cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) * CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) * CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(20,0)) * CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) * CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7 -- !query @@ -2461,12 +2101,7 @@ SELECT cast(1 as decimal(3, 0)) * cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) * CAST(1 AS BOOLEAN))' due to 
data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) * CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(3,0)) * CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) * CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7 -- !query @@ -2475,12 +2110,7 @@ SELECT cast(1 as decimal(5, 0)) * cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) * CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) * CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(5,0)) * CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) * CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7 -- !query @@ -2489,12 +2119,7 @@ SELECT cast(1 as decimal(10, 0)) * cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) * CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) * CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(10,0)) * CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) * CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7 -- !query @@ -2503,12 +2128,7 @@ SELECT cast(1 as decimal(20, 0)) * cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) * CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) * CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(20,0)) * CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) * CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7 -- !query @@ -2517,12 +2137,7 @@ SELECT cast(1 as decimal(3, 0)) * cast('2017*12*11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(3,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7 -- !query @@ -2531,12 +2146,7 @@ SELECT cast(1 as decimal(5, 0)) * cast('2017*12*11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7" - } -} +cannot 
resolve '(CAST(1 AS DECIMAL(5,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7 -- !query @@ -2545,12 +2155,7 @@ SELECT cast(1 as decimal(10, 0)) * cast('2017*12*11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(10,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7 -- !query @@ -2559,12 +2164,7 @@ SELECT cast(1 as decimal(20, 0)) * cast('2017*12*11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(20,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) * CAST('2017*12*11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7 -- !query @@ -2573,12 +2173,7 @@ SELECT cast(1 as decimal(3, 0)) * cast('2017*12*11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) * CAST('2017*12*11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) * CAST('2017*12*11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(3,0)) * CAST('2017*12*11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) * CAST('2017*12*11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7 -- !query @@ -2587,12 +2182,7 @@ SELECT cast(1 as decimal(5, 0)) * cast('2017*12*11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) * CAST('2017*12*11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) * CAST('2017*12*11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(5,0)) * CAST('2017*12*11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) * CAST('2017*12*11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7 -- !query @@ -2601,12 +2191,7 @@ SELECT cast(1 as decimal(10, 0)) * cast('2017*12*11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) * CAST('2017*12*11 09:30:00' AS DATE))' due to 
data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) * CAST('2017*12*11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(10,0)) * CAST('2017*12*11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) * CAST('2017*12*11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7 -- !query @@ -2615,12 +2200,7 @@ SELECT cast(1 as decimal(20, 0)) * cast('2017*12*11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) * CAST('2017*12*11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) * CAST('2017*12*11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(20,0)) * CAST('2017*12*11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) * CAST('2017*12*11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7 -- !query @@ -2853,12 +2433,7 @@ SELECT cast('1' as binary) / cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7 -- !query @@ -2867,12 +2442,7 @@ SELECT cast('1' as binary) / cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7 -- !query @@ -2881,12 +2451,7 @@ SELECT cast('1' as binary) / cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7 -- !query @@ -2895,12 +2460,7 @@ SELECT cast('1' as binary) / cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS 
DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7 -- !query @@ -2909,12 +2469,7 @@ SELECT cast('2017/12/11 09:30:00.0' as timestamp) / cast(1 as decimal(3, 0)) FRO struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7 -- !query @@ -2923,12 +2478,7 @@ SELECT cast('2017/12/11 09:30:00.0' as timestamp) / cast(1 as decimal(5, 0)) FRO struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7 -- !query @@ -2937,12 +2487,7 @@ SELECT cast('2017/12/11 09:30:00.0' as timestamp) / cast(1 as decimal(10, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7 -- !query @@ -2951,12 +2496,7 @@ SELECT cast('2017/12/11 09:30:00.0' as timestamp) / cast(1 as decimal(20, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7 -- !query @@ -2965,12 +2505,7 @@ SELECT cast('2017/12/11 09:30:00' as date) / cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(3,0)))' due to data 
type mismatch: differing types in '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7 -- !query @@ -2979,12 +2514,7 @@ SELECT cast('2017/12/11 09:30:00' as date) / cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7 -- !query @@ -2993,12 +2523,7 @@ SELECT cast('2017/12/11 09:30:00' as date) / cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7 -- !query @@ -3007,12 +2532,7 @@ SELECT cast('2017/12/11 09:30:00' as date) / cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017/12/11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7 -- !query @@ -3277,12 +2797,7 @@ SELECT cast(1 as decimal(3, 0)) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) / CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(3,0)) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) / CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7 -- !query @@ -3291,12 +2806,7 @@ SELECT cast(1 as decimal(5, 0)) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS 
DECIMAL(5,0)) / CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(5,0)) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) / CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7 -- !query @@ -3305,12 +2815,7 @@ SELECT cast(1 as decimal(10, 0)) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7 -- !query @@ -3319,12 +2824,7 @@ SELECT cast(1 as decimal(20, 0)) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) / CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(20,0)) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) / CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7 -- !query @@ -3333,12 +2833,7 @@ SELECT cast(1 as decimal(3, 0)) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) / CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(3,0)) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) / CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7 -- !query @@ -3347,12 +2842,7 @@ SELECT cast(1 as decimal(5, 0)) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) / CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(5,0)) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) / CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7 -- !query @@ -3361,12 +2851,7 @@ SELECT cast(1 as decimal(10, 0)) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7 -- !query @@ -3375,12 +2860,7 @@ 
SELECT cast(1 as decimal(20, 0)) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) / CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(20,0)) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) / CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7 -- !query @@ -3389,12 +2869,7 @@ SELECT cast(1 as decimal(3, 0)) / cast('2017/12/11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(3,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7 -- !query @@ -3403,12 +2878,7 @@ SELECT cast(1 as decimal(5, 0)) / cast('2017/12/11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(5,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7 -- !query @@ -3417,12 +2887,7 @@ SELECT cast(1 as decimal(10, 0)) / cast('2017/12/11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7 -- !query @@ -3431,12 +2896,7 @@ SELECT cast(1 as decimal(20, 0)) / cast('2017/12/11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(20,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in 
'(CAST(1 AS DECIMAL(20,0)) / CAST('2017/12/11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7 -- !query @@ -3445,12 +2905,7 @@ SELECT cast(1 as decimal(3, 0)) / cast('2017/12/11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) / CAST('2017/12/11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) / CAST('2017/12/11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(3,0)) / CAST('2017/12/11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) / CAST('2017/12/11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7 -- !query @@ -3459,12 +2914,7 @@ SELECT cast(1 as decimal(5, 0)) / cast('2017/12/11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) / CAST('2017/12/11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) / CAST('2017/12/11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(5,0)) / CAST('2017/12/11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) / CAST('2017/12/11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7 -- !query @@ -3473,12 +2923,7 @@ SELECT cast(1 as decimal(10, 0)) / cast('2017/12/11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST('2017/12/11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST('2017/12/11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST('2017/12/11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST('2017/12/11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7 -- !query @@ -3487,12 +2932,7 @@ SELECT cast(1 as decimal(20, 0)) / cast('2017/12/11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) / CAST('2017/12/11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) / CAST('2017/12/11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(20,0)) / CAST('2017/12/11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) / CAST('2017/12/11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7 -- !query @@ -3725,12 +3165,7 @@ SELECT cast('1' as binary) % cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing 
types in '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7 -- !query @@ -3739,12 +3174,7 @@ SELECT cast('1' as binary) % cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7 -- !query @@ -3753,12 +3183,7 @@ SELECT cast('1' as binary) % cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7 -- !query @@ -3767,12 +3192,7 @@ SELECT cast('1' as binary) % cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) % CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7 -- !query @@ -3781,12 +3201,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) % cast(1 as decimal(3, 0)) FRO struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7 -- !query @@ -3795,12 +3210,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) % cast(1 as decimal(5, 0)) FRO struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; 
line 1 pos 7 -- !query @@ -3809,12 +3219,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) % cast(1 as decimal(10, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7 -- !query @@ -3823,12 +3228,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) % cast(1 as decimal(20, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7 -- !query @@ -3837,12 +3237,7 @@ SELECT cast('2017-12-11 09:30:00' as date) % cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7 -- !query @@ -3851,12 +3246,7 @@ SELECT cast('2017-12-11 09:30:00' as date) % cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7 -- !query @@ -3865,12 +3255,7 @@ SELECT cast('2017-12-11 09:30:00' as date) % cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS 
DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7 -- !query @@ -3879,12 +3264,7 @@ SELECT cast('2017-12-11 09:30:00' as date) % cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) % CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7 -- !query @@ -4149,12 +3529,7 @@ SELECT cast(1 as decimal(3, 0)) % cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) % CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) % CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(3,0)) % CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) % CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7 -- !query @@ -4163,12 +3538,7 @@ SELECT cast(1 as decimal(5, 0)) % cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) % CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) % CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(5,0)) % CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) % CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7 -- !query @@ -4177,12 +3547,7 @@ SELECT cast(1 as decimal(10, 0)) % cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) % CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) % CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(10,0)) % CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) % CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7 -- !query @@ -4191,12 +3556,7 @@ SELECT cast(1 as decimal(20, 0)) % cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) % CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) % CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(20,0)) % CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) % CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7 -- !query @@ -4205,12 +3565,7 @@ SELECT cast(1 as decimal(3, 0)) % 
cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) % CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) % CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(3,0)) % CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) % CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7 -- !query @@ -4219,12 +3574,7 @@ SELECT cast(1 as decimal(5, 0)) % cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) % CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) % CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(5,0)) % CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) % CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7 -- !query @@ -4233,12 +3583,7 @@ SELECT cast(1 as decimal(10, 0)) % cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) % CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) % CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(10,0)) % CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) % CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7 -- !query @@ -4247,12 +3592,7 @@ SELECT cast(1 as decimal(20, 0)) % cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) % CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) % CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(20,0)) % CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) % CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7 -- !query @@ -4261,12 +3601,7 @@ SELECT cast(1 as decimal(3, 0)) % cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(3,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7 -- !query @@ -4275,12 +3610,7 @@ SELECT cast(1 as decimal(5, 0)) % cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS 
DECIMAL(5,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(5,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7 -- !query @@ -4289,12 +3619,7 @@ SELECT cast(1 as decimal(10, 0)) % cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(10,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7 -- !query @@ -4303,12 +3628,7 @@ SELECT cast(1 as decimal(20, 0)) % cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(20,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7 -- !query @@ -4317,12 +3637,7 @@ SELECT cast(1 as decimal(3, 0)) % cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) % CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) % CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(3,0)) % CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) % CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7 -- !query @@ -4331,12 +3646,7 @@ SELECT cast(1 as decimal(5, 0)) % cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) % CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) % CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(5,0)) % CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) % CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7 -- !query @@ -4345,12 +3655,7 @@ SELECT cast(1 as decimal(10, 0)) % cast('2017-12-11 09:30:00' as 
date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) % CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) % CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(10,0)) % CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) % CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7 -- !query @@ -4359,12 +3664,7 @@ SELECT cast(1 as decimal(20, 0)) % cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) % CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) % CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(20,0)) % CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) % CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7 -- !query @@ -4597,12 +3897,7 @@ SELECT pmod(cast('1' as binary), cast(1 as decimal(3, 0))) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7" - } -} +cannot resolve 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7 -- !query @@ -4611,12 +3906,7 @@ SELECT pmod(cast('1' as binary), cast(1 as decimal(5, 0))) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7" - } -} +cannot resolve 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7 -- !query @@ -4625,12 +3915,7 @@ SELECT pmod(cast('1' as binary), cast(1 as decimal(10, 0))) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7" - } -} +cannot resolve 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7 -- !query @@ -4639,12 +3924,7 @@ SELECT pmod(cast('1' as binary), cast(1 as decimal(20, 0))) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - 
"messageParameters" : { - "message" : "cannot resolve 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7" - } -} +cannot resolve 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in 'pmod(CAST('1' AS BINARY), CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7 -- !query @@ -4653,12 +3933,7 @@ SELECT pmod(cast('2017-12-11 09:30:00.0' as timestamp), cast(1 as decimal(3, 0)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7" - } -} +cannot resolve 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7 -- !query @@ -4667,12 +3942,7 @@ SELECT pmod(cast('2017-12-11 09:30:00.0' as timestamp), cast(1 as decimal(5, 0)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7" - } -} +cannot resolve 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7 -- !query @@ -4681,12 +3951,7 @@ SELECT pmod(cast('2017-12-11 09:30:00.0' as timestamp), cast(1 as decimal(10, 0) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7" - } -} +cannot resolve 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7 -- !query @@ -4695,12 +3960,7 @@ SELECT pmod(cast('2017-12-11 09:30:00.0' as timestamp), cast(1 as decimal(20, 0) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7" - } -} +cannot resolve 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(20,0)))' (timestamp and 
decimal(20,0)).; line 1 pos 7 -- !query @@ -4709,12 +3969,7 @@ SELECT pmod(cast('2017-12-11 09:30:00' as date), cast(1 as decimal(3, 0))) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7" - } -} +cannot resolve 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7 -- !query @@ -4723,12 +3978,7 @@ SELECT pmod(cast('2017-12-11 09:30:00' as date), cast(1 as decimal(5, 0))) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7" - } -} +cannot resolve 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7 -- !query @@ -4737,12 +3987,7 @@ SELECT pmod(cast('2017-12-11 09:30:00' as date), cast(1 as decimal(10, 0))) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7" - } -} +cannot resolve 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7 -- !query @@ -4751,12 +3996,7 @@ SELECT pmod(cast('2017-12-11 09:30:00' as date), cast(1 as decimal(20, 0))) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7" - } -} +cannot resolve 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7 -- !query @@ -5021,12 +4261,7 @@ SELECT pmod(cast(1 as decimal(3, 0)) , cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'pmod(CAST(1 AS DECIMAL(3,0)), CAST('1' AS BINARY))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(3,0)), CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7" - } -} +cannot resolve 'pmod(CAST(1 AS DECIMAL(3,0)), CAST('1' AS BINARY))' due to data type mismatch: differing types in 
'pmod(CAST(1 AS DECIMAL(3,0)), CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7 -- !query @@ -5035,12 +4270,7 @@ SELECT pmod(cast(1 as decimal(5, 0)) , cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'pmod(CAST(1 AS DECIMAL(5,0)), CAST('1' AS BINARY))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(5,0)), CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7" - } -} +cannot resolve 'pmod(CAST(1 AS DECIMAL(5,0)), CAST('1' AS BINARY))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(5,0)), CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7 -- !query @@ -5049,12 +4279,7 @@ SELECT pmod(cast(1 as decimal(10, 0)), cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'pmod(CAST(1 AS DECIMAL(10,0)), CAST('1' AS BINARY))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(10,0)), CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7" - } -} +cannot resolve 'pmod(CAST(1 AS DECIMAL(10,0)), CAST('1' AS BINARY))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(10,0)), CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7 -- !query @@ -5063,12 +4288,7 @@ SELECT pmod(cast(1 as decimal(20, 0)), cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'pmod(CAST(1 AS DECIMAL(20,0)), CAST('1' AS BINARY))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(20,0)), CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7" - } -} +cannot resolve 'pmod(CAST(1 AS DECIMAL(20,0)), CAST('1' AS BINARY))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(20,0)), CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7 -- !query @@ -5077,12 +4297,7 @@ SELECT pmod(cast(1 as decimal(3, 0)) , cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'pmod(CAST(1 AS DECIMAL(3,0)), CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(3,0)), CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7" - } -} +cannot resolve 'pmod(CAST(1 AS DECIMAL(3,0)), CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(3,0)), CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7 -- !query @@ -5091,12 +4306,7 @@ SELECT pmod(cast(1 as decimal(5, 0)) , cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'pmod(CAST(1 AS DECIMAL(5,0)), CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(5,0)), CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7" - } -} +cannot resolve 'pmod(CAST(1 AS DECIMAL(5,0)), CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(5,0)), CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7 -- !query @@ -5105,12 +4315,7 @@ SELECT pmod(cast(1 as decimal(10, 0)), cast(1 as boolean)) FROM t struct<> -- !query output 
org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'pmod(CAST(1 AS DECIMAL(10,0)), CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(10,0)), CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7" - } -} +cannot resolve 'pmod(CAST(1 AS DECIMAL(10,0)), CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(10,0)), CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7 -- !query @@ -5119,12 +4324,7 @@ SELECT pmod(cast(1 as decimal(20, 0)), cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'pmod(CAST(1 AS DECIMAL(20,0)), CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(20,0)), CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7" - } -} +cannot resolve 'pmod(CAST(1 AS DECIMAL(20,0)), CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(20,0)), CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7 -- !query @@ -5133,12 +4333,7 @@ SELECT pmod(cast(1 as decimal(3, 0)) , cast('2017-12-11 09:30:00.0' as timestamp struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'pmod(CAST(1 AS DECIMAL(3,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(3,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7" - } -} +cannot resolve 'pmod(CAST(1 AS DECIMAL(3,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(3,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7 -- !query @@ -5147,12 +4342,7 @@ SELECT pmod(cast(1 as decimal(5, 0)) , cast('2017-12-11 09:30:00.0' as timestamp struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'pmod(CAST(1 AS DECIMAL(5,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(5,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7" - } -} +cannot resolve 'pmod(CAST(1 AS DECIMAL(5,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(5,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7 -- !query @@ -5161,12 +4351,7 @@ SELECT pmod(cast(1 as decimal(10, 0)), cast('2017-12-11 09:30:00.0' as timestamp struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'pmod(CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7" - } -} +cannot resolve 'pmod(CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7 -- !query @@ -5175,12 
+4360,7 @@ SELECT pmod(cast(1 as decimal(20, 0)), cast('2017-12-11 09:30:00.0' as timestamp struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'pmod(CAST(1 AS DECIMAL(20,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(20,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7" - } -} +cannot resolve 'pmod(CAST(1 AS DECIMAL(20,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(20,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7 -- !query @@ -5189,12 +4369,7 @@ SELECT pmod(cast(1 as decimal(3, 0)) , cast('2017-12-11 09:30:00' as date)) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'pmod(CAST(1 AS DECIMAL(3,0)), CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(3,0)), CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7" - } -} +cannot resolve 'pmod(CAST(1 AS DECIMAL(3,0)), CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(3,0)), CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7 -- !query @@ -5203,12 +4378,7 @@ SELECT pmod(cast(1 as decimal(5, 0)) , cast('2017-12-11 09:30:00' as date)) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'pmod(CAST(1 AS DECIMAL(5,0)), CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(5,0)), CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7" - } -} +cannot resolve 'pmod(CAST(1 AS DECIMAL(5,0)), CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(5,0)), CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7 -- !query @@ -5217,12 +4387,7 @@ SELECT pmod(cast(1 as decimal(10, 0)), cast('2017-12-11 09:30:00' as date)) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'pmod(CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7" - } -} +cannot resolve 'pmod(CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7 -- !query @@ -5231,12 +4396,7 @@ SELECT pmod(cast(1 as decimal(20, 0)), cast('2017-12-11 09:30:00' as date)) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'pmod(CAST(1 AS DECIMAL(20,0)), CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(20,0)), CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7" - } -} +cannot resolve 'pmod(CAST(1 AS DECIMAL(20,0)), CAST('2017-12-11 09:30:00' AS DATE))' 
due to data type mismatch: differing types in 'pmod(CAST(1 AS DECIMAL(20,0)), CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7 -- !query @@ -5469,12 +4629,7 @@ SELECT cast('1' as binary) = cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7 -- !query @@ -5483,12 +4638,7 @@ SELECT cast('1' as binary) = cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7 -- !query @@ -5497,12 +4647,7 @@ SELECT cast('1' as binary) = cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7 -- !query @@ -5511,12 +4656,7 @@ SELECT cast('1' as binary) = cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7 -- !query @@ -5525,12 +4665,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) = cast(1 as decimal(3, 0)) FRO struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7 -- !query @@ -5539,12 +4674,7 @@ SELECT 
cast('2017-12-11 09:30:00.0' as timestamp) = cast(1 as decimal(5, 0)) FRO struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7 -- !query @@ -5553,12 +4683,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) = cast(1 as decimal(10, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7 -- !query @@ -5567,12 +4692,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) = cast(1 as decimal(20, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7 -- !query @@ -5581,12 +4701,7 @@ SELECT cast('2017-12-11 09:30:00' as date) = cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7 -- !query @@ -5595,12 +4710,7 @@ SELECT cast('2017-12-11 09:30:00' as date) = cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(5,0)))' due to data 
type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7 -- !query @@ -5609,12 +4719,7 @@ SELECT cast('2017-12-11 09:30:00' as date) = cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7 -- !query @@ -5623,12 +4728,7 @@ SELECT cast('2017-12-11 09:30:00' as date) = cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7 -- !query @@ -5893,12 +4993,7 @@ SELECT cast(1 as decimal(3, 0)) = cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) = CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(3,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) = CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7 -- !query @@ -5907,12 +5002,7 @@ SELECT cast(1 as decimal(5, 0)) = cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) = CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(5,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) = CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7 -- !query @@ -5921,12 +5011,7 @@ SELECT cast(1 as decimal(10, 0)) = cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) = CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(10,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) = CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7 -- !query @@ -5935,12 
+5020,7 @@ SELECT cast(1 as decimal(20, 0)) = cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) = CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(20,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) = CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7 -- !query @@ -5981,12 +5061,7 @@ SELECT cast(1 as decimal(3, 0)) = cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7 -- !query @@ -5995,12 +5070,7 @@ SELECT cast(1 as decimal(5, 0)) = cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7 -- !query @@ -6009,12 +5079,7 @@ SELECT cast(1 as decimal(10, 0)) = cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7 -- !query @@ -6023,12 +5088,7 @@ SELECT cast(1 as decimal(20, 0)) = cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing 
types in '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7 -- !query @@ -6037,12 +5097,7 @@ SELECT cast(1 as decimal(3, 0)) = cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7 -- !query @@ -6051,12 +5106,7 @@ SELECT cast(1 as decimal(5, 0)) = cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7 -- !query @@ -6065,12 +5115,7 @@ SELECT cast(1 as decimal(10, 0)) = cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7 -- !query @@ -6079,12 +5124,7 @@ SELECT cast(1 as decimal(20, 0)) = cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7 -- !query @@ -6317,12 +5357,7 @@ SELECT cast('1' as binary) <=> cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(3,0)))' due to data type 
mismatch: differing types in '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7 -- !query @@ -6331,12 +5366,7 @@ SELECT cast('1' as binary) <=> cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7 -- !query @@ -6345,12 +5375,7 @@ SELECT cast('1' as binary) <=> cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7 -- !query @@ -6359,12 +5384,7 @@ SELECT cast('1' as binary) <=> cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <=> CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7 -- !query @@ -6373,12 +5393,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <=> cast(1 as decimal(3, 0)) F struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7 -- !query @@ -6387,12 +5402,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <=> cast(1 as decimal(5, 0)) F struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS 
TIMESTAMP) <=> CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7 -- !query @@ -6401,12 +5411,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <=> cast(1 as decimal(10, 0)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7 -- !query @@ -6415,12 +5420,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <=> cast(1 as decimal(20, 0)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <=> CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7 -- !query @@ -6429,12 +5429,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <=> cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7 -- !query @@ -6443,12 +5438,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <=> cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7 -- !query @@ -6457,12 +5447,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <=> cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(10,0)))' (date and 
decimal(10,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7 -- !query @@ -6471,12 +5456,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <=> cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <=> CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7 -- !query @@ -6741,12 +5721,7 @@ SELECT cast(1 as decimal(3, 0)) <=> cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) <=> CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) <=> CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(3,0)) <=> CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) <=> CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7 -- !query @@ -6755,12 +5730,7 @@ SELECT cast(1 as decimal(5, 0)) <=> cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) <=> CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <=> CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(5,0)) <=> CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <=> CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7 -- !query @@ -6769,12 +5739,7 @@ SELECT cast(1 as decimal(10, 0)) <=> cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) <=> CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) <=> CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(10,0)) <=> CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) <=> CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7 -- !query @@ -6783,12 +5748,7 @@ SELECT cast(1 as decimal(20, 0)) <=> cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) <=> CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) <=> CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(20,0)) <=> CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 
AS DECIMAL(20,0)) <=> CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7 -- !query @@ -6829,12 +5789,7 @@ SELECT cast(1 as decimal(3, 0)) <=> cast('2017-12-11 09:30:00.0' as timestamp) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(3,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7 -- !query @@ -6843,12 +5798,7 @@ SELECT cast(1 as decimal(5, 0)) <=> cast('2017-12-11 09:30:00.0' as timestamp) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(5,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7 -- !query @@ -6857,12 +5807,7 @@ SELECT cast(1 as decimal(10, 0)) <=> cast('2017-12-11 09:30:00.0' as timestamp) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(10,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7 -- !query @@ -6871,12 +5816,7 @@ SELECT cast(1 as decimal(20, 0)) <=> cast('2017-12-11 09:30:00.0' as timestamp) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(20,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) <=> CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7 -- !query @@ -6885,12 +5825,7 @@ SELECT cast(1 as decimal(3, 0)) <=> cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 
AS DECIMAL(3,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(3,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7 -- !query @@ -6899,12 +5834,7 @@ SELECT cast(1 as decimal(5, 0)) <=> cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(5,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7 -- !query @@ -6913,12 +5843,7 @@ SELECT cast(1 as decimal(10, 0)) <=> cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(10,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7 -- !query @@ -6927,12 +5852,7 @@ SELECT cast(1 as decimal(20, 0)) <=> cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(20,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) <=> CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7 -- !query @@ -7165,12 +6085,7 @@ SELECT cast('1' as binary) < cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7 -- !query @@ -7179,12 +6094,7 @@ SELECT cast('1' as binary) < cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) < 
CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7 -- !query @@ -7193,12 +6103,7 @@ SELECT cast('1' as binary) < cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7 -- !query @@ -7207,12 +6112,7 @@ SELECT cast('1' as binary) < cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) < CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7 -- !query @@ -7221,12 +6121,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) < cast(1 as decimal(3, 0)) FRO struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7 -- !query @@ -7235,12 +6130,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) < cast(1 as decimal(5, 0)) FRO struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7 -- !query @@ -7249,12 +6139,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) < cast(1 as decimal(10, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS 
DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7 -- !query @@ -7263,12 +6148,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) < cast(1 as decimal(20, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) < CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7 -- !query @@ -7277,12 +6157,7 @@ SELECT cast('2017-12-11 09:30:00' as date) < cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7 -- !query @@ -7291,12 +6166,7 @@ SELECT cast('2017-12-11 09:30:00' as date) < cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7 -- !query @@ -7305,12 +6175,7 @@ SELECT cast('2017-12-11 09:30:00' as date) < cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7 -- !query @@ -7319,12 +6184,7 @@ SELECT cast('2017-12-11 09:30:00' as date) < cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS 
DATE) < CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) < CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7


 -- !query
@@ -7589,12 +6449,7 @@ SELECT cast(1 as decimal(3, 0)) < cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) < CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) < CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(3,0)) < CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) < CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7


 -- !query
@@ -7603,12 +6458,7 @@ SELECT cast(1 as decimal(5, 0)) < cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) < CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) < CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(5,0)) < CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) < CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7


 -- !query
@@ -7617,12 +6467,7 @@ SELECT cast(1 as decimal(10, 0)) < cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) < CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) < CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(10,0)) < CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) < CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7


 -- !query
@@ -7631,12 +6476,7 @@ SELECT cast(1 as decimal(20, 0)) < cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) < CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) < CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(20,0)) < CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) < CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7


 -- !query
@@ -7645,12 +6485,7 @@ SELECT cast(1 as decimal(3, 0)) < cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) < CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) < CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(3,0)) < CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) < CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7


 -- !query
@@ -7659,12 +6494,7 @@ SELECT cast(1 as decimal(5, 0)) < cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) < CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) < CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(5,0)) < CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) < CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7


 -- !query
@@ -7673,12 +6503,7 @@ SELECT cast(1 as decimal(10, 0)) < cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) < CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) < CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(10,0)) < CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) < CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7


 -- !query
@@ -7687,12 +6512,7 @@ SELECT cast(1 as decimal(20, 0)) < cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) < CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) < CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(20,0)) < CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) < CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7


 -- !query
@@ -7701,12 +6521,7 @@ SELECT cast(1 as decimal(3, 0)) < cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(3,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7


 -- !query
@@ -7715,12 +6530,7 @@ SELECT cast(1 as decimal(5, 0)) < cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(5,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7


 -- !query
@@ -7729,12 +6539,7 @@ SELECT cast(1 as decimal(10, 0)) < cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(10,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7


 -- !query
@@ -7743,12 +6548,7 @@ SELECT cast(1 as decimal(20, 0)) < cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(20,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) < CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7


 -- !query
@@ -7757,12 +6557,7 @@ SELECT cast(1 as decimal(3, 0)) < cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) < CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) < CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(3,0)) < CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) < CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7


 -- !query
@@ -7771,12 +6566,7 @@ SELECT cast(1 as decimal(5, 0)) < cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) < CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) < CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(5,0)) < CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) < CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7


 -- !query
@@ -7785,12 +6575,7 @@ SELECT cast(1 as decimal(10, 0)) < cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) < CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) < CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(10,0)) < CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) < CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7


 -- !query
@@ -7799,12 +6584,7 @@ SELECT cast(1 as decimal(20, 0)) < cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) < CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) < CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(20,0)) < CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) < CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7


 -- !query
@@ -8037,12 +6817,7 @@ SELECT cast('1' as binary) <= cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7


 -- !query
@@ -8051,12 +6826,7 @@ SELECT cast('1' as binary) <= cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7


 -- !query
@@ -8065,12 +6835,7 @@ SELECT cast('1' as binary) <= cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7


 -- !query
@@ -8079,12 +6844,7 @@ SELECT cast('1' as binary) <= cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) <= CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7


 -- !query
@@ -8093,12 +6853,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <= cast(1 as decimal(3, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7


 -- !query
@@ -8107,12 +6862,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <= cast(1 as decimal(5, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7


 -- !query
@@ -8121,12 +6871,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <= cast(1 as decimal(10, 0)) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7


 -- !query
@@ -8135,12 +6880,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <= cast(1 as decimal(20, 0)) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) <= CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7


 -- !query
@@ -8149,12 +6889,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <= cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7


 -- !query
@@ -8163,12 +6898,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <= cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7


 -- !query
@@ -8177,12 +6907,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <= cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7


 -- !query
@@ -8191,12 +6916,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <= cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) <= CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7


 -- !query
@@ -8461,12 +7181,7 @@ SELECT cast(1 as decimal(3, 0)) <= cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) <= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) <= CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(3,0)) <= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) <= CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7


 -- !query
@@ -8475,12 +7190,7 @@ SELECT cast(1 as decimal(5, 0)) <= cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) <= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <= CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(5,0)) <= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <= CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7


 -- !query
@@ -8489,12 +7199,7 @@ SELECT cast(1 as decimal(10, 0)) <= cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) <= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) <= CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(10,0)) <= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) <= CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7


 -- !query
@@ -8503,12 +7208,7 @@ SELECT cast(1 as decimal(20, 0)) <= cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) <= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) <= CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(20,0)) <= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) <= CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7


 -- !query
@@ -8517,12 +7217,7 @@ SELECT cast(1 as decimal(3, 0)) <= cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) <= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) <= CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(3,0)) <= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) <= CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7


 -- !query
@@ -8531,12 +7226,7 @@ SELECT cast(1 as decimal(5, 0)) <= cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) <= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <= CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(5,0)) <= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <= CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7


 -- !query
@@ -8545,12 +7235,7 @@ SELECT cast(1 as decimal(10, 0)) <= cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) <= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) <= CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(10,0)) <= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) <= CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7


 -- !query
@@ -8559,12 +7244,7 @@ SELECT cast(1 as decimal(20, 0)) <= cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) <= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) <= CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(20,0)) <= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) <= CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7


 -- !query
@@ -8573,12 +7253,7 @@ SELECT cast(1 as decimal(3, 0)) <= cast('2017-12-11 09:30:00.0' as timestamp) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(3,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7


 -- !query
@@ -8587,12 +7262,7 @@ SELECT cast(1 as decimal(5, 0)) <= cast('2017-12-11 09:30:00.0' as timestamp) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(5,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7


 -- !query
@@ -8601,12 +7271,7 @@ SELECT cast(1 as decimal(10, 0)) <= cast('2017-12-11 09:30:00.0' as timestamp) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(10,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7


 -- !query
@@ -8615,12 +7280,7 @@ SELECT cast(1 as decimal(20, 0)) <= cast('2017-12-11 09:30:00.0' as timestamp) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(20,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) <= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7


 -- !query
@@ -8629,12 +7289,7 @@ SELECT cast(1 as decimal(3, 0)) <= cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(3,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7


 -- !query
@@ -8643,12 +7298,7 @@ SELECT cast(1 as decimal(5, 0)) <= cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(5,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7


 -- !query
@@ -8657,12 +7307,7 @@ SELECT cast(1 as decimal(10, 0)) <= cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(10,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7


 -- !query
@@ -8671,12 +7316,7 @@ SELECT cast(1 as decimal(20, 0)) <= cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(20,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) <= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7


 -- !query
@@ -8909,12 +7549,7 @@ SELECT cast('1' as binary) > cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7


 -- !query
@@ -8923,12 +7558,7 @@ SELECT cast('1' as binary) > cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7


 -- !query
@@ -8937,12 +7567,7 @@ SELECT cast('1' as binary) > cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7


 -- !query
@@ -8951,12 +7576,7 @@ SELECT cast('1' as binary) > cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) > CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7


 -- !query
@@ -8965,12 +7585,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) > cast(1 as decimal(3, 0)) FRO
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7


 -- !query
@@ -8979,12 +7594,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) > cast(1 as decimal(5, 0)) FRO
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7


 -- !query
@@ -8993,12 +7603,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) > cast(1 as decimal(10, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7


 -- !query
@@ -9007,12 +7612,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) > cast(1 as decimal(20, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) > CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7


 -- !query
@@ -9021,12 +7621,7 @@ SELECT cast('2017-12-11 09:30:00' as date) > cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7


 -- !query
@@ -9035,12 +7630,7 @@ SELECT cast('2017-12-11 09:30:00' as date) > cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7


 -- !query
@@ -9049,12 +7639,7 @@ SELECT cast('2017-12-11 09:30:00' as date) > cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7


 -- !query
@@ -9063,12 +7648,7 @@ SELECT cast('2017-12-11 09:30:00' as date) > cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) > CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7


 -- !query
@@ -9333,12 +7913,7 @@ SELECT cast(1 as decimal(3, 0)) > cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) > CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) > CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(3,0)) > CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) > CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7


 -- !query
@@ -9347,12 +7922,7 @@ SELECT cast(1 as decimal(5, 0)) > cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) > CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) > CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(5,0)) > CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) > CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7


 -- !query
@@ -9361,12 +7931,7 @@ SELECT cast(1 as decimal(10, 0)) > cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) > CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) > CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(10,0)) > CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) > CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7


 -- !query
@@ -9375,12 +7940,7 @@ SELECT cast(1 as decimal(20, 0)) > cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) > CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) > CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(20,0)) > CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) > CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7


 -- !query
@@ -9389,12 +7949,7 @@ SELECT cast(1 as decimal(3, 0)) > cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) > CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) > CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(3,0)) > CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) > CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7


 -- !query
@@ -9403,12 +7958,7 @@ SELECT cast(1 as decimal(5, 0)) > cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) > CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) > CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(5,0)) > CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) > CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7


 -- !query
@@ -9417,12 +7967,7 @@ SELECT cast(1 as decimal(10, 0)) > cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) > CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) > CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(10,0)) > CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) > CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7


 -- !query
@@ -9431,12 +7976,7 @@ SELECT cast(1 as decimal(20, 0)) > cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) > CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) > CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(20,0)) > CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) > CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7


 -- !query
@@ -9445,12 +7985,7 @@ SELECT cast(1 as decimal(3, 0)) > cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(3,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7


 -- !query
@@ -9459,12 +7994,7 @@ SELECT cast(1 as decimal(5, 0)) > cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(5,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7


 -- !query
@@ -9473,12 +8003,7 @@ SELECT cast(1 as decimal(10, 0)) > cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(10,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7


 -- !query
@@ -9487,12 +8012,7 @@ SELECT cast(1 as decimal(20, 0)) > cast('2017-12-11 09:30:00.0' as timestamp) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(20,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) > CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7


 -- !query
@@ -9501,12 +8021,7 @@ SELECT cast(1 as decimal(3, 0)) > cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) > CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) > CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(3,0)) > CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) > CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7


 -- !query
@@ -9515,12 +8030,7 @@ SELECT cast(1 as decimal(5, 0)) > cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) > CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) > CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(5,0)) > CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) > CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7


 -- !query
@@ -9529,12 +8039,7 @@ SELECT cast(1 as decimal(10, 0)) > cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) > CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) > CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(10,0)) > CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) > CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7


 -- !query
@@ -9543,12 +8048,7 @@ SELECT cast(1 as decimal(20, 0)) > cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) > CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) > CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(20,0)) > CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) > CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7


 -- !query
@@ -9781,12 +8281,7 @@ SELECT cast('1' as binary) >= cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7


 -- !query
@@ -9795,12 +8290,7 @@ SELECT cast('1' as binary) >= cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7


 -- !query
@@ -9809,12 +8299,7 @@ SELECT cast('1' as binary) >= cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7


 -- !query
@@ -9823,12 +8308,7 @@ SELECT cast('1' as binary) >= cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) >= CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7


 -- !query
@@ -9837,12 +8317,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) >= cast(1 as decimal(3, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7


 -- !query
@@ -9851,12 +8326,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) >= cast(1 as decimal(5, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7


 -- !query
@@ -9865,12 +8335,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) >= cast(1 as decimal(10, 0)) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7


 -- !query
@@ -9879,12 +8344,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) >= cast(1 as decimal(20, 0)) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) >= CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7


 -- !query
@@ -9893,12 +8353,7 @@ SELECT cast('2017-12-11 09:30:00' as date) >= cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7


 -- !query
@@ -9907,12 +8362,7 @@ SELECT cast('2017-12-11 09:30:00' as date) >= cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7


 -- !query
@@ -9921,12 +8371,7 @@ SELECT cast('2017-12-11 09:30:00' as date) >= cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7


 -- !query
@@ -9935,12 +8380,7 @@ SELECT cast('2017-12-11 09:30:00' as date) >= cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) >= CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7


 -- !query
@@ -10205,12 +8645,7 @@ SELECT cast(1 as decimal(3, 0)) >= cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) >= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) >= CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(3,0)) >= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) >= CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7


 -- !query
@@ -10219,12 +8654,7 @@ SELECT cast(1 as decimal(5, 0)) >= cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) >= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) >= CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(5,0)) >= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) >= CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7


 -- !query
@@ -10233,12 +8663,7 @@ SELECT cast(1 as decimal(10, 0)) >= cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) >= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) >= CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(10,0)) >= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) >= CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7


 -- !query
@@ -10247,12 +8672,7 @@ SELECT cast(1 as decimal(20, 0)) >= cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) >= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) >= CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(20,0)) >= CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) >= CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7


 -- !query
@@ -10261,12 +8681,7 @@ SELECT cast(1 as decimal(3, 0)) >= cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) >= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) >= CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(3,0)) >= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) >= CAST(1 AS BOOLEAN))' (decimal(3,0) and boolean).; line 1 pos 7


 -- !query
@@ -10275,12 +8690,7 @@ SELECT cast(1 as decimal(5, 0)) >= cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) >= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) >= CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(5,0)) >= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) >= CAST(1 AS BOOLEAN))' (decimal(5,0) and boolean).; line 1 pos 7


 -- !query
@@ -10289,12 +8699,7 @@ SELECT cast(1 as decimal(10, 0)) >= cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) >= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) >= CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(10,0)) >= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) >= CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7


 -- !query
@@ -10303,12 +8708,7 @@ SELECT cast(1 as decimal(20, 0)) >= cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) >= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) >= CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(20,0)) >= CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) >= CAST(1 AS BOOLEAN))' (decimal(20,0) and boolean).; line 1 pos 7


 -- !query
@@ -10317,12 +8717,7 @@ SELECT cast(1 as decimal(3, 0)) >= cast('2017-12-11 09:30:00.0' as timestamp) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(3,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7


 -- !query
@@ -10331,12 +8726,7 @@ SELECT cast(1 as decimal(5, 0)) >= cast('2017-12-11 09:30:00.0' as timestamp) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(5,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7


 -- !query
@@ -10345,12 +8735,7 @@ SELECT cast(1 as decimal(10, 0)) >= cast('2017-12-11 09:30:00.0' as timestamp) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(10,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7


 -- !query
@@ -10359,12 +8744,7 @@ SELECT cast(1 as decimal(20, 0)) >= cast('2017-12-11 09:30:00.0' as timestamp) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(20,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) >= CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7


 -- !query
@@ -10373,12 +8753,7 @@ SELECT cast(1 as decimal(3, 0)) >= cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(3,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7


 -- !query
@@ -10387,12 +8762,7 @@ SELECT cast(1 as decimal(5, 0)) >= cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(5,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7


 -- !query
@@ -10401,12 +8771,7 @@ SELECT cast(1 as decimal(10, 0)) >= cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(10,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7


 -- !query
@@ -10415,12 +8780,7 @@ SELECT cast(1 as decimal(20, 0)) >= cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(20,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) >= CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7


 -- !query
@@ -10653,12 +9013,7 @@ SELECT cast('1' as binary) <> cast(1 as decimal(3, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(3,0)))' (binary and decimal(3,0)).; line 1 pos 7


 -- !query
@@ -10667,12 +9022,7 @@ SELECT cast('1' as binary) <> cast(1 as decimal(5, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(5,0)))' (binary and decimal(5,0)).; line 1 pos 7


 -- !query
@@ -10681,12 +9031,7 @@ SELECT cast('1' as binary) <> cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7


 -- !query
@@ -10695,12 +9040,7 @@ SELECT cast('1' as binary) <> cast(1 as decimal(20, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) = CAST(1 AS DECIMAL(20,0)))' (binary and decimal(20,0)).; line 1 pos 7


 -- !query
@@ -10709,12 +9049,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <> cast(1 as decimal(3, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(3,0)))' (timestamp and decimal(3,0)).; line 1 pos 7


 -- !query
@@ -10723,12 +9058,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <> cast(1 as decimal(5, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(5,0)))'
(timestamp and decimal(5,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(5,0)))' (timestamp and decimal(5,0)).; line 1 pos 7 -- !query @@ -10737,12 +9067,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <> cast(1 as decimal(10, 0)) F struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7 -- !query @@ -10751,12 +9076,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) <> cast(1 as decimal(20, 0)) F struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) = CAST(1 AS DECIMAL(20,0)))' (timestamp and decimal(20,0)).; line 1 pos 7 -- !query @@ -10765,12 +9085,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <> cast(1 as decimal(3, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(3,0)))' (date and decimal(3,0)).; line 1 pos 7 -- !query @@ -10779,12 +9094,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <> cast(1 as decimal(5, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(5,0)))' (date and decimal(5,0)).; line 1 pos 7 -- !query @@ -10793,12 +9103,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <> cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 
'(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7 -- !query @@ -10807,12 +9112,7 @@ SELECT cast('2017-12-11 09:30:00' as date) <> cast(1 as decimal(20, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) = CAST(1 AS DECIMAL(20,0)))' (date and decimal(20,0)).; line 1 pos 7 -- !query @@ -11077,12 +9377,7 @@ SELECT cast(1 as decimal(3, 0)) <> cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) = CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(3,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) = CAST('1' AS BINARY))' (decimal(3,0) and binary).; line 1 pos 7 -- !query @@ -11091,12 +9386,7 @@ SELECT cast(1 as decimal(5, 0)) <> cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) = CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(5,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) = CAST('1' AS BINARY))' (decimal(5,0) and binary).; line 1 pos 7 -- !query @@ -11105,12 +9395,7 @@ SELECT cast(1 as decimal(10, 0)) <> cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) = CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(10,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) = CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7 -- !query @@ -11119,12 +9404,7 @@ SELECT cast(1 as decimal(20, 0)) <> cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) = CAST('1' AS BINARY))' (decimal(20,0) and 
binary).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(20,0)) = CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) = CAST('1' AS BINARY))' (decimal(20,0) and binary).; line 1 pos 7 -- !query @@ -11165,12 +9445,7 @@ SELECT cast(1 as decimal(3, 0)) <> cast('2017-12-11 09:30:00.0' as timestamp) F struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(3,0) and timestamp).; line 1 pos 7 -- !query @@ -11179,12 +9454,7 @@ SELECT cast(1 as decimal(5, 0)) <> cast('2017-12-11 09:30:00.0' as timestamp) F struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(5,0) and timestamp).; line 1 pos 7 -- !query @@ -11193,12 +9463,7 @@ SELECT cast(1 as decimal(10, 0)) <> cast('2017-12-11 09:30:00.0' as timestamp) F struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7 -- !query @@ -11207,12 +9472,7 @@ SELECT cast(1 as decimal(20, 0)) <> cast('2017-12-11 09:30:00.0' as timestamp) F struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(20,0) and timestamp).; line 1 pos 7 -- !query @@ -11221,12 +9481,7 @@ SELECT cast(1 as decimal(3, 0)) <> cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot 
resolve '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(3,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(3,0) and date).; line 1 pos 7 -- !query @@ -11235,12 +9490,7 @@ SELECT cast(1 as decimal(5, 0)) <> cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(5,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(5,0) and date).; line 1 pos 7 -- !query @@ -11249,12 +9499,7 @@ SELECT cast(1 as decimal(10, 0)) <> cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7 -- !query @@ -11263,9 +9508,4 @@ SELECT cast(1 as decimal(20, 0)) <> cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(20,0)) = CAST('2017-12-11 09:30:00' AS DATE))' (decimal(20,0) and date).; line 1 pos 7 diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/division.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/division.sql.out index 39705d1854fb4..f08d2255f1840 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/division.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/division.sql.out @@ -77,12 +77,7 @@ SELECT cast(1 as tinyint) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS TINYINT) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS TINYINT) / CAST('1' AS BINARY))' (tinyint and binary).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS TINYINT) / CAST('1' AS BINARY))' due to data type mismatch: differing types in 
'(CAST(1 AS TINYINT) / CAST('1' AS BINARY))' (tinyint and binary).; line 1 pos 7 -- !query @@ -91,12 +86,7 @@ SELECT cast(1 as tinyint) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS TINYINT) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS TINYINT) / CAST(1 AS BOOLEAN))' (tinyint and boolean).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS TINYINT) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS TINYINT) / CAST(1 AS BOOLEAN))' (tinyint and boolean).; line 1 pos 7 -- !query @@ -105,12 +95,7 @@ SELECT cast(1 as tinyint) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS TINYINT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS TINYINT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (tinyint and timestamp).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS TINYINT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS TINYINT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (tinyint and timestamp).; line 1 pos 7 -- !query @@ -119,12 +104,7 @@ SELECT cast(1 as tinyint) / cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS TINYINT) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS TINYINT) / CAST('2017-12-11 09:30:00' AS DATE))' (tinyint and date).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS TINYINT) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS TINYINT) / CAST('2017-12-11 09:30:00' AS DATE))' (tinyint and date).; line 1 pos 7 -- !query @@ -197,12 +177,7 @@ SELECT cast(1 as smallint) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS SMALLINT) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS SMALLINT) / CAST('1' AS BINARY))' (smallint and binary).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS SMALLINT) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS SMALLINT) / CAST('1' AS BINARY))' (smallint and binary).; line 1 pos 7 -- !query @@ -211,12 +186,7 @@ SELECT cast(1 as smallint) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS SMALLINT) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS SMALLINT) / CAST(1 AS BOOLEAN))' (smallint and boolean).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS SMALLINT) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS SMALLINT) / CAST(1 AS BOOLEAN))' (smallint and boolean).; line 1 pos 7 -- !query @@ -225,12 +195,7 @@ SELECT cast(1 as smallint) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : 
"cannot resolve '(CAST(1 AS SMALLINT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS SMALLINT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (smallint and timestamp).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS SMALLINT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS SMALLINT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (smallint and timestamp).; line 1 pos 7 -- !query @@ -239,12 +204,7 @@ SELECT cast(1 as smallint) / cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS SMALLINT) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS SMALLINT) / CAST('2017-12-11 09:30:00' AS DATE))' (smallint and date).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS SMALLINT) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS SMALLINT) / CAST('2017-12-11 09:30:00' AS DATE))' (smallint and date).; line 1 pos 7 -- !query @@ -317,12 +277,7 @@ SELECT cast(1 as int) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS INT) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS INT) / CAST('1' AS BINARY))' (int and binary).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS INT) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS INT) / CAST('1' AS BINARY))' (int and binary).; line 1 pos 7 -- !query @@ -331,12 +286,7 @@ SELECT cast(1 as int) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS INT) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS INT) / CAST(1 AS BOOLEAN))' (int and boolean).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS INT) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS INT) / CAST(1 AS BOOLEAN))' (int and boolean).; line 1 pos 7 -- !query @@ -345,12 +295,7 @@ SELECT cast(1 as int) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS INT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS INT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (int and timestamp).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS INT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS INT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (int and timestamp).; line 1 pos 7 -- !query @@ -359,12 +304,7 @@ SELECT cast(1 as int) / cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS INT) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS INT) / CAST('2017-12-11 09:30:00' AS DATE))' (int and date).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS INT) / CAST('2017-12-11 09:30:00' AS DATE))' due to 
data type mismatch: differing types in '(CAST(1 AS INT) / CAST('2017-12-11 09:30:00' AS DATE))' (int and date).; line 1 pos 7 -- !query @@ -437,12 +377,7 @@ SELECT cast(1 as bigint) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS BIGINT) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS BIGINT) / CAST('1' AS BINARY))' (bigint and binary).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS BIGINT) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS BIGINT) / CAST('1' AS BINARY))' (bigint and binary).; line 1 pos 7 -- !query @@ -451,12 +386,7 @@ SELECT cast(1 as bigint) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS BIGINT) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS BIGINT) / CAST(1 AS BOOLEAN))' (bigint and boolean).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS BIGINT) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS BIGINT) / CAST(1 AS BOOLEAN))' (bigint and boolean).; line 1 pos 7 -- !query @@ -465,12 +395,7 @@ SELECT cast(1 as bigint) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS BIGINT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS BIGINT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (bigint and timestamp).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS BIGINT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS BIGINT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (bigint and timestamp).; line 1 pos 7 -- !query @@ -479,12 +404,7 @@ SELECT cast(1 as bigint) / cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS BIGINT) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS BIGINT) / CAST('2017-12-11 09:30:00' AS DATE))' (bigint and date).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS BIGINT) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS BIGINT) / CAST('2017-12-11 09:30:00' AS DATE))' (bigint and date).; line 1 pos 7 -- !query @@ -557,12 +477,7 @@ SELECT cast(1 as float) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS FLOAT) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS FLOAT) / CAST('1' AS BINARY))' (float and binary).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS FLOAT) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS FLOAT) / CAST('1' AS BINARY))' (float and binary).; line 1 pos 7 -- !query @@ -571,12 +486,7 @@ SELECT cast(1 as float) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS 
FLOAT) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS FLOAT) / CAST(1 AS BOOLEAN))' (float and boolean).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS FLOAT) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS FLOAT) / CAST(1 AS BOOLEAN))' (float and boolean).; line 1 pos 7 -- !query @@ -585,12 +495,7 @@ SELECT cast(1 as float) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS FLOAT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS FLOAT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (float and timestamp).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS FLOAT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS FLOAT) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (float and timestamp).; line 1 pos 7 -- !query @@ -599,12 +504,7 @@ SELECT cast(1 as float) / cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS FLOAT) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS FLOAT) / CAST('2017-12-11 09:30:00' AS DATE))' (float and date).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS FLOAT) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS FLOAT) / CAST('2017-12-11 09:30:00' AS DATE))' (float and date).; line 1 pos 7 -- !query @@ -677,12 +577,7 @@ SELECT cast(1 as double) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DOUBLE) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DOUBLE) / CAST('1' AS BINARY))' (double and binary).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DOUBLE) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DOUBLE) / CAST('1' AS BINARY))' (double and binary).; line 1 pos 7 -- !query @@ -691,12 +586,7 @@ SELECT cast(1 as double) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DOUBLE) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DOUBLE) / CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DOUBLE) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DOUBLE) / CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7 -- !query @@ -705,12 +595,7 @@ SELECT cast(1 as double) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DOUBLE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DOUBLE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DOUBLE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DOUBLE) / 
CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7 -- !query @@ -719,12 +604,7 @@ SELECT cast(1 as double) / cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DOUBLE) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DOUBLE) / CAST('2017-12-11 09:30:00' AS DATE))' (double and date).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DOUBLE) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DOUBLE) / CAST('2017-12-11 09:30:00' AS DATE))' (double and date).; line 1 pos 7 -- !query @@ -797,12 +677,7 @@ SELECT cast(1 as decimal(10, 0)) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST('1' AS BINARY))' (decimal(10,0) and binary).; line 1 pos 7 -- !query @@ -811,12 +686,7 @@ SELECT cast(1 as decimal(10, 0)) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST(1 AS BOOLEAN))' (decimal(10,0) and boolean).; line 1 pos 7 -- !query @@ -825,12 +695,7 @@ SELECT cast(1 as decimal(10, 0)) / cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (decimal(10,0) and timestamp).; line 1 pos 7 -- !query @@ -839,12 +704,7 @@ SELECT cast(1 as decimal(10, 0)) / cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS DECIMAL(10,0)) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS DECIMAL(10,0)) / CAST('2017-12-11 09:30:00' AS DATE))' (decimal(10,0) and date).; line 1 pos 7 -- !query @@ 
-917,12 +777,7 @@ SELECT cast(1 as string) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST('1' AS BINARY))' (double and binary).; line 1 pos 7" - } -} +cannot resolve '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST('1' AS BINARY))' (double and binary).; line 1 pos 7 -- !query @@ -931,12 +786,7 @@ SELECT cast(1 as string) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7" - } -} +cannot resolve '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7 -- !query @@ -945,12 +795,7 @@ SELECT cast(1 as string) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7" - } -} +cannot resolve '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7 -- !query @@ -959,12 +804,7 @@ SELECT cast(1 as string) / cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST('2017-12-11 09:30:00' AS DATE))' (double and date).; line 1 pos 7" - } -} +cannot resolve '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(CAST(1 AS STRING) AS DOUBLE) / CAST('2017-12-11 09:30:00' AS DATE))' (double and date).; line 1 pos 7 -- !query @@ -973,12 +813,7 @@ SELECT cast('1' as binary) / cast(1 as tinyint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS TINYINT))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS TINYINT))' (binary and tinyint).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS TINYINT))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS TINYINT))' (binary and tinyint).; line 1 pos 7 -- !query @@ -987,12 +822,7 @@ SELECT cast('1' as binary) / cast(1 as smallint) FROM t struct<> -- !query output 
org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS SMALLINT))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS SMALLINT))' (binary and smallint).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS SMALLINT))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS SMALLINT))' (binary and smallint).; line 1 pos 7 -- !query @@ -1001,12 +831,7 @@ SELECT cast('1' as binary) / cast(1 as int) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS INT))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS INT))' (binary and int).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS INT))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS INT))' (binary and int).; line 1 pos 7 -- !query @@ -1015,12 +840,7 @@ SELECT cast('1' as binary) / cast(1 as bigint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS BIGINT))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS BIGINT))' (binary and bigint).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS BIGINT))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS BIGINT))' (binary and bigint).; line 1 pos 7 -- !query @@ -1029,12 +849,7 @@ SELECT cast('1' as binary) / cast(1 as float) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS FLOAT))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS FLOAT))' (binary and float).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS FLOAT))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS FLOAT))' (binary and float).; line 1 pos 7 -- !query @@ -1043,12 +858,7 @@ SELECT cast('1' as binary) / cast(1 as double) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS DOUBLE))' (binary and double).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS DOUBLE))' (binary and double).; line 1 pos 7 -- !query @@ -1057,12 +867,7 @@ SELECT cast('1' as binary) / cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS DECIMAL(10,0)))' (binary and decimal(10,0)).; line 1 pos 7 -- !query @@ -1071,12 +876,7 @@ SELECT 
cast('1' as binary) / cast(1 as string) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) / CAST(CAST(1 AS STRING) AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(CAST(1 AS STRING) AS DOUBLE))' (binary and double).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) / CAST(CAST(1 AS STRING) AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(CAST(1 AS STRING) AS DOUBLE))' (binary and double).; line 1 pos 7 -- !query @@ -1085,12 +885,7 @@ SELECT cast('1' as binary) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) / CAST('1' AS BINARY))' due to data type mismatch: '(CAST('1' AS BINARY) / CAST('1' AS BINARY))' requires (double or decimal) type, not binary; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) / CAST('1' AS BINARY))' due to data type mismatch: '(CAST('1' AS BINARY) / CAST('1' AS BINARY))' requires (double or decimal) type, not binary; line 1 pos 7 -- !query @@ -1099,12 +894,7 @@ SELECT cast('1' as binary) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS BOOLEAN))' (binary and boolean).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST(1 AS BOOLEAN))' (binary and boolean).; line 1 pos 7 -- !query @@ -1113,12 +903,7 @@ SELECT cast('1' as binary) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (binary and timestamp).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (binary and timestamp).; line 1 pos 7 -- !query @@ -1127,12 +912,7 @@ SELECT cast('1' as binary) / cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST('2017-12-11 09:30:00' AS DATE))' (binary and date).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST('2017-12-11 09:30:00' AS DATE))' (binary and date).; line 1 pos 7 -- !query @@ -1141,12 +921,7 @@ SELECT cast(1 as boolean) / cast(1 as tinyint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS TINYINT))' due to 
data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS TINYINT))' (boolean and tinyint).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS TINYINT))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS TINYINT))' (boolean and tinyint).; line 1 pos 7 -- !query @@ -1155,12 +930,7 @@ SELECT cast(1 as boolean) / cast(1 as smallint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS SMALLINT))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS SMALLINT))' (boolean and smallint).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS SMALLINT))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS SMALLINT))' (boolean and smallint).; line 1 pos 7 -- !query @@ -1169,12 +939,7 @@ SELECT cast(1 as boolean) / cast(1 as int) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS INT))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS INT))' (boolean and int).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS INT))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS INT))' (boolean and int).; line 1 pos 7 -- !query @@ -1183,12 +948,7 @@ SELECT cast(1 as boolean) / cast(1 as bigint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS BIGINT))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS BIGINT))' (boolean and bigint).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS BIGINT))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS BIGINT))' (boolean and bigint).; line 1 pos 7 -- !query @@ -1197,12 +957,7 @@ SELECT cast(1 as boolean) / cast(1 as float) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS FLOAT))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS FLOAT))' (boolean and float).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS FLOAT))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS FLOAT))' (boolean and float).; line 1 pos 7 -- !query @@ -1211,12 +966,7 @@ SELECT cast(1 as boolean) / cast(1 as double) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS DOUBLE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS DOUBLE))' (boolean and double).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS DOUBLE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS DOUBLE))' (boolean and double).; line 1 pos 7 -- !query @@ -1225,12 +975,7 @@ SELECT cast(1 as boolean) / cast(1 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 
AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS DECIMAL(10,0)))' (boolean and decimal(10,0)).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(1 AS DECIMAL(10,0)))' (boolean and decimal(10,0)).; line 1 pos 7 -- !query @@ -1239,12 +984,7 @@ SELECT cast(1 as boolean) / cast(1 as string) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS BOOLEAN) / CAST(CAST(1 AS STRING) AS DOUBLE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(CAST(1 AS STRING) AS DOUBLE))' (boolean and double).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS BOOLEAN) / CAST(CAST(1 AS STRING) AS DOUBLE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST(CAST(1 AS STRING) AS DOUBLE))' (boolean and double).; line 1 pos 7 -- !query @@ -1253,12 +993,7 @@ SELECT cast(1 as boolean) / cast('1' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS BOOLEAN) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST('1' AS BINARY))' (boolean and binary).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS BOOLEAN) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST('1' AS BINARY))' (boolean and binary).; line 1 pos 7 -- !query @@ -1267,12 +1002,7 @@ SELECT cast(1 as boolean) / cast(1 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS BOOLEAN))' due to data type mismatch: '(CAST(1 AS BOOLEAN) / CAST(1 AS BOOLEAN))' requires (double or decimal) type, not boolean; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS BOOLEAN) / CAST(1 AS BOOLEAN))' due to data type mismatch: '(CAST(1 AS BOOLEAN) / CAST(1 AS BOOLEAN))' requires (double or decimal) type, not boolean; line 1 pos 7 -- !query @@ -1281,12 +1011,7 @@ SELECT cast(1 as boolean) / cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS BOOLEAN) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (boolean and timestamp).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS BOOLEAN) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (boolean and timestamp).; line 1 pos 7 -- !query @@ -1295,12 +1020,7 @@ SELECT cast(1 as boolean) / cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS BOOLEAN) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST('2017-12-11 09:30:00' AS DATE))' (boolean and date).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS BOOLEAN) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: 
differing types in '(CAST(1 AS BOOLEAN) / CAST('2017-12-11 09:30:00' AS DATE))' (boolean and date).; line 1 pos 7


 -- !query
@@ -1309,12 +1029,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as tinyint) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS TINYINT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS TINYINT))' (timestamp and tinyint).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS TINYINT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS TINYINT))' (timestamp and tinyint).; line 1 pos 7


 -- !query
@@ -1323,12 +1038,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as smallint) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS SMALLINT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS SMALLINT))' (timestamp and smallint).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS SMALLINT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS SMALLINT))' (timestamp and smallint).; line 1 pos 7


 -- !query
@@ -1337,12 +1047,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as int) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS INT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS INT))' (timestamp and int).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS INT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS INT))' (timestamp and int).; line 1 pos 7


 -- !query
@@ -1351,12 +1056,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as bigint) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS BIGINT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS BIGINT))' (timestamp and bigint).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS BIGINT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS BIGINT))' (timestamp and bigint).; line 1 pos 7


 -- !query
@@ -1365,12 +1065,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as float) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS FLOAT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS FLOAT))' (timestamp and float).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS FLOAT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS FLOAT))' (timestamp and float).; line 1 pos 7


 -- !query
@@ -1379,12 +1074,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as double) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DOUBLE))' (timestamp and double).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DOUBLE))' (timestamp and double).; line 1 pos 7


 -- !query
@@ -1393,12 +1083,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as decimal(10, 0)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS DECIMAL(10,0)))' (timestamp and decimal(10,0)).; line 1 pos 7


 -- !query
@@ -1407,12 +1092,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as string) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(CAST(1 AS STRING) AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(CAST(1 AS STRING) AS DOUBLE))' (timestamp and double).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(CAST(1 AS STRING) AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(CAST(1 AS STRING) AS DOUBLE))' (timestamp and double).; line 1 pos 7


 -- !query
@@ -1421,12 +1101,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('1' AS BINARY))' (timestamp and binary).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('1' AS BINARY))' (timestamp and binary).; line 1 pos 7


 -- !query
@@ -1435,12 +1110,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS BOOLEAN))' (timestamp and boolean).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST(1 AS BOOLEAN))' (timestamp and boolean).; line 1 pos 7


 -- !query
@@ -1449,12 +1119,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast('2017-12-11 09:30:00.0'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' requires (double or decimal) type, not timestamp; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' requires (double or decimal) type, not timestamp; line 1 pos 7


 -- !query
@@ -1463,12 +1128,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / cast('2017-12-11 09:30:00' a
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('2017-12-11 09:30:00' AS DATE))' (timestamp and date).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('2017-12-11 09:30:00' AS DATE))' (timestamp and date).; line 1 pos 7


 -- !query
@@ -1477,12 +1137,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as tinyint) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS TINYINT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS TINYINT))' (date and tinyint).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS TINYINT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS TINYINT))' (date and tinyint).; line 1 pos 7


 -- !query
@@ -1491,12 +1146,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as smallint) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS SMALLINT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS SMALLINT))' (date and smallint).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS SMALLINT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS SMALLINT))' (date and smallint).; line 1 pos 7


 -- !query
@@ -1505,12 +1155,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as int) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS INT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS INT))' (date and int).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS INT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS INT))' (date and int).; line 1 pos 7


 -- !query
@@ -1519,12 +1164,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as bigint) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS BIGINT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS BIGINT))' (date and bigint).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS BIGINT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS BIGINT))' (date and bigint).; line 1 pos 7


 -- !query
@@ -1533,12 +1173,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as float) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS FLOAT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS FLOAT))' (date and float).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS FLOAT))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS FLOAT))' (date and float).; line 1 pos 7


 -- !query
@@ -1547,12 +1182,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as double) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS DOUBLE))' (date and double).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS DOUBLE))' (date and double).; line 1 pos 7


 -- !query
@@ -1561,12 +1191,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as decimal(10, 0)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS DECIMAL(10,0)))' (date and decimal(10,0)).; line 1 pos 7


 -- !query
@@ -1575,12 +1200,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as string) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(CAST(1 AS STRING) AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(CAST(1 AS STRING) AS DOUBLE))' (date and double).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(CAST(1 AS STRING) AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(CAST(1 AS STRING) AS DOUBLE))' (date and double).; line 1 pos 7


 -- !query
@@ -1589,12 +1209,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('1' AS BINARY))' (date and binary).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('1' AS BINARY))' (date and binary).; line 1 pos 7


 -- !query
@@ -1603,12 +1218,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS BOOLEAN))' (date and boolean).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST(1 AS BOOLEAN))' (date and boolean).; line 1 pos 7


 -- !query
@@ -1617,12 +1227,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast('2017-12-11 09:30:00.0' as tim
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (date and timestamp).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (date and timestamp).; line 1 pos 7


 -- !query
@@ -1631,9 +1236,4 @@ SELECT cast('2017-12-11 09:30:00' as date) / cast('2017-12-11 09:30:00' as date)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('2017-12-11 09:30:00' AS DATE))' requires (double or decimal) type, not date; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('2017-12-11 09:30:00' AS DATE))' requires (double or decimal) type, not date; line 1 pos 7
diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/ifCoercion.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/ifCoercion.sql.out
index 39e1e9e48ad35..2ef149f5f379c 100644
--- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/ifCoercion.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/ifCoercion.sql.out
@@ -77,12 +77,7 @@ SELECT IF(true, cast(1 as tinyint), cast('2' as binary)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST(1 AS TINYINT), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS TINYINT), CAST('2' AS BINARY)))' (tinyint and binary).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST(1 AS TINYINT), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS TINYINT), CAST('2' AS BINARY)))' (tinyint and binary).; line 1 pos 7


 -- !query
@@ -91,12 +86,7 @@ SELECT IF(true, cast(1 as tinyint), cast(2 as boolean)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST(1 AS TINYINT), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS TINYINT), CAST(2 AS BOOLEAN)))' (tinyint and boolean).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST(1 AS TINYINT), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS TINYINT), CAST(2 AS BOOLEAN)))' (tinyint and boolean).; line 1 pos 7


 -- !query
@@ -105,12 +95,7 @@ SELECT IF(true, cast(1 as tinyint), cast('2017-12-11 09:30:00.0' as timestamp))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST(1 AS TINYINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS TINYINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (tinyint and timestamp).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST(1 AS TINYINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS TINYINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (tinyint and timestamp).; line 1 pos 7


 -- !query
@@ -119,12 +104,7 @@ SELECT IF(true, cast(1 as tinyint), cast('2017-12-11 09:30:00' as date)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST(1 AS TINYINT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS TINYINT), CAST('2017-12-11 09:30:00' AS DATE)))' (tinyint and date).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST(1 AS TINYINT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS TINYINT), CAST('2017-12-11 09:30:00' AS DATE)))' (tinyint and date).; line 1 pos 7


 -- !query
@@ -197,12 +177,7 @@ SELECT IF(true, cast(1 as smallint), cast('2' as binary)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST(1 AS SMALLINT), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS SMALLINT), CAST('2' AS BINARY)))' (smallint and binary).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST(1 AS SMALLINT), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS SMALLINT), CAST('2' AS BINARY)))' (smallint and binary).; line 1 pos 7


 -- !query
@@ -211,12 +186,7 @@ SELECT IF(true, cast(1 as smallint), cast(2 as boolean)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST(1 AS SMALLINT), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS SMALLINT), CAST(2 AS BOOLEAN)))' (smallint and boolean).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST(1 AS SMALLINT), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS SMALLINT), CAST(2 AS BOOLEAN)))' (smallint and boolean).; line 1 pos 7


 -- !query
@@ -225,12 +195,7 @@ SELECT IF(true, cast(1 as smallint), cast('2017-12-11 09:30:00.0' as timestamp))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST(1 AS SMALLINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS SMALLINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (smallint and timestamp).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST(1 AS SMALLINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS SMALLINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (smallint and timestamp).; line 1 pos 7


 -- !query
@@ -239,12 +204,7 @@ SELECT IF(true, cast(1 as smallint), cast('2017-12-11 09:30:00' as date)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST(1 AS SMALLINT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS SMALLINT), CAST('2017-12-11 09:30:00' AS DATE)))' (smallint and date).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST(1 AS SMALLINT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS SMALLINT), CAST('2017-12-11 09:30:00' AS DATE)))' (smallint and date).; line 1 pos 7


 -- !query
@@ -317,12 +277,7 @@ SELECT IF(true, cast(1 as int), cast('2' as binary)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST(1 AS INT), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS INT), CAST('2' AS BINARY)))' (int and binary).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST(1 AS INT), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS INT), CAST('2' AS BINARY)))' (int and binary).; line 1 pos 7


 -- !query
@@ -331,12 +286,7 @@ SELECT IF(true, cast(1 as int), cast(2 as boolean)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST(1 AS INT), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS INT), CAST(2 AS BOOLEAN)))' (int and boolean).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST(1 AS INT), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS INT), CAST(2 AS BOOLEAN)))' (int and boolean).; line 1 pos 7


 -- !query
@@ -345,12 +295,7 @@ SELECT IF(true, cast(1 as int), cast('2017-12-11 09:30:00.0' as timestamp)) FROM
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST(1 AS INT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS INT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (int and timestamp).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST(1 AS INT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS INT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (int and timestamp).; line 1 pos 7


 -- !query
@@ -359,12 +304,7 @@ SELECT IF(true, cast(1 as int), cast('2017-12-11 09:30:00' as date)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST(1 AS INT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS INT), CAST('2017-12-11 09:30:00' AS DATE)))' (int and date).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST(1 AS INT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS INT), CAST('2017-12-11 09:30:00' AS DATE)))' (int and date).; line 1 pos 7


 -- !query
@@ -437,12 +377,7 @@ SELECT IF(true, cast(1 as bigint), cast('2' as binary)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST(1 AS BIGINT), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BIGINT), CAST('2' AS BINARY)))' (bigint and binary).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST(1 AS BIGINT), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BIGINT), CAST('2' AS BINARY)))' (bigint and binary).; line 1 pos 7


 -- !query
@@ -451,12 +386,7 @@ SELECT IF(true, cast(1 as bigint), cast(2 as boolean)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST(1 AS BIGINT), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BIGINT), CAST(2 AS BOOLEAN)))' (bigint and boolean).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST(1 AS BIGINT), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BIGINT), CAST(2 AS BOOLEAN)))' (bigint and boolean).; line 1 pos 7


 -- !query
@@ -465,12 +395,7 @@ SELECT IF(true, cast(1 as bigint), cast('2017-12-11 09:30:00.0' as timestamp)) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST(1 AS BIGINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BIGINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (bigint and timestamp).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST(1 AS BIGINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BIGINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (bigint and timestamp).; line 1 pos 7


 -- !query
@@ -479,12 +404,7 @@ SELECT IF(true, cast(1 as bigint), cast('2017-12-11 09:30:00' as date)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST(1 AS BIGINT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BIGINT), CAST('2017-12-11 09:30:00' AS DATE)))' (bigint and date).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST(1 AS BIGINT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BIGINT), CAST('2017-12-11 09:30:00' AS DATE)))' (bigint and date).; line 1 pos 7


 -- !query
@@ -557,12 +477,7 @@ SELECT IF(true, cast(1 as float), cast('2' as binary)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST(1 AS FLOAT), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS FLOAT), CAST('2' AS BINARY)))' (float and binary).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST(1 AS FLOAT), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS FLOAT), CAST('2' AS BINARY)))' (float and binary).; line 1 pos 7


 -- !query
@@ -571,12 +486,7 @@ SELECT IF(true, cast(1 as float), cast(2 as boolean)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST(1 AS FLOAT), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS FLOAT), CAST(2 AS BOOLEAN)))' (float and boolean).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST(1 AS FLOAT), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS FLOAT), CAST(2 AS BOOLEAN)))' (float and boolean).; line 1 pos 7


 -- !query
@@ -585,12 +495,7 @@ SELECT IF(true, cast(1 as float), cast('2017-12-11 09:30:00.0' as timestamp)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST(1 AS FLOAT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS FLOAT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (float and timestamp).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST(1 AS FLOAT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS FLOAT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (float and timestamp).; line 1 pos 7


 -- !query
@@ -599,12 +504,7 @@ SELECT IF(true, cast(1 as float), cast('2017-12-11 09:30:00' as date)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST(1 AS FLOAT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS FLOAT), CAST('2017-12-11 09:30:00' AS DATE)))' (float and date).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST(1 AS FLOAT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS FLOAT), CAST('2017-12-11 09:30:00' AS DATE)))' (float and date).; line 1 pos 7


 -- !query
@@ -677,12 +577,7 @@ SELECT IF(true, cast(1 as double), cast('2' as binary)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST(1 AS DOUBLE), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DOUBLE), CAST('2' AS BINARY)))' (double and binary).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST(1 AS DOUBLE), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DOUBLE), CAST('2' AS BINARY)))' (double and binary).; line 1 pos 7


 -- !query
@@ -691,12 +586,7 @@ SELECT IF(true, cast(1 as double), cast(2 as boolean)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST(1 AS DOUBLE), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DOUBLE), CAST(2 AS BOOLEAN)))' (double and boolean).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST(1 AS DOUBLE), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DOUBLE), CAST(2 AS BOOLEAN)))' (double and boolean).; line 1 pos 7


 -- !query
@@ -705,12 +595,7 @@ SELECT IF(true, cast(1 as double), cast('2017-12-11 09:30:00.0' as timestamp)) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST(1 AS DOUBLE), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DOUBLE), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (double and timestamp).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST(1 AS DOUBLE), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DOUBLE), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (double and timestamp).; line 1 pos 7


 -- !query
@@ -719,12 +604,7 @@ SELECT IF(true, cast(1 as double), cast('2017-12-11 09:30:00' as date)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST(1 AS DOUBLE), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DOUBLE), CAST('2017-12-11 09:30:00' AS DATE)))' (double and date).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST(1 AS DOUBLE), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DOUBLE), CAST('2017-12-11 09:30:00' AS DATE)))' (double and date).; line 1 pos 7


 -- !query
@@ -797,12 +677,7 @@ SELECT IF(true, cast(1 as decimal(10, 0)), cast('2' as binary)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST('2' AS BINARY)))' (decimal(10,0) and binary).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST('2' AS BINARY)))' (decimal(10,0) and binary).; line 1 pos 7


 -- !query
@@ -811,12 +686,7 @@ SELECT IF(true, cast(1 as decimal(10, 0)), cast(2 as boolean)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST(2 AS BOOLEAN)))' (decimal(10,0) and boolean).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST(2 AS BOOLEAN)))' (decimal(10,0) and boolean).; line 1 pos 7


 -- !query
@@ -825,12 +695,7 @@ SELECT IF(true, cast(1 as decimal(10, 0)), cast('2017-12-11 09:30:00.0' as times
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (decimal(10,0) and timestamp).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (decimal(10,0) and timestamp).; line 1 pos 7


 -- !query
@@ -839,12 +704,7 @@ SELECT IF(true, cast(1 as decimal(10, 0)), cast('2017-12-11 09:30:00' as date))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00' AS DATE)))' (decimal(10,0) and date).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00' AS DATE)))' (decimal(10,0) and date).; line 1 pos 7


 -- !query
@@ -917,12 +777,7 @@ SELECT IF(true, cast(1 as string), cast('2' as binary)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST(1 AS STRING), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS STRING), CAST('2' AS BINARY)))' (string and binary).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST(1 AS STRING), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS STRING), CAST('2' AS BINARY)))' (string and binary).; line 1 pos 7


 -- !query
@@ -931,12 +786,7 @@ SELECT IF(true, cast(1 as string), cast(2 as boolean)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST(1 AS STRING), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS STRING), CAST(2 AS BOOLEAN)))' (string and boolean).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST(1 AS STRING), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS STRING), CAST(2 AS BOOLEAN)))' (string and boolean).; line 1 pos 7


 -- !query
@@ -961,12 +811,7 @@ SELECT IF(true, cast('1' as binary), cast(2 as tinyint)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS TINYINT)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS TINYINT)))' (binary and tinyint).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS TINYINT)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS TINYINT)))' (binary and tinyint).; line 1 pos 7


 -- !query
@@ -975,12 +820,7 @@ SELECT IF(true, cast('1' as binary), cast(2 as smallint)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS SMALLINT)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS SMALLINT)))' (binary and smallint).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS SMALLINT)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS SMALLINT)))' (binary and smallint).; line 1 pos 7


 -- !query
@@ -989,12 +829,7 @@ SELECT IF(true, cast('1' as binary), cast(2 as int)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS INT)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS INT)))' (binary and int).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS INT)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS INT)))' (binary and int).; line 1 pos 7


 -- !query
@@ -1003,12 +838,7 @@ SELECT IF(true, cast('1' as binary), cast(2 as bigint)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS BIGINT)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS BIGINT)))' (binary and bigint).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS BIGINT)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS BIGINT)))' (binary and bigint).; line 1 pos 7


 -- !query
@@ -1017,12 +847,7 @@ SELECT IF(true, cast('1' as binary), cast(2 as float)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS FLOAT)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS FLOAT)))' (binary and float).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS FLOAT)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS FLOAT)))' (binary and float).; line 1 pos 7


 -- !query
@@ -1031,12 +856,7 @@ SELECT IF(true, cast('1' as binary), cast(2 as double)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS DOUBLE)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS DOUBLE)))' (binary and double).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS DOUBLE)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS DOUBLE)))' (binary and double).; line 1 pos 7


 -- !query
@@ -1045,12 +865,7 @@ SELECT IF(true, cast('1' as binary), cast(2 as decimal(10, 0))) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS DECIMAL(10,0))))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS DECIMAL(10,0))))' (binary and decimal(10,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS DECIMAL(10,0))))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS DECIMAL(10,0))))' (binary and decimal(10,0)).; line 1 pos 7


 -- !query
@@ -1059,12 +874,7 @@ SELECT IF(true, cast('1' as binary), cast(2 as string)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS STRING)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS STRING)))' (binary and string).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS STRING)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS STRING)))' (binary and string).; line 1 pos 7


 -- !query
@@ -1081,12 +891,7 @@ SELECT IF(true, cast('1' as binary), cast(2 as boolean)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS BOOLEAN)))' (binary and boolean).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST('1' AS BINARY), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST(2 AS BOOLEAN)))' (binary and boolean).; line 1 pos 7


 -- !query
@@ -1095,12 +900,7 @@ SELECT IF(true, cast('1' as binary), cast('2017-12-11 09:30:00.0' as timestamp))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST('1' AS BINARY), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (binary and timestamp).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST('1' AS BINARY), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (binary and timestamp).; line 1 pos 7


 -- !query
@@ -1109,12 +909,7 @@ SELECT IF(true, cast('1' as binary), cast('2017-12-11 09:30:00' as date)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST('1' AS BINARY), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST('2017-12-11 09:30:00' AS DATE)))' (binary and date).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST('1' AS BINARY), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST('1' AS BINARY), CAST('2017-12-11 09:30:00' AS DATE)))' (binary and date).; line 1 pos 7


 -- !query
@@ -1123,12 +918,7 @@ SELECT IF(true, cast(1 as boolean), cast(2 as tinyint)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS TINYINT)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS TINYINT)))' (boolean and tinyint).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS TINYINT)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS TINYINT)))' (boolean and tinyint).; line 1 pos 7


 -- !query
@@ -1137,12 +927,7 @@ SELECT IF(true, cast(1 as boolean), cast(2 as smallint)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS SMALLINT)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS SMALLINT)))' (boolean and smallint).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS SMALLINT)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS SMALLINT)))' (boolean and smallint).; line 1 pos 7


 -- !query
@@ -1151,12 +936,7 @@ SELECT IF(true, cast(1 as boolean), cast(2 as int)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS INT)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS INT)))' (boolean and int).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS INT)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS INT)))' (boolean and int).; line 1 pos 7


 -- !query
@@ -1165,12 +945,7 @@ SELECT IF(true, cast(1 as boolean), cast(2 as bigint)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS BIGINT)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS BIGINT)))' (boolean and bigint).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS BIGINT)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS BIGINT)))' (boolean and bigint).; line 1 pos 7


 -- !query
@@ -1179,12 +954,7 @@ SELECT IF(true, cast(1 as boolean), cast(2 as float)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS FLOAT)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS FLOAT)))' (boolean and float).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS FLOAT)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS FLOAT)))' (boolean and float).; line 1 pos 7


 -- !query
@@ -1193,12 +963,7 @@ SELECT IF(true, cast(1 as boolean), cast(2 as double)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS DOUBLE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS DOUBLE)))' (boolean and double).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS DOUBLE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS DOUBLE)))' (boolean and double).; line 1 pos 7


 -- !query
@@ -1207,12 +972,7 @@ SELECT IF(true, cast(1 as boolean), cast(2 as decimal(10, 0))) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS DECIMAL(10,0))))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS DECIMAL(10,0))))' (boolean and decimal(10,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS DECIMAL(10,0))))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS DECIMAL(10,0))))' (boolean and decimal(10,0)).; line 1 pos 7


 -- !query
@@ -1221,12 +981,7 @@ SELECT IF(true, cast(1 as boolean), cast(2 as string)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS STRING)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS STRING)))' (boolean and string).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS STRING)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST(2 AS STRING)))' (boolean and string).; line 1 pos 7


 -- !query
@@ -1235,12 +990,7 @@ SELECT IF(true, cast(1 as boolean), cast('2' as binary)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST('2' AS BINARY)))' (boolean and binary).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST('2' AS BINARY)))' (boolean and binary).; line 1 pos 7


 -- !query
@@ -1257,12 +1007,7 @@ SELECT IF(true, cast(1 as boolean), cast('2017-12-11 09:30:00.0' as timestamp))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (boolean and timestamp).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' (boolean and timestamp).; line 1 pos 7


 -- !query
@@ -1271,12 +1016,7 @@ SELECT IF(true, cast(1 as boolean), cast('2017-12-11 09:30:00' as date)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST('2017-12-11 09:30:00' AS DATE)))' (boolean and date).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST(1 AS BOOLEAN), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: differing types in '(IF(true, CAST(1 AS BOOLEAN), CAST('2017-12-11 09:30:00' AS DATE)))' (boolean and date).; line 1 pos 7


 -- !query
@@ -1285,12 +1025,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00.0' as timestamp), cast(2 as tinyint))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS TINYINT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS TINYINT)))' (timestamp and tinyint).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS TINYINT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS TINYINT)))' (timestamp and tinyint).; line 1 pos 7


 -- !query
@@ -1299,12 +1034,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00.0' as timestamp), cast(2 as smallint))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS SMALLINT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS SMALLINT)))' (timestamp and smallint).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS SMALLINT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS SMALLINT)))' (timestamp and smallint).; line 1 pos 7


 -- !query
@@ -1313,12 +1043,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00.0' as timestamp), cast(2 as int)) FROM
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS INT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS INT)))' (timestamp and int).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS INT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS INT)))' (timestamp and int).; line 1 pos 7


 -- !query
@@ -1327,12 +1052,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00.0' as timestamp), cast(2 as bigint)) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS BIGINT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS BIGINT)))' (timestamp and bigint).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS BIGINT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS BIGINT)))' (timestamp and bigint).; line 1 pos 7


 -- !query
@@ -1341,12 +1061,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00.0' as timestamp), cast(2 as float)) FR
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS FLOAT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS FLOAT)))' (timestamp and float).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS FLOAT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS FLOAT)))' (timestamp and float).; line 1 pos 7


 -- !query
@@ -1355,12 +1070,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00.0' as timestamp), cast(2 as double)) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS DOUBLE)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS DOUBLE)))' (timestamp and double).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS DOUBLE)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS DOUBLE)))' (timestamp and double).; line 1 pos 7


 -- !query
@@ -1369,12 +1079,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00.0' as timestamp), cast(2 as decimal(10
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS DECIMAL(10,0))))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS DECIMAL(10,0))))' (timestamp and decimal(10,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS DECIMAL(10,0))))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS DECIMAL(10,0))))' (timestamp and decimal(10,0)).; line 1 pos 7


 -- !query
@@ -1391,12 +1096,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00.0' as timestamp), cast('2' as binary))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST('2' AS BINARY)))' (timestamp and binary).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST('2' AS BINARY)))' (timestamp and binary).; line 1 pos 7


 -- !query
@@ -1405,12 +1105,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00.0' as timestamp), cast(2 as boolean))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS BOOLEAN)))' (timestamp and boolean).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(2 AS BOOLEAN)))' (timestamp and boolean).; line 1 pos 7


 -- !query
@@ -1435,12 +1130,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00' as date), cast(2 as tinyint)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS TINYINT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS TINYINT)))' (date and tinyint).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS TINYINT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS TINYINT)))' (date and tinyint).; line 1 pos 7


 -- !query
@@ -1449,12 +1139,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00' as date), cast(2 as smallint)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS SMALLINT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS SMALLINT)))' (date and smallint).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS SMALLINT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS SMALLINT)))' (date and smallint).; line 1 pos 7


 -- !query
@@ -1463,12 +1148,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00' as date), cast(2 as int)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS INT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS INT)))' (date and int).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS INT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS INT)))' (date and int).; line 1 pos 7


 -- !query
@@ -1477,12 +1157,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00' as date), cast(2 as bigint)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS BIGINT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS BIGINT)))' (date and bigint).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS BIGINT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS BIGINT)))' (date and bigint).; line 1 pos 7


 -- !query
@@ -1491,12 +1166,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00' as date), cast(2 as float)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS FLOAT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS FLOAT)))' (date and float).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS FLOAT)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS FLOAT)))' (date and float).; line 1 pos 7


 -- !query
@@ -1505,12 +1175,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00' as date), cast(2 as double)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS DOUBLE)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS DOUBLE)))' (date and double).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS DOUBLE)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS DOUBLE)))' (date and double).; line 1 pos 7


 -- !query
@@ -1519,12 +1184,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00' as date), cast(2 as decimal(10, 0)))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS DECIMAL(10,0))))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS DECIMAL(10,0))))' (date and decimal(10,0)).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS DECIMAL(10,0))))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS DECIMAL(10,0))))' (date and decimal(10,0)).; line 1 pos 7


 -- !query
@@ -1541,12 +1201,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00' as date), cast('2' as binary)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST('2' AS BINARY)))' (date and binary).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST('2' AS BINARY)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST('2' AS BINARY)))' (date and binary).; line 1 pos 7


 -- !query
@@ -1555,12 +1210,7 @@ SELECT IF(true, cast('2017-12-12 09:30:00' as date), cast(2 as boolean)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS BOOLEAN)))' (date and boolean).; line 1 pos 7"
-  }
-}
+cannot resolve '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS BOOLEAN)))' due to data type mismatch: differing types in '(IF(true, CAST('2017-12-12 09:30:00' AS DATE), CAST(2 AS BOOLEAN)))' (date and boolean).; line 1 pos 7


 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/inConversion.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/inConversion.sql.out
index 6d3082d58418e..d81ae31f1b815 100644
--- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/inConversion.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/inConversion.sql.out
@@ -77,12 +77,7 @@ SELECT cast(1 as tinyint) in (cast('1' as binary)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS TINYINT)
IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: tinyint != binary; line 1 pos 26" - } -} +cannot resolve '(CAST(1 AS TINYINT) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: tinyint != binary; line 1 pos 26 -- !query @@ -91,12 +86,7 @@ SELECT cast(1 as tinyint) in (cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS TINYINT) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: tinyint != boolean; line 1 pos 26" - } -} +cannot resolve '(CAST(1 AS TINYINT) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: tinyint != boolean; line 1 pos 26 -- !query @@ -105,12 +95,7 @@ SELECT cast(1 as tinyint) in (cast('2017-12-11 09:30:00.0' as timestamp)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS TINYINT) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: tinyint != timestamp; line 1 pos 26" - } -} +cannot resolve '(CAST(1 AS TINYINT) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: tinyint != timestamp; line 1 pos 26 -- !query @@ -119,12 +104,7 @@ SELECT cast(1 as tinyint) in (cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS TINYINT) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: tinyint != date; line 1 pos 26" - } -} +cannot resolve '(CAST(1 AS TINYINT) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: tinyint != date; line 1 pos 26 -- !query @@ -197,12 +177,7 @@ SELECT cast(1 as smallint) in (cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS SMALLINT) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: smallint != binary; line 1 pos 27" - } -} +cannot resolve '(CAST(1 AS SMALLINT) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: smallint != binary; line 1 pos 27 -- !query @@ -211,12 +186,7 @@ SELECT cast(1 as smallint) in (cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS SMALLINT) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: smallint != boolean; line 1 pos 27" - } -} +cannot resolve '(CAST(1 AS SMALLINT) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: smallint != boolean; line 1 pos 27 -- !query @@ -225,12 +195,7 @@ SELECT cast(1 as smallint) in (cast('2017-12-11 09:30:00.0' as timestamp)) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS SMALLINT) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type 
mismatch: Arguments must be same type but were: smallint != timestamp; line 1 pos 27" - } -} +cannot resolve '(CAST(1 AS SMALLINT) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: smallint != timestamp; line 1 pos 27 -- !query @@ -239,12 +204,7 @@ SELECT cast(1 as smallint) in (cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS SMALLINT) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: smallint != date; line 1 pos 27" - } -} +cannot resolve '(CAST(1 AS SMALLINT) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: smallint != date; line 1 pos 27 -- !query @@ -317,12 +277,7 @@ SELECT cast(1 as int) in (cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS INT) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: int != binary; line 1 pos 22" - } -} +cannot resolve '(CAST(1 AS INT) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: int != binary; line 1 pos 22 -- !query @@ -331,12 +286,7 @@ SELECT cast(1 as int) in (cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS INT) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: int != boolean; line 1 pos 22" - } -} +cannot resolve '(CAST(1 AS INT) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: int != boolean; line 1 pos 22 -- !query @@ -345,12 +295,7 @@ SELECT cast(1 as int) in (cast('2017-12-11 09:30:00.0' as timestamp)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS INT) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: int != timestamp; line 1 pos 22" - } -} +cannot resolve '(CAST(1 AS INT) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: int != timestamp; line 1 pos 22 -- !query @@ -359,12 +304,7 @@ SELECT cast(1 as int) in (cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS INT) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: int != date; line 1 pos 22" - } -} +cannot resolve '(CAST(1 AS INT) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: int != date; line 1 pos 22 -- !query @@ -437,12 +377,7 @@ SELECT cast(1 as bigint) in (cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS BIGINT) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: bigint != binary; line 1 pos 25" - } -} +cannot 
resolve '(CAST(1 AS BIGINT) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: bigint != binary; line 1 pos 25 -- !query @@ -451,12 +386,7 @@ SELECT cast(1 as bigint) in (cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS BIGINT) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: bigint != boolean; line 1 pos 25" - } -} +cannot resolve '(CAST(1 AS BIGINT) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: bigint != boolean; line 1 pos 25 -- !query @@ -465,12 +395,7 @@ SELECT cast(1 as bigint) in (cast('2017-12-11 09:30:00.0' as timestamp)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS BIGINT) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: bigint != timestamp; line 1 pos 25" - } -} +cannot resolve '(CAST(1 AS BIGINT) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: bigint != timestamp; line 1 pos 25 -- !query @@ -479,12 +404,7 @@ SELECT cast(1 as bigint) in (cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS BIGINT) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: bigint != date; line 1 pos 25" - } -} +cannot resolve '(CAST(1 AS BIGINT) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: bigint != date; line 1 pos 25 -- !query @@ -557,12 +477,7 @@ SELECT cast(1 as float) in (cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS FLOAT) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: float != binary; line 1 pos 24" - } -} +cannot resolve '(CAST(1 AS FLOAT) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: float != binary; line 1 pos 24 -- !query @@ -571,12 +486,7 @@ SELECT cast(1 as float) in (cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS FLOAT) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: float != boolean; line 1 pos 24" - } -} +cannot resolve '(CAST(1 AS FLOAT) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: float != boolean; line 1 pos 24 -- !query @@ -585,12 +495,7 @@ SELECT cast(1 as float) in (cast('2017-12-11 09:30:00.0' as timestamp)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS FLOAT) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: float != timestamp; line 1 pos 24" - } -} +cannot resolve '(CAST(1 AS FLOAT) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type 
mismatch: Arguments must be same type but were: float != timestamp; line 1 pos 24 -- !query @@ -599,12 +504,7 @@ SELECT cast(1 as float) in (cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS FLOAT) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: float != date; line 1 pos 24" - } -} +cannot resolve '(CAST(1 AS FLOAT) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: float != date; line 1 pos 24 -- !query @@ -677,12 +577,7 @@ SELECT cast(1 as double) in (cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DOUBLE) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: double != binary; line 1 pos 25" - } -} +cannot resolve '(CAST(1 AS DOUBLE) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: double != binary; line 1 pos 25 -- !query @@ -691,12 +586,7 @@ SELECT cast(1 as double) in (cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DOUBLE) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: double != boolean; line 1 pos 25" - } -} +cannot resolve '(CAST(1 AS DOUBLE) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: double != boolean; line 1 pos 25 -- !query @@ -705,12 +595,7 @@ SELECT cast(1 as double) in (cast('2017-12-11 09:30:00.0' as timestamp)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DOUBLE) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: double != timestamp; line 1 pos 25" - } -} +cannot resolve '(CAST(1 AS DOUBLE) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: double != timestamp; line 1 pos 25 -- !query @@ -719,12 +604,7 @@ SELECT cast(1 as double) in (cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DOUBLE) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: double != date; line 1 pos 25" - } -} +cannot resolve '(CAST(1 AS DOUBLE) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: double != date; line 1 pos 25 -- !query @@ -797,12 +677,7 @@ SELECT cast(1 as decimal(10, 0)) in (cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != binary; line 1 pos 33" - } -} +cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: 
decimal(10,0) != binary; line 1 pos 33 -- !query @@ -811,12 +686,7 @@ SELECT cast(1 as decimal(10, 0)) in (cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != boolean; line 1 pos 33" - } -} +cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != boolean; line 1 pos 33 -- !query @@ -825,12 +695,7 @@ SELECT cast(1 as decimal(10, 0)) in (cast('2017-12-11 09:30:00.0' as timestamp)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != timestamp; line 1 pos 33" - } -} +cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != timestamp; line 1 pos 33 -- !query @@ -839,12 +704,7 @@ SELECT cast(1 as decimal(10, 0)) in (cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != date; line 1 pos 33" - } -} +cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != date; line 1 pos 33 -- !query @@ -917,12 +777,7 @@ SELECT cast(1 as string) in (cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS STRING) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: string != binary; line 1 pos 25" - } -} +cannot resolve '(CAST(1 AS STRING) IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: string != binary; line 1 pos 25 -- !query @@ -931,12 +786,7 @@ SELECT cast(1 as string) in (cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS STRING) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: string != boolean; line 1 pos 25" - } -} +cannot resolve '(CAST(1 AS STRING) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: string != boolean; line 1 pos 25 -- !query @@ -961,12 +811,7 @@ SELECT cast('1' as binary) in (cast(1 as tinyint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: binary != tinyint; line 1 pos 27" - } -} +cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: binary != tinyint; 
line 1 pos 27 -- !query @@ -975,12 +820,7 @@ SELECT cast('1' as binary) in (cast(1 as smallint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: binary != smallint; line 1 pos 27" - } -} +cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: binary != smallint; line 1 pos 27 -- !query @@ -989,12 +829,7 @@ SELECT cast('1' as binary) in (cast(1 as int)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS INT)))' due to data type mismatch: Arguments must be same type but were: binary != int; line 1 pos 27" - } -} +cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS INT)))' due to data type mismatch: Arguments must be same type but were: binary != int; line 1 pos 27 -- !query @@ -1003,12 +838,7 @@ SELECT cast('1' as binary) in (cast(1 as bigint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: binary != bigint; line 1 pos 27" - } -} +cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: binary != bigint; line 1 pos 27 -- !query @@ -1017,12 +847,7 @@ SELECT cast('1' as binary) in (cast(1 as float)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: binary != float; line 1 pos 27" - } -} +cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: binary != float; line 1 pos 27 -- !query @@ -1031,12 +856,7 @@ SELECT cast('1' as binary) in (cast(1 as double)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: binary != double; line 1 pos 27" - } -} +cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: binary != double; line 1 pos 27 -- !query @@ -1045,12 +865,7 @@ SELECT cast('1' as binary) in (cast(1 as decimal(10, 0))) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: binary != decimal(10,0); line 1 pos 27" - } -} +cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: binary != decimal(10,0); line 1 pos 27 -- !query @@ -1059,12 +874,7 @@ SELECT cast('1' as binary) in (cast(1 as string)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - 
"messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS STRING)))' due to data type mismatch: Arguments must be same type but were: binary != string; line 1 pos 27" - } -} +cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS STRING)))' due to data type mismatch: Arguments must be same type but were: binary != string; line 1 pos 27 -- !query @@ -1081,12 +891,7 @@ SELECT cast('1' as binary) in (cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: binary != boolean; line 1 pos 27" - } -} +cannot resolve '(CAST('1' AS BINARY) IN (CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: binary != boolean; line 1 pos 27 -- !query @@ -1095,12 +900,7 @@ SELECT cast('1' as binary) in (cast('2017-12-11 09:30:00.0' as timestamp)) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: binary != timestamp; line 1 pos 27" - } -} +cannot resolve '(CAST('1' AS BINARY) IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: binary != timestamp; line 1 pos 27 -- !query @@ -1109,12 +909,7 @@ SELECT cast('1' as binary) in (cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: binary != date; line 1 pos 27" - } -} +cannot resolve '(CAST('1' AS BINARY) IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: binary != date; line 1 pos 27 -- !query @@ -1123,12 +918,7 @@ SELECT true in (cast(1 as tinyint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(true IN (CAST(1 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: boolean != tinyint; line 1 pos 12" - } -} +cannot resolve '(true IN (CAST(1 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: boolean != tinyint; line 1 pos 12 -- !query @@ -1137,12 +927,7 @@ SELECT true in (cast(1 as smallint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(true IN (CAST(1 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: boolean != smallint; line 1 pos 12" - } -} +cannot resolve '(true IN (CAST(1 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: boolean != smallint; line 1 pos 12 -- !query @@ -1151,12 +936,7 @@ SELECT true in (cast(1 as int)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(true IN (CAST(1 AS INT)))' due to data type mismatch: Arguments must be same type but were: boolean != int; line 1 pos 12" - } -} +cannot 
resolve '(true IN (CAST(1 AS INT)))' due to data type mismatch: Arguments must be same type but were: boolean != int; line 1 pos 12 -- !query @@ -1165,12 +945,7 @@ SELECT true in (cast(1 as bigint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(true IN (CAST(1 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: boolean != bigint; line 1 pos 12" - } -} +cannot resolve '(true IN (CAST(1 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: boolean != bigint; line 1 pos 12 -- !query @@ -1179,12 +954,7 @@ SELECT true in (cast(1 as float)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(true IN (CAST(1 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: boolean != float; line 1 pos 12" - } -} +cannot resolve '(true IN (CAST(1 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: boolean != float; line 1 pos 12 -- !query @@ -1193,12 +963,7 @@ SELECT true in (cast(1 as double)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(true IN (CAST(1 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: boolean != double; line 1 pos 12" - } -} +cannot resolve '(true IN (CAST(1 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: boolean != double; line 1 pos 12 -- !query @@ -1207,12 +972,7 @@ SELECT true in (cast(1 as decimal(10, 0))) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(true IN (CAST(1 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: boolean != decimal(10,0); line 1 pos 12" - } -} +cannot resolve '(true IN (CAST(1 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: boolean != decimal(10,0); line 1 pos 12 -- !query @@ -1221,12 +981,7 @@ SELECT true in (cast(1 as string)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(true IN (CAST(1 AS STRING)))' due to data type mismatch: Arguments must be same type but were: boolean != string; line 1 pos 12" - } -} +cannot resolve '(true IN (CAST(1 AS STRING)))' due to data type mismatch: Arguments must be same type but were: boolean != string; line 1 pos 12 -- !query @@ -1235,12 +990,7 @@ SELECT true in (cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(true IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: boolean != binary; line 1 pos 12" - } -} +cannot resolve '(true IN (CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: boolean != binary; line 1 pos 12 -- !query @@ -1257,12 +1007,7 @@ SELECT true in (cast('2017-12-11 09:30:00.0' as timestamp)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(true IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type 
mismatch: Arguments must be same type but were: boolean != timestamp; line 1 pos 12" - } -} +cannot resolve '(true IN (CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: boolean != timestamp; line 1 pos 12 -- !query @@ -1271,12 +1016,7 @@ SELECT true in (cast('2017-12-11 09:30:00' as date)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(true IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: boolean != date; line 1 pos 12" - } -} +cannot resolve '(true IN (CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: boolean != date; line 1 pos 12 -- !query @@ -1285,12 +1025,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast(2 as tinyint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: timestamp != tinyint; line 1 pos 50" - } -} +cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: timestamp != tinyint; line 1 pos 50 -- !query @@ -1299,12 +1034,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast(2 as smallint)) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: timestamp != smallint; line 1 pos 50" - } -} +cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: timestamp != smallint; line 1 pos 50 -- !query @@ -1313,12 +1043,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast(2 as int)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS INT)))' due to data type mismatch: Arguments must be same type but were: timestamp != int; line 1 pos 50" - } -} +cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS INT)))' due to data type mismatch: Arguments must be same type but were: timestamp != int; line 1 pos 50 -- !query @@ -1327,12 +1052,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast(2 as bigint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: timestamp != bigint; line 1 pos 50" - } -} +cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: timestamp != bigint; line 1 pos 50 -- !query @@ -1341,12 +1061,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast(2 as float)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - 
"messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: timestamp != float; line 1 pos 50" - } -} +cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: timestamp != float; line 1 pos 50 -- !query @@ -1355,12 +1070,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast(2 as double)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: timestamp != double; line 1 pos 50" - } -} +cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: timestamp != double; line 1 pos 50 -- !query @@ -1369,12 +1079,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast(2 as decimal(10, 0))) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: timestamp != decimal(10,0); line 1 pos 50" - } -} +cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: timestamp != decimal(10,0); line 1 pos 50 -- !query @@ -1391,12 +1096,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast('2' as binary)) FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: timestamp != binary; line 1 pos 50" - } -} +cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: timestamp != binary; line 1 pos 50 -- !query @@ -1405,12 +1105,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast(2 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: timestamp != boolean; line 1 pos 50" - } -} +cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST(2 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: timestamp != boolean; line 1 pos 50 -- !query @@ -1435,12 +1130,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast(2 as tinyint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: date != tinyint; line 1 pos 43" - } -} +cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: date != tinyint; 
line 1 pos 43 -- !query @@ -1449,12 +1139,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast(2 as smallint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: date != smallint; line 1 pos 43" - } -} +cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: date != smallint; line 1 pos 43 -- !query @@ -1463,12 +1148,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast(2 as int)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS INT)))' due to data type mismatch: Arguments must be same type but were: date != int; line 1 pos 43" - } -} +cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS INT)))' due to data type mismatch: Arguments must be same type but were: date != int; line 1 pos 43 -- !query @@ -1477,12 +1157,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast(2 as bigint)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: date != bigint; line 1 pos 43" - } -} +cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: date != bigint; line 1 pos 43 -- !query @@ -1491,12 +1166,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast(2 as float)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: date != float; line 1 pos 43" - } -} +cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: date != float; line 1 pos 43 -- !query @@ -1505,12 +1175,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast(2 as double)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: date != double; line 1 pos 43" - } -} +cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: date != double; line 1 pos 43 -- !query @@ -1519,12 +1184,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast(2 as decimal(10, 0))) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: date != decimal(10,0); line 1 pos 43" - } -} +cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS DECIMAL(10,0))))' due to data type mismatch: 
Arguments must be same type but were: date != decimal(10,0); line 1 pos 43 -- !query @@ -1541,12 +1201,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast('2' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: date != binary; line 1 pos 43" - } -} +cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: date != binary; line 1 pos 43 -- !query @@ -1555,12 +1210,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast(2 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: date != boolean; line 1 pos 43" - } -} +cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST(2 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: date != boolean; line 1 pos 43 -- !query @@ -1649,12 +1299,7 @@ SELECT cast(1 as tinyint) in (cast(1 as tinyint), cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS TINYINT) IN (CAST(1 AS TINYINT), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: tinyint != binary; line 1 pos 26" - } -} +cannot resolve '(CAST(1 AS TINYINT) IN (CAST(1 AS TINYINT), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: tinyint != binary; line 1 pos 26 -- !query @@ -1663,12 +1308,7 @@ SELECT cast(1 as tinyint) in (cast(1 as tinyint), cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS TINYINT) IN (CAST(1 AS TINYINT), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: tinyint != boolean; line 1 pos 26" - } -} +cannot resolve '(CAST(1 AS TINYINT) IN (CAST(1 AS TINYINT), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: tinyint != boolean; line 1 pos 26 -- !query @@ -1677,12 +1317,7 @@ SELECT cast(1 as tinyint) in (cast(1 as tinyint), cast('2017-12-11 09:30:00.0' a struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS TINYINT) IN (CAST(1 AS TINYINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: tinyint != timestamp; line 1 pos 26" - } -} +cannot resolve '(CAST(1 AS TINYINT) IN (CAST(1 AS TINYINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: tinyint != timestamp; line 1 pos 26 -- !query @@ -1691,12 +1326,7 @@ SELECT cast(1 as tinyint) in (cast(1 as tinyint), cast('2017-12-11 09:30:00' as struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS TINYINT) IN (CAST(1 AS TINYINT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type 
mismatch: Arguments must be same type but were: tinyint != date; line 1 pos 26" - } -} +cannot resolve '(CAST(1 AS TINYINT) IN (CAST(1 AS TINYINT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: tinyint != date; line 1 pos 26 -- !query @@ -1769,12 +1399,7 @@ SELECT cast(1 as smallint) in (cast(1 as smallint), cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS SMALLINT) IN (CAST(1 AS SMALLINT), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: smallint != binary; line 1 pos 27" - } -} +cannot resolve '(CAST(1 AS SMALLINT) IN (CAST(1 AS SMALLINT), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: smallint != binary; line 1 pos 27 -- !query @@ -1783,12 +1408,7 @@ SELECT cast(1 as smallint) in (cast(1 as smallint), cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS SMALLINT) IN (CAST(1 AS SMALLINT), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: smallint != boolean; line 1 pos 27" - } -} +cannot resolve '(CAST(1 AS SMALLINT) IN (CAST(1 AS SMALLINT), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: smallint != boolean; line 1 pos 27 -- !query @@ -1797,12 +1417,7 @@ SELECT cast(1 as smallint) in (cast(1 as smallint), cast('2017-12-11 09:30:00.0' struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS SMALLINT) IN (CAST(1 AS SMALLINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: smallint != timestamp; line 1 pos 27" - } -} +cannot resolve '(CAST(1 AS SMALLINT) IN (CAST(1 AS SMALLINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: smallint != timestamp; line 1 pos 27 -- !query @@ -1811,12 +1426,7 @@ SELECT cast(1 as smallint) in (cast(1 as smallint), cast('2017-12-11 09:30:00' a struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS SMALLINT) IN (CAST(1 AS SMALLINT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: smallint != date; line 1 pos 27" - } -} +cannot resolve '(CAST(1 AS SMALLINT) IN (CAST(1 AS SMALLINT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: smallint != date; line 1 pos 27 -- !query @@ -1889,12 +1499,7 @@ SELECT cast(1 as int) in (cast(1 as int), cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS INT) IN (CAST(1 AS INT), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: int != binary; line 1 pos 22" - } -} +cannot resolve '(CAST(1 AS INT) IN (CAST(1 AS INT), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: int != binary; line 1 pos 22 -- !query @@ -1903,12 +1508,7 @@ SELECT cast(1 as int) in (cast(1 as int), cast(1 as boolean)) FROM 
t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS INT) IN (CAST(1 AS INT), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: int != boolean; line 1 pos 22" - } -} +cannot resolve '(CAST(1 AS INT) IN (CAST(1 AS INT), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: int != boolean; line 1 pos 22 -- !query @@ -1917,12 +1517,7 @@ SELECT cast(1 as int) in (cast(1 as int), cast('2017-12-11 09:30:00.0' as timest struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS INT) IN (CAST(1 AS INT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: int != timestamp; line 1 pos 22" - } -} +cannot resolve '(CAST(1 AS INT) IN (CAST(1 AS INT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: int != timestamp; line 1 pos 22 -- !query @@ -1931,12 +1526,7 @@ SELECT cast(1 as int) in (cast(1 as int), cast('2017-12-11 09:30:00' as date)) F struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS INT) IN (CAST(1 AS INT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: int != date; line 1 pos 22" - } -} +cannot resolve '(CAST(1 AS INT) IN (CAST(1 AS INT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: int != date; line 1 pos 22 -- !query @@ -2009,12 +1599,7 @@ SELECT cast(1 as bigint) in (cast(1 as bigint), cast('1' as binary)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS BIGINT) IN (CAST(1 AS BIGINT), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: bigint != binary; line 1 pos 25" - } -} +cannot resolve '(CAST(1 AS BIGINT) IN (CAST(1 AS BIGINT), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: bigint != binary; line 1 pos 25 -- !query @@ -2023,12 +1608,7 @@ SELECT cast(1 as bigint) in (cast(1 as bigint), cast(1 as boolean)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS BIGINT) IN (CAST(1 AS BIGINT), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: bigint != boolean; line 1 pos 25" - } -} +cannot resolve '(CAST(1 AS BIGINT) IN (CAST(1 AS BIGINT), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: bigint != boolean; line 1 pos 25 -- !query @@ -2037,12 +1617,7 @@ SELECT cast(1 as bigint) in (cast(1 as bigint), cast('2017-12-11 09:30:00.0' as struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS BIGINT) IN (CAST(1 AS BIGINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: bigint != timestamp; line 1 pos 25" - } -} +cannot resolve '(CAST(1 AS BIGINT) IN (CAST(1 AS BIGINT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to 
data type mismatch: Arguments must be same type but were: bigint != timestamp; line 1 pos 25
 
 
 -- !query
@@ -2051,12 +1626,7 @@ SELECT cast(1 as bigint) in (cast(1 as bigint), cast('2017-12-11 09:30:00' as da
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS BIGINT) IN (CAST(1 AS BIGINT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: bigint != date; line 1 pos 25"
-  }
-}
+cannot resolve '(CAST(1 AS BIGINT) IN (CAST(1 AS BIGINT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: bigint != date; line 1 pos 25
 
 
 -- !query
@@ -2129,12 +1699,7 @@ SELECT cast(1 as float) in (cast(1 as float), cast('1' as binary)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS FLOAT) IN (CAST(1 AS FLOAT), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: float != binary; line 1 pos 24"
-  }
-}
+cannot resolve '(CAST(1 AS FLOAT) IN (CAST(1 AS FLOAT), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: float != binary; line 1 pos 24
 
 
 -- !query
@@ -2143,12 +1708,7 @@ SELECT cast(1 as float) in (cast(1 as float), cast(1 as boolean)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS FLOAT) IN (CAST(1 AS FLOAT), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: float != boolean; line 1 pos 24"
-  }
-}
+cannot resolve '(CAST(1 AS FLOAT) IN (CAST(1 AS FLOAT), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: float != boolean; line 1 pos 24
 
 
 -- !query
@@ -2157,12 +1717,7 @@ SELECT cast(1 as float) in (cast(1 as float), cast('2017-12-11 09:30:00.0' as ti
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS FLOAT) IN (CAST(1 AS FLOAT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: float != timestamp; line 1 pos 24"
-  }
-}
+cannot resolve '(CAST(1 AS FLOAT) IN (CAST(1 AS FLOAT), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: float != timestamp; line 1 pos 24
 
 
 -- !query
@@ -2171,12 +1726,7 @@ SELECT cast(1 as float) in (cast(1 as float), cast('2017-12-11 09:30:00' as date
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS FLOAT) IN (CAST(1 AS FLOAT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: float != date; line 1 pos 24"
-  }
-}
+cannot resolve '(CAST(1 AS FLOAT) IN (CAST(1 AS FLOAT), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: float != date; line 1 pos 24
 
 
 -- !query
@@ -2249,12 +1799,7 @@ SELECT cast(1 as double) in (cast(1 as double), cast('1' as binary)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DOUBLE) IN (CAST(1 AS DOUBLE), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: double != binary; line 1 pos 25"
-  }
-}
+cannot resolve '(CAST(1 AS DOUBLE) IN (CAST(1 AS DOUBLE), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: double != binary; line 1 pos 25
 
 
 -- !query
@@ -2263,12 +1808,7 @@ SELECT cast(1 as double) in (cast(1 as double), cast(1 as boolean)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DOUBLE) IN (CAST(1 AS DOUBLE), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: double != boolean; line 1 pos 25"
-  }
-}
+cannot resolve '(CAST(1 AS DOUBLE) IN (CAST(1 AS DOUBLE), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: double != boolean; line 1 pos 25
 
 
 -- !query
@@ -2277,12 +1817,7 @@ SELECT cast(1 as double) in (cast(1 as double), cast('2017-12-11 09:30:00.0' as
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DOUBLE) IN (CAST(1 AS DOUBLE), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: double != timestamp; line 1 pos 25"
-  }
-}
+cannot resolve '(CAST(1 AS DOUBLE) IN (CAST(1 AS DOUBLE), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: double != timestamp; line 1 pos 25
 
 
 -- !query
@@ -2291,12 +1826,7 @@ SELECT cast(1 as double) in (cast(1 as double), cast('2017-12-11 09:30:00' as da
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DOUBLE) IN (CAST(1 AS DOUBLE), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: double != date; line 1 pos 25"
-  }
-}
+cannot resolve '(CAST(1 AS DOUBLE) IN (CAST(1 AS DOUBLE), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: double != date; line 1 pos 25
 
 
 -- !query
@@ -2369,12 +1899,7 @@ SELECT cast(1 as decimal(10, 0)) in (cast(1 as decimal(10, 0)), cast('1' as bina
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST(1 AS DECIMAL(10,0)), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != binary; line 1 pos 33"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST(1 AS DECIMAL(10,0)), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != binary; line 1 pos 33
 
 
 -- !query
@@ -2383,12 +1908,7 @@ SELECT cast(1 as decimal(10, 0)) in (cast(1 as decimal(10, 0)), cast(1 as boolea
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST(1 AS DECIMAL(10,0)), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != boolean; line 1 pos 33"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST(1 AS DECIMAL(10,0)), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != boolean; line 1 pos 33
 
 
 -- !query
@@ -2397,12 +1917,7 @@ SELECT cast(1 as decimal(10, 0)) in (cast(1 as decimal(10, 0)), cast('2017-12-11
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != timestamp; line 1 pos 33"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != timestamp; line 1 pos 33
 
 
 -- !query
@@ -2411,12 +1926,7 @@ SELECT cast(1 as decimal(10, 0)) in (cast(1 as decimal(10, 0)), cast('2017-12-11
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != date; line 1 pos 33"
-  }
-}
+cannot resolve '(CAST(1 AS DECIMAL(10,0)) IN (CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: decimal(10,0) != date; line 1 pos 33
 
 
 -- !query
@@ -2489,12 +1999,7 @@ SELECT cast(1 as string) in (cast(1 as string), cast('1' as binary)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS STRING) IN (CAST(1 AS STRING), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: string != binary; line 1 pos 25"
-  }
-}
+cannot resolve '(CAST(1 AS STRING) IN (CAST(1 AS STRING), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: string != binary; line 1 pos 25
 
 
 -- !query
@@ -2503,12 +2008,7 @@ SELECT cast(1 as string) in (cast(1 as string), cast(1 as boolean)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST(1 AS STRING) IN (CAST(1 AS STRING), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: string != boolean; line 1 pos 25"
-  }
-}
+cannot resolve '(CAST(1 AS STRING) IN (CAST(1 AS STRING), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: string != boolean; line 1 pos 25
 
 
 -- !query
@@ -2533,12 +2033,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast(1 as tinyint)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: binary != tinyint; line 1 pos 27"
-  }
-}
+cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: binary != tinyint; line 1 pos 27
 
 
 -- !query
@@ -2547,12 +2042,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast(1 as smallint)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: binary != smallint; line 1 pos 27"
-  }
-}
+cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: binary != smallint; line 1 pos 27
 
 
 -- !query
@@ -2561,12 +2051,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast(1 as int)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS INT)))' due to data type mismatch: Arguments must be same type but were: binary != int; line 1 pos 27"
-  }
-}
+cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS INT)))' due to data type mismatch: Arguments must be same type but were: binary != int; line 1 pos 27
 
 
 -- !query
@@ -2575,12 +2060,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast(1 as bigint)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: binary != bigint; line 1 pos 27"
-  }
-}
+cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: binary != bigint; line 1 pos 27
 
 
 -- !query
@@ -2589,12 +2069,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast(1 as float)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: binary != float; line 1 pos 27"
-  }
-}
+cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: binary != float; line 1 pos 27
 
 
 -- !query
@@ -2603,12 +2078,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast(1 as double)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: binary != double; line 1 pos 27"
-  }
-}
+cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: binary != double; line 1 pos 27
 
 
 -- !query
@@ -2617,12 +2087,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast(1 as decimal(10, 0))) F
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: binary != decimal(10,0); line 1 pos 27"
-  }
-}
+cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: binary != decimal(10,0); line 1 pos 27
 
 
 -- !query
@@ -2631,12 +2096,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast(1 as string)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS STRING)))' due to data type mismatch: Arguments must be same type but were: binary != string; line 1 pos 27"
-  }
-}
+cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS STRING)))' due to data type mismatch: Arguments must be same type but were: binary != string; line 1 pos 27
 
 
 -- !query
@@ -2653,12 +2113,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast(1 as boolean)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: binary != boolean; line 1 pos 27"
-  }
-}
+cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: binary != boolean; line 1 pos 27
 
 
 -- !query
@@ -2667,12 +2122,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast('2017-12-11 09:30:00.0'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: binary != timestamp; line 1 pos 27"
-  }
-}
+cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: binary != timestamp; line 1 pos 27
 
 
 -- !query
@@ -2681,12 +2131,7 @@ SELECT cast('1' as binary) in (cast('1' as binary), cast('2017-12-11 09:30:00' a
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: binary != date; line 1 pos 27"
-  }
-}
+cannot resolve '(CAST('1' AS BINARY) IN (CAST('1' AS BINARY), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: binary != date; line 1 pos 27
 
 
 -- !query
@@ -2695,12 +2140,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast(1 as tinyint)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: boolean != tinyint; line 1 pos 28"
-  }
-}
+cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: boolean != tinyint; line 1 pos 28
 
 
 -- !query
@@ -2709,12 +2149,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast(1 as smallint)) FROM
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: boolean != smallint; line 1 pos 28"
-  }
-}
+cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: boolean != smallint; line 1 pos 28
 
 
 -- !query
@@ -2723,12 +2158,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast(1 as int)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS INT)))' due to data type mismatch: Arguments must be same type but were: boolean != int; line 1 pos 28"
-  }
-}
+cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS INT)))' due to data type mismatch: Arguments must be same type but were: boolean != int; line 1 pos 28
 
 
 -- !query
@@ -2737,12 +2167,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast(1 as bigint)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: boolean != bigint; line 1 pos 28"
-  }
-}
+cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: boolean != bigint; line 1 pos 28
 
 
 -- !query
@@ -2751,12 +2176,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast(1 as float)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: boolean != float; line 1 pos 28"
-  }
-}
+cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: boolean != float; line 1 pos 28
 
 
 -- !query
@@ -2765,12 +2185,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast(1 as double)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: boolean != double; line 1 pos 28"
-  }
-}
+cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: boolean != double; line 1 pos 28
 
 
 -- !query
@@ -2779,12 +2194,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast(1 as decimal(10, 0)))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: boolean != decimal(10,0); line 1 pos 28"
-  }
-}
+cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: boolean != decimal(10,0); line 1 pos 28
 
 
 -- !query
@@ -2793,12 +2203,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast(1 as string)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS STRING)))' due to data type mismatch: Arguments must be same type but were: boolean != string; line 1 pos 28"
-  }
-}
+cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST(1 AS STRING)))' due to data type mismatch: Arguments must be same type but were: boolean != string; line 1 pos 28
 
 
 -- !query
@@ -2807,12 +2212,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast('1' as binary)) FROM
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: boolean != binary; line 1 pos 28"
-  }
-}
+cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: boolean != binary; line 1 pos 28
 
 
 -- !query
@@ -2829,12 +2229,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast('2017-12-11 09:30:00.
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: boolean != timestamp; line 1 pos 28"
-  }
-}
+cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP)))' due to data type mismatch: Arguments must be same type but were: boolean != timestamp; line 1 pos 28
 
 
 -- !query
@@ -2843,12 +2238,7 @@ SELECT cast('1' as boolean) in (cast('1' as boolean), cast('2017-12-11 09:30:00'
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: boolean != date; line 1 pos 28"
-  }
-}
+cannot resolve '(CAST('1' AS BOOLEAN) IN (CAST('1' AS BOOLEAN), CAST('2017-12-11 09:30:00' AS DATE)))' due to data type mismatch: Arguments must be same type but were: boolean != date; line 1 pos 28
 
 
 -- !query
@@ -2857,12 +2247,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast('2017-12-12 09:30:00.
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: timestamp != tinyint; line 1 pos 50"
-  }
-}
+cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: timestamp != tinyint; line 1 pos 50
 
 
 -- !query
@@ -2871,12 +2256,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast('2017-12-12 09:30:00.
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: timestamp != smallint; line 1 pos 50"
-  }
-}
+cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: timestamp != smallint; line 1 pos 50
 
 
 -- !query
@@ -2885,12 +2265,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast('2017-12-12 09:30:00.
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS INT)))' due to data type mismatch: Arguments must be same type but were: timestamp != int; line 1 pos 50"
-  }
-}
+cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS INT)))' due to data type mismatch: Arguments must be same type but were: timestamp != int; line 1 pos 50
 
 
 -- !query
@@ -2899,12 +2274,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast('2017-12-12 09:30:00.
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: timestamp != bigint; line 1 pos 50"
-  }
-}
+cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: timestamp != bigint; line 1 pos 50
 
 
 -- !query
@@ -2913,12 +2283,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast('2017-12-12 09:30:00.
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: timestamp != float; line 1 pos 50"
-  }
-}
+cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: timestamp != float; line 1 pos 50
 
 
 -- !query
@@ -2927,12 +2292,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast('2017-12-12 09:30:00.
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: timestamp != double; line 1 pos 50"
-  }
-}
+cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: timestamp != double; line 1 pos 50
 
 
 -- !query
@@ -2941,12 +2301,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast('2017-12-12 09:30:00.
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: timestamp != decimal(10,0); line 1 pos 50"
-  }
-}
+cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: timestamp != decimal(10,0); line 1 pos 50
 
 
 -- !query
@@ -2963,12 +2318,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast('2017-12-12 09:30:00.
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: timestamp != binary; line 1 pos 50"
-  }
-}
+cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: timestamp != binary; line 1 pos 50
 
 
 -- !query
@@ -2977,12 +2327,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) in (cast('2017-12-12 09:30:00.
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: timestamp != boolean; line 1 pos 50"
-  }
-}
+cannot resolve '(CAST('2017-12-12 09:30:00.0' AS TIMESTAMP) IN (CAST('2017-12-12 09:30:00.0' AS TIMESTAMP), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: timestamp != boolean; line 1 pos 50
 
 
 -- !query
@@ -3007,12 +2352,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast('2017-12-12 09:30:00' as dat
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: date != tinyint; line 1 pos 43"
-  }
-}
+cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS TINYINT)))' due to data type mismatch: Arguments must be same type but were: date != tinyint; line 1 pos 43
 
 
 -- !query
@@ -3021,12 +2361,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast('2017-12-12 09:30:00' as dat
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: date != smallint; line 1 pos 43"
-  }
-}
+cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS SMALLINT)))' due to data type mismatch: Arguments must be same type but were: date != smallint; line 1 pos 43
 
 
 -- !query
@@ -3035,12 +2370,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast('2017-12-12 09:30:00' as dat
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS INT)))' due to data type mismatch: Arguments must be same type but were: date != int; line 1 pos 43"
-  }
-}
+cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS INT)))' due to data type mismatch: Arguments must be same type but were: date != int; line 1 pos 43
 
 
 -- !query
@@ -3049,12 +2379,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast('2017-12-12 09:30:00' as dat
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: date != bigint; line 1 pos 43"
-  }
-}
+cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS BIGINT)))' due to data type mismatch: Arguments must be same type but were: date != bigint; line 1 pos 43
 
 
 -- !query
@@ -3063,12 +2388,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast('2017-12-12 09:30:00' as dat
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: date != float; line 1 pos 43"
-  }
-}
+cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS FLOAT)))' due to data type mismatch: Arguments must be same type but were: date != float; line 1 pos 43
 
 
 -- !query
@@ -3077,12 +2397,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast('2017-12-12 09:30:00' as dat
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: date != double; line 1 pos 43"
-  }
-}
+cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS DOUBLE)))' due to data type mismatch: Arguments must be same type but were: date != double; line 1 pos 43
 
 
 -- !query
@@ -3091,12 +2406,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast('2017-12-12 09:30:00' as dat
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: date != decimal(10,0); line 1 pos 43"
-  }
-}
+cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS DECIMAL(10,0))))' due to data type mismatch: Arguments must be same type but were: date != decimal(10,0); line 1 pos 43
 
 
 -- !query
@@ -3113,12 +2423,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast('2017-12-12 09:30:00' as dat
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: date != binary; line 1 pos 43"
-  }
-}
+cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST('1' AS BINARY)))' due to data type mismatch: Arguments must be same type but were: date != binary; line 1 pos 43
 
 
 -- !query
@@ -3127,12 +2432,7 @@ SELECT cast('2017-12-12 09:30:00' as date) in (cast('2017-12-12 09:30:00' as dat
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: date != boolean; line 1 pos 43"
-  }
-}
+cannot resolve '(CAST('2017-12-12 09:30:00' AS DATE) IN (CAST('2017-12-12 09:30:00' AS DATE), CAST(1 AS BOOLEAN)))' due to data type mismatch: Arguments must be same type but were: date != boolean; line 1 pos 43
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapZipWith.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapZipWith.sql.out
index 98a4eb60ec4ab..2f176951df840 100644
--- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapZipWith.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapZipWith.sql.out
@@ -82,12 +82,7 @@ FROM various_maps
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve 'map_zip_with(various_maps.decimal_map1, various_maps.decimal_map2, lambdafunction(struct(k, v1, v2), k, v1, v2))' due to argument data type mismatch: The input to function map_zip_with should have been two maps with compatible key types, but the key types are [decimal(36,0), decimal(36,35)].; line 1 pos 7"
-  }
-}
+cannot resolve 'map_zip_with(various_maps.decimal_map1, various_maps.decimal_map2, lambdafunction(struct(k, v1, v2), k, v1, v2))' due to argument data type mismatch: The input to function map_zip_with should have been two maps with compatible key types, but the key types are [decimal(36,0), decimal(36,35)].; line 1 pos 7
 
 
 -- !query
@@ -115,12 +110,7 @@ FROM various_maps
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve 'map_zip_with(various_maps.decimal_map2, various_maps.int_map, lambdafunction(struct(k, v1, v2), k, v1, v2))' due to argument data type mismatch: The input to function map_zip_with should have been two maps with compatible key types, but the key types are [decimal(36,35), int].; line 1 pos 7"
-  }
-}
+cannot resolve 'map_zip_with(various_maps.decimal_map2, various_maps.int_map, lambdafunction(struct(k, v1, v2), k, v1, v2))' due to argument data type mismatch: The input to function map_zip_with should have been two maps with compatible key types, but the key types are [decimal(36,35), int].; line 1 pos 7
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapconcat.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapconcat.sql.out
index b77b91d08b977..916d32c5e35c7 100644
--- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapconcat.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapconcat.sql.out
@@ -91,12 +91,7 @@ FROM various_maps
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve 'map_concat(various_maps.tinyint_map1, various_maps.array_map1)' due to data type mismatch: input to function map_concat should all be the same type, but it's [map, map,array>]; line 2 pos 4"
-  }
-}
+cannot resolve 'map_concat(various_maps.tinyint_map1, various_maps.array_map1)' due to data type mismatch: input to function map_concat should all be the same type, but it's [map, map,array>]; line 2 pos 4
 
 
 -- !query
@@ -107,12 +102,7 @@ FROM various_maps
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve 'map_concat(various_maps.boolean_map1, various_maps.int_map2)' due to data type mismatch: input to function map_concat should all be the same type, but it's [map, map]; line 2 pos 4"
-  }
-}
+cannot resolve 'map_concat(various_maps.boolean_map1, various_maps.int_map2)' due to data type mismatch: input to function map_concat should all be the same type, but it's [map, map]; line 2 pos 4
 
 
 -- !query
@@ -123,12 +113,7 @@ FROM various_maps
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve 'map_concat(various_maps.int_map1, various_maps.struct_map2)' due to data type mismatch: input to function map_concat should all be the same type, but it's [map, map,struct>]; line 2 pos 4"
-  }
-}
+cannot resolve 'map_concat(various_maps.int_map1, various_maps.struct_map2)' due to data type mismatch: input to function map_concat should all be the same type, but it's [map, map,struct>]; line 2 pos 4
 
 
 -- !query
@@ -139,12 +124,7 @@ FROM various_maps
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve 'map_concat(various_maps.struct_map1, various_maps.array_map2)' due to data type mismatch: input to function map_concat should all be the same type, but it's [map,struct>, map,array>]; line 2 pos 4"
-  }
-}
+cannot resolve 'map_concat(various_maps.struct_map1, various_maps.array_map2)' due to data type mismatch: input to function map_concat should all be the same type, but it's [map,struct>, map,array>]; line 2 pos 4
 
 
 -- !query
@@ -155,9 +135,4 @@ FROM various_maps
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve 'map_concat(various_maps.int_map1, various_maps.array_map2)' due to data type mismatch: input to function map_concat should all be the same type, but it's [map, map,array>]; line 2 pos 4"
-  }
-}
+cannot resolve 'map_concat(various_maps.int_map1, various_maps.array_map2)' due to data type mismatch: input to function map_concat should all be the same type, but it's [map, map,array>]; line 2 pos 4
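The promoteStrings.sql.out hunks that follow all involve the same implicit coercion: in arithmetic, a string operand is first promoted to double, which is why every message reports CAST('1' AS DOUBLE) rather than the literal '1'. A small self-contained probe (hypothetical, not part of this patch) reproduces one of them:

```scala
import org.apache.spark.sql.{AnalysisException, SparkSession}

// Hypothetical probe, not part of this patch: shows the string-to-double
// promotion behind the promoteStrings.sql.out messages below.
object PromoteStringsProbe {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[1]").getOrCreate()
    // '1' is promoted to double, so this evaluates to 2.0
    spark.sql("SELECT '1' + 1").show()
    // Fails analysis with "(double and binary)", as in the first hunk below
    try spark.sql("SELECT '1' + cast('1' as binary)").collect()
    catch { case e: AnalysisException => println(e.getMessage) }
    spark.stop()
  }
}
```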
diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out
index b594a7b482a9a..c24b06768e9fc 100644
--- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out
@@ -77,12 +77,7 @@ SELECT '1' + cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS DOUBLE) + CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) + CAST('1' AS BINARY))' (double and binary).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('1' AS DOUBLE) + CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) + CAST('1' AS BINARY))' (double and binary).; line 1 pos 7
 
 
 -- !query
@@ -91,12 +86,7 @@ SELECT '1' + cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS DOUBLE) + CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) + CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('1' AS DOUBLE) + CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) + CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7
 
 
 -- !query
@@ -105,12 +95,7 @@ SELECT '1' + cast('2017-12-11 09:30:00.0' as timestamp) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS DOUBLE) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('1' AS DOUBLE) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) + CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7
 
 
 -- !query
@@ -119,12 +104,7 @@ SELECT '1' + cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7"
-  }
-}
+cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7
 
 
 -- !query
@@ -197,12 +177,7 @@ SELECT '1' - cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS DOUBLE) - CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) - CAST('1' AS BINARY))' (double and binary).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('1' AS DOUBLE) - CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) - CAST('1' AS BINARY))' (double and binary).; line 1 pos 7
 
 
 -- !query
@@ -211,12 +186,7 @@ SELECT '1' - cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS DOUBLE) - CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) - CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('1' AS DOUBLE) - CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) - CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7
 
 
 -- !query
@@ -225,12 +195,7 @@ SELECT '1' - cast('2017-12-11 09:30:00.0' as timestamp) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '('1' - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, ''1'' is of string type.; line 1 pos 7"
-  }
-}
+cannot resolve '('1' - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, ''1'' is of string type.; line 1 pos 7
 
 
 -- !query
@@ -311,12 +276,7 @@ SELECT '1' * cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS DOUBLE) * CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) * CAST('1' AS BINARY))' (double and binary).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('1' AS DOUBLE) * CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) * CAST('1' AS BINARY))' (double and binary).; line 1 pos 7
 
 
 -- !query
@@ -325,12 +285,7 @@ SELECT '1' * cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS DOUBLE) * CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) * CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('1' AS DOUBLE) * CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) * CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7
 
 
 -- !query
@@ -339,12 +294,7 @@ SELECT '1' * cast('2017-12-11 09:30:00.0' as timestamp) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS DOUBLE) * CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) * CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('1' AS DOUBLE) * CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) * CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7
 
 
 -- !query
@@ -353,12 +303,7 @@ SELECT '1' * cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS DOUBLE) * CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) * CAST('2017-12-11 09:30:00' AS DATE))' (double and date).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('1' AS DOUBLE) * CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) * CAST('2017-12-11 09:30:00' AS DATE))' (double and date).; line 1 pos 7
 
 
 -- !query
@@ -431,12 +376,7 @@ SELECT '1' / cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS DOUBLE) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) / CAST('1' AS BINARY))' (double and binary).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('1' AS DOUBLE) / CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) / CAST('1' AS BINARY))' (double and binary).; line 1 pos 7
 
 
 -- !query
@@ -445,12 +385,7 @@ SELECT '1' / cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS DOUBLE) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) / CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('1' AS DOUBLE) / CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) / CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7
 
 
 -- !query
@@ -459,12 +394,7 @@ SELECT '1' / cast('2017-12-11 09:30:00.0' as timestamp) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS DOUBLE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('1' AS DOUBLE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) / CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7
 
 
 -- !query
@@ -473,12 +403,7 @@ SELECT '1' / cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS DOUBLE) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) / CAST('2017-12-11 09:30:00' AS DATE))' (double and date).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('1' AS DOUBLE) / CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) / CAST('2017-12-11 09:30:00' AS DATE))' (double and date).; line 1 pos 7
 
 
 -- !query
@@ -551,12 +476,7 @@ SELECT '1' % cast('1' as binary) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS DOUBLE) % CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) % CAST('1' AS BINARY))' (double and binary).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('1' AS DOUBLE) % CAST('1' AS BINARY))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) % CAST('1' AS BINARY))' (double and binary).; line 1 pos 7
 
 
 -- !query
@@ -565,12 +485,7 @@ SELECT '1' % cast(1 as boolean) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS DOUBLE) % CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) % CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('1' AS DOUBLE) % CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) % CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7
 
 
 -- !query
@@ -579,12 +494,7 @@ SELECT '1' % cast('2017-12-11 09:30:00.0' as timestamp) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS DOUBLE) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('1' AS DOUBLE) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) % CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7
 
 
 -- !query
@@ -593,12 +503,7 @@ SELECT '1' % cast('2017-12-11 09:30:00' as date) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS DOUBLE) % CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) % CAST('2017-12-11 09:30:00' AS DATE))' (double and date).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('1' AS DOUBLE) % CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in '(CAST('1' AS DOUBLE) % CAST('2017-12-11 09:30:00' AS DATE))' (double and date).; line 1 pos 7
 
 
 -- !query
@@ -671,12 +576,7 @@ SELECT pmod('1', cast('1' as binary)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve 'pmod(CAST('1' AS DOUBLE), CAST('1' AS BINARY))' due to data type mismatch: differing types in 'pmod(CAST('1' AS DOUBLE), CAST('1' AS BINARY))' (double and binary).; line 1 pos 7"
-  }
-}
+cannot resolve 'pmod(CAST('1' AS DOUBLE), CAST('1' AS BINARY))' due to data type mismatch: differing types in 'pmod(CAST('1' AS DOUBLE), CAST('1' AS BINARY))' (double and binary).; line 1 pos 7
 
 
 -- !query
@@ -685,12 +585,7 @@ SELECT pmod('1', cast(1 as boolean)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve 'pmod(CAST('1' AS DOUBLE), CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in 'pmod(CAST('1' AS DOUBLE), CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7"
-  }
-}
+cannot resolve 'pmod(CAST('1' AS DOUBLE), CAST(1 AS BOOLEAN))' due to data type mismatch: differing types in 'pmod(CAST('1' AS DOUBLE), CAST(1 AS BOOLEAN))' (double and boolean).; line 1 pos 7
 
 
 -- !query
@@ -699,12 +594,7 @@ SELECT pmod('1', cast('2017-12-11 09:30:00.0' as timestamp)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve 'pmod(CAST('1' AS DOUBLE), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in 'pmod(CAST('1' AS DOUBLE), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7"
-  }
-}
+cannot resolve 'pmod(CAST('1' AS DOUBLE), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: differing types in 'pmod(CAST('1' AS DOUBLE), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' (double and timestamp).; line 1 pos 7
 
 
 -- !query
@@ -713,12 +603,7 @@ SELECT pmod('1', cast('2017-12-11 09:30:00' as date)) FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve 'pmod(CAST('1' AS DOUBLE), CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in 'pmod(CAST('1' AS DOUBLE), CAST('2017-12-11 09:30:00' AS DATE))' (double and date).; line 1 pos 7"
-  }
-}
+cannot resolve 'pmod(CAST('1' AS DOUBLE), CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: differing types in 'pmod(CAST('1' AS DOUBLE), CAST('2017-12-11 09:30:00' AS DATE))' (double and date).; line 1 pos 7
 
 
 -- !query
@@ -783,12 +668,7 @@ SELECT cast('1' as binary) + '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
-    "message" : "cannot resolve '(CAST('1' AS BINARY) + CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) + CAST('1' AS DOUBLE))' (binary and double).; line 1 pos 7"
-  }
-}
+cannot resolve '(CAST('1' AS BINARY) + CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) + CAST('1' AS DOUBLE))' (binary and double).; line 1 pos 7
 
 
 -- !query
@@ -797,12 +677,7 @@ SELECT cast(1 as boolean) + '1' FROM t
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-{
-  "errorClass" : "LEGACY",
-  "messageParameters" : {
7" - } -} +cannot resolve '(CAST(1 AS BOOLEAN) + CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) + CAST('1' AS DOUBLE))' (boolean and double).; line 1 pos 7 -- !query @@ -811,12 +686,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) + '1' FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) + CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7 -- !query @@ -825,12 +695,7 @@ SELECT cast('2017-12-11 09:30:00' as date) + '1' FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7" - } -} +cannot resolve 'date_add(CAST('2017-12-11 09:30:00' AS DATE), CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7 -- !query @@ -895,12 +760,7 @@ SELECT cast('1' as binary) - '1' FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) - CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) - CAST('1' AS DOUBLE))' (binary and double).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) - CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) - CAST('1' AS DOUBLE))' (binary and double).; line 1 pos 7 -- !query @@ -909,12 +769,7 @@ SELECT cast(1 as boolean) - '1' FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS BOOLEAN) - CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) - CAST('1' AS DOUBLE))' (boolean and double).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS BOOLEAN) - CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) - CAST('1' AS DOUBLE))' (boolean and double).; line 1 pos 7 -- !query @@ -923,12 +778,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) - '1' FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - '1')' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, ''1'' is of string type.; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - '1')' due to data type mismatch: argument 2 requires (timestamp or timestamp without time zone) type, however, ''1'' is of string type.; line 1 pos 7 -- !query @@ -937,12 +787,7 @@ SELECT cast('2017-12-11 09:30:00' as date) - '1' FROM t struct<> -- !query 
output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'date_sub(CAST('2017-12-11 09:30:00' AS DATE), CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7" - } -} +cannot resolve 'date_sub(CAST('2017-12-11 09:30:00' AS DATE), CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7 -- !query @@ -1007,12 +852,7 @@ SELECT cast('1' as binary) * '1' FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) * CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) * CAST('1' AS DOUBLE))' (binary and double).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) * CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) * CAST('1' AS DOUBLE))' (binary and double).; line 1 pos 7 -- !query @@ -1021,12 +861,7 @@ SELECT cast(1 as boolean) * '1' FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS BOOLEAN) * CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) * CAST('1' AS DOUBLE))' (boolean and double).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS BOOLEAN) * CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) * CAST('1' AS DOUBLE))' (boolean and double).; line 1 pos 7 -- !query @@ -1035,12 +870,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) * '1' FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) * CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) * CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) * CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) * CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7 -- !query @@ -1049,12 +879,7 @@ SELECT cast('2017-12-11 09:30:00' as date) * '1' FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) * CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) * CAST('1' AS DOUBLE))' (date and double).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) * CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) * CAST('1' AS DOUBLE))' (date and double).; line 1 pos 7 -- !query @@ -1119,12 +944,7 @@ SELECT cast('1' as binary) / '1' FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) / CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST('1' AS DOUBLE))' (binary and double).; line 1 pos 7" - } 
-} +cannot resolve '(CAST('1' AS BINARY) / CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) / CAST('1' AS DOUBLE))' (binary and double).; line 1 pos 7 -- !query @@ -1133,12 +953,7 @@ SELECT cast(1 as boolean) / '1' FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS BOOLEAN) / CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST('1' AS DOUBLE))' (boolean and double).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS BOOLEAN) / CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) / CAST('1' AS DOUBLE))' (boolean and double).; line 1 pos 7 -- !query @@ -1147,12 +962,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) / '1' FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) / CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7 -- !query @@ -1161,12 +971,7 @@ SELECT cast('2017-12-11 09:30:00' as date) / '1' FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('1' AS DOUBLE))' (date and double).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) / CAST('1' AS DOUBLE))' (date and double).; line 1 pos 7 -- !query @@ -1231,12 +1036,7 @@ SELECT cast('1' as binary) % '1' FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('1' AS BINARY) % CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) % CAST('1' AS DOUBLE))' (binary and double).; line 1 pos 7" - } -} +cannot resolve '(CAST('1' AS BINARY) % CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('1' AS BINARY) % CAST('1' AS DOUBLE))' (binary and double).; line 1 pos 7 -- !query @@ -1245,12 +1045,7 @@ SELECT cast(1 as boolean) % '1' FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST(1 AS BOOLEAN) % CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) % CAST('1' AS DOUBLE))' (boolean and double).; line 1 pos 7" - } -} +cannot resolve '(CAST(1 AS BOOLEAN) % CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST(1 AS BOOLEAN) % CAST('1' AS DOUBLE))' (boolean and double).; line 1 pos 7 -- !query @@ -1259,12 +1054,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) % '1' FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - 
"messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) % CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7 -- !query @@ -1273,12 +1063,7 @@ SELECT cast('2017-12-11 09:30:00' as date) % '1' FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) % CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) % CAST('1' AS DOUBLE))' (date and double).; line 1 pos 7" - } -} +cannot resolve '(CAST('2017-12-11 09:30:00' AS DATE) % CAST('1' AS DOUBLE))' due to data type mismatch: differing types in '(CAST('2017-12-11 09:30:00' AS DATE) % CAST('1' AS DOUBLE))' (date and double).; line 1 pos 7 -- !query @@ -1343,12 +1128,7 @@ SELECT pmod(cast('1' as binary), '1') FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'pmod(CAST('1' AS BINARY), CAST('1' AS DOUBLE))' due to data type mismatch: differing types in 'pmod(CAST('1' AS BINARY), CAST('1' AS DOUBLE))' (binary and double).; line 1 pos 7" - } -} +cannot resolve 'pmod(CAST('1' AS BINARY), CAST('1' AS DOUBLE))' due to data type mismatch: differing types in 'pmod(CAST('1' AS BINARY), CAST('1' AS DOUBLE))' (binary and double).; line 1 pos 7 -- !query @@ -1357,12 +1137,7 @@ SELECT pmod(cast(1 as boolean), '1') FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'pmod(CAST(1 AS BOOLEAN), CAST('1' AS DOUBLE))' due to data type mismatch: differing types in 'pmod(CAST(1 AS BOOLEAN), CAST('1' AS DOUBLE))' (boolean and double).; line 1 pos 7" - } -} +cannot resolve 'pmod(CAST(1 AS BOOLEAN), CAST('1' AS DOUBLE))' due to data type mismatch: differing types in 'pmod(CAST(1 AS BOOLEAN), CAST('1' AS DOUBLE))' (boolean and double).; line 1 pos 7 -- !query @@ -1371,12 +1146,7 @@ SELECT pmod(cast('2017-12-11 09:30:00.0' as timestamp), '1') FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST('1' AS DOUBLE))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7" - } -} +cannot resolve 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST('1' AS DOUBLE))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST('1' AS DOUBLE))' (timestamp and double).; line 1 pos 7 -- !query @@ -1385,12 +1155,7 @@ SELECT pmod(cast('2017-12-11 09:30:00' as date), '1') FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST('1' AS DOUBLE))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST('1' AS 
DOUBLE))' (date and double).; line 1 pos 7" - } -} +cannot resolve 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST('1' AS DOUBLE))' due to data type mismatch: differing types in 'pmod(CAST('2017-12-11 09:30:00' AS DATE), CAST('1' AS DOUBLE))' (date and double).; line 1 pos 7 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/stringCastAndExpressions.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/stringCastAndExpressions.sql.out index 0c5c257229887..45e2cf187a31e 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/stringCastAndExpressions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/stringCastAndExpressions.sql.out @@ -101,12 +101,7 @@ select cast(a as array) from t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 't.a' due to data type mismatch: cannot cast string to array; line 1 pos 7" - } -} +cannot resolve 't.a' due to data type mismatch: cannot cast string to array; line 1 pos 7 -- !query @@ -115,12 +110,7 @@ select cast(a as struct) from t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 't.a' due to data type mismatch: cannot cast string to struct; line 1 pos 7" - } -} +cannot resolve 't.a' due to data type mismatch: cannot cast string to struct; line 1 pos 7 -- !query @@ -129,12 +119,7 @@ select cast(a as map) from t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 't.a' due to data type mismatch: cannot cast string to map; line 1 pos 7" - } -} +cannot resolve 't.a' due to data type mismatch: cannot cast string to map; line 1 pos 7 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/widenSetOperationTypes.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/widenSetOperationTypes.sql.out index 99ce1a3d8cb63..f830797212a76 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/widenSetOperationTypes.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/widenSetOperationTypes.sql.out @@ -85,12 +85,7 @@ SELECT cast(1 as tinyint) FROM t UNION SELECT cast('2' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with tinyint at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with tinyint at same column of first table -- !query @@ -99,12 +94,7 @@ SELECT cast(1 as tinyint) FROM t UNION SELECT cast(2 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with tinyint at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. 
The first column of the second table is boolean type which is not compatible with tinyint at same column of first table -- !query @@ -113,12 +103,7 @@ SELECT cast(1 as tinyint) FROM t UNION SELECT cast('2017-12-11 09:30:00.0' as ti struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with tinyint at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with tinyint at same column of first table -- !query @@ -127,12 +112,7 @@ SELECT cast(1 as tinyint) FROM t UNION SELECT cast('2017-12-11 09:30:00' as date struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with tinyint at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with tinyint at same column of first table -- !query @@ -213,12 +193,7 @@ SELECT cast(1 as smallint) FROM t UNION SELECT cast('2' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with smallint at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with smallint at same column of first table -- !query @@ -227,12 +202,7 @@ SELECT cast(1 as smallint) FROM t UNION SELECT cast(2 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with smallint at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with smallint at same column of first table -- !query @@ -241,12 +211,7 @@ SELECT cast(1 as smallint) FROM t UNION SELECT cast('2017-12-11 09:30:00.0' as t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with smallint at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. 
The first column of the second table is timestamp type which is not compatible with smallint at same column of first table -- !query @@ -255,12 +220,7 @@ SELECT cast(1 as smallint) FROM t UNION SELECT cast('2017-12-11 09:30:00' as dat struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with smallint at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with smallint at same column of first table -- !query @@ -341,12 +301,7 @@ SELECT cast(1 as int) FROM t UNION SELECT cast('2' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with int at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with int at same column of first table -- !query @@ -355,12 +310,7 @@ SELECT cast(1 as int) FROM t UNION SELECT cast(2 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with int at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with int at same column of first table -- !query @@ -369,12 +319,7 @@ SELECT cast(1 as int) FROM t UNION SELECT cast('2017-12-11 09:30:00.0' as timest struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with int at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with int at same column of first table -- !query @@ -383,12 +328,7 @@ SELECT cast(1 as int) FROM t UNION SELECT cast('2017-12-11 09:30:00' as date) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with int at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. 
The first column of the second table is date type which is not compatible with int at same column of first table -- !query @@ -469,12 +409,7 @@ SELECT cast(1 as bigint) FROM t UNION SELECT cast('2' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with bigint at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with bigint at same column of first table -- !query @@ -483,12 +418,7 @@ SELECT cast(1 as bigint) FROM t UNION SELECT cast(2 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with bigint at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with bigint at same column of first table -- !query @@ -497,12 +427,7 @@ SELECT cast(1 as bigint) FROM t UNION SELECT cast('2017-12-11 09:30:00.0' as tim struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with bigint at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with bigint at same column of first table -- !query @@ -511,12 +436,7 @@ SELECT cast(1 as bigint) FROM t UNION SELECT cast('2017-12-11 09:30:00' as date) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with bigint at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with bigint at same column of first table -- !query @@ -597,12 +517,7 @@ SELECT cast(1 as float) FROM t UNION SELECT cast('2' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with float at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. 
The first column of the second table is binary type which is not compatible with float at same column of first table -- !query @@ -611,12 +526,7 @@ SELECT cast(1 as float) FROM t UNION SELECT cast(2 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with float at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with float at same column of first table -- !query @@ -625,12 +535,7 @@ SELECT cast(1 as float) FROM t UNION SELECT cast('2017-12-11 09:30:00.0' as time struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with float at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with float at same column of first table -- !query @@ -639,12 +544,7 @@ SELECT cast(1 as float) FROM t UNION SELECT cast('2017-12-11 09:30:00' as date) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with float at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with float at same column of first table -- !query @@ -725,12 +625,7 @@ SELECT cast(1 as double) FROM t UNION SELECT cast('2' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with double at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with double at same column of first table -- !query @@ -739,12 +634,7 @@ SELECT cast(1 as double) FROM t UNION SELECT cast(2 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with double at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. 
The first column of the second table is boolean type which is not compatible with double at same column of first table -- !query @@ -753,12 +643,7 @@ SELECT cast(1 as double) FROM t UNION SELECT cast('2017-12-11 09:30:00.0' as tim struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with double at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with double at same column of first table -- !query @@ -767,12 +652,7 @@ SELECT cast(1 as double) FROM t UNION SELECT cast('2017-12-11 09:30:00' as date) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with double at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with double at same column of first table -- !query @@ -853,12 +733,7 @@ SELECT cast(1 as decimal(10, 0)) FROM t UNION SELECT cast('2' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with decimal(10,0) at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with decimal(10,0) at same column of first table -- !query @@ -867,12 +742,7 @@ SELECT cast(1 as decimal(10, 0)) FROM t UNION SELECT cast(2 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with decimal(10,0) at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with decimal(10,0) at same column of first table -- !query @@ -881,12 +751,7 @@ SELECT cast(1 as decimal(10, 0)) FROM t UNION SELECT cast('2017-12-11 09:30:00.0 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with decimal(10,0) at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. 
The first column of the second table is timestamp type which is not compatible with decimal(10,0) at same column of first table -- !query @@ -895,12 +760,7 @@ SELECT cast(1 as decimal(10, 0)) FROM t UNION SELECT cast('2017-12-11 09:30:00' struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with decimal(10,0) at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with decimal(10,0) at same column of first table -- !query @@ -981,12 +841,7 @@ SELECT cast(1 as string) FROM t UNION SELECT cast('2' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with string at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with string at same column of first table -- !query @@ -995,12 +850,7 @@ SELECT cast(1 as string) FROM t UNION SELECT cast(2 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with string at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with string at same column of first table -- !query @@ -1027,12 +877,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast(2 as tinyint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is tinyint type which is not compatible with binary at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is tinyint type which is not compatible with binary at same column of first table -- !query @@ -1041,12 +886,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast(2 as smallint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is smallint type which is not compatible with binary at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. 
The first column of the second table is smallint type which is not compatible with binary at same column of first table -- !query @@ -1055,12 +895,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast(2 as int) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is int type which is not compatible with binary at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is int type which is not compatible with binary at same column of first table -- !query @@ -1069,12 +904,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast(2 as bigint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is bigint type which is not compatible with binary at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is bigint type which is not compatible with binary at same column of first table -- !query @@ -1083,12 +913,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast(2 as float) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is float type which is not compatible with binary at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is float type which is not compatible with binary at same column of first table -- !query @@ -1097,12 +922,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast(2 as double) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is double type which is not compatible with binary at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is double type which is not compatible with binary at same column of first table -- !query @@ -1111,12 +931,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast(2 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is decimal(10,0) type which is not compatible with binary at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. 
The first column of the second table is decimal(10,0) type which is not compatible with binary at same column of first table -- !query @@ -1125,12 +940,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast(2 as string) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is string type which is not compatible with binary at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is string type which is not compatible with binary at same column of first table -- !query @@ -1148,12 +958,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast(2 as boolean) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with binary at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with binary at same column of first table -- !query @@ -1162,12 +967,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast('2017-12-11 09:30:00.0' as t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with binary at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with binary at same column of first table -- !query @@ -1176,12 +976,7 @@ SELECT cast('1' as binary) FROM t UNION SELECT cast('2017-12-11 09:30:00' as dat struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with binary at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with binary at same column of first table -- !query @@ -1190,12 +985,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast(2 as tinyint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is tinyint type which is not compatible with boolean at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. 
The first column of the second table is tinyint type which is not compatible with boolean at same column of first table -- !query @@ -1204,12 +994,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast(2 as smallint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is smallint type which is not compatible with boolean at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is smallint type which is not compatible with boolean at same column of first table -- !query @@ -1218,12 +1003,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast(2 as int) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is int type which is not compatible with boolean at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is int type which is not compatible with boolean at same column of first table -- !query @@ -1232,12 +1012,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast(2 as bigint) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is bigint type which is not compatible with boolean at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is bigint type which is not compatible with boolean at same column of first table -- !query @@ -1246,12 +1021,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast(2 as float) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is float type which is not compatible with boolean at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is float type which is not compatible with boolean at same column of first table -- !query @@ -1260,12 +1030,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast(2 as double) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is double type which is not compatible with boolean at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. 
The first column of the second table is double type which is not compatible with boolean at same column of first table -- !query @@ -1274,12 +1039,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast(2 as decimal(10, 0)) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is decimal(10,0) type which is not compatible with boolean at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is decimal(10,0) type which is not compatible with boolean at same column of first table -- !query @@ -1288,12 +1048,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast(2 as string) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is string type which is not compatible with boolean at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is string type which is not compatible with boolean at same column of first table -- !query @@ -1302,12 +1057,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast('2' as binary) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with boolean at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with boolean at same column of first table -- !query @@ -1324,12 +1074,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast('2017-12-11 09:30:00.0' as ti struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with boolean at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is timestamp type which is not compatible with boolean at same column of first table -- !query @@ -1338,12 +1083,7 @@ SELECT cast(1 as boolean) FROM t UNION SELECT cast('2017-12-11 09:30:00' as date struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is date type which is not compatible with boolean at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. 
The first column of the second table is date type which is not compatible with boolean at same column of first table -- !query @@ -1352,12 +1092,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) FROM t UNION SELECT cast(2 as struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is tinyint type which is not compatible with timestamp at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is tinyint type which is not compatible with timestamp at same column of first table -- !query @@ -1366,12 +1101,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) FROM t UNION SELECT cast(2 as struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is smallint type which is not compatible with timestamp at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is smallint type which is not compatible with timestamp at same column of first table -- !query @@ -1380,12 +1110,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) FROM t UNION SELECT cast(2 as struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is int type which is not compatible with timestamp at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is int type which is not compatible with timestamp at same column of first table -- !query @@ -1394,12 +1119,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) FROM t UNION SELECT cast(2 as struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is bigint type which is not compatible with timestamp at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is bigint type which is not compatible with timestamp at same column of first table -- !query @@ -1408,12 +1128,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) FROM t UNION SELECT cast(2 as struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is float type which is not compatible with timestamp at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. 
The first column of the second table is float type which is not compatible with timestamp at same column of first table -- !query @@ -1422,12 +1137,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) FROM t UNION SELECT cast(2 as struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is double type which is not compatible with timestamp at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is double type which is not compatible with timestamp at same column of first table -- !query @@ -1436,12 +1146,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) FROM t UNION SELECT cast(2 as struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is decimal(10,0) type which is not compatible with timestamp at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is decimal(10,0) type which is not compatible with timestamp at same column of first table -- !query @@ -1459,12 +1164,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) FROM t UNION SELECT cast('2' a struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with timestamp at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with timestamp at same column of first table -- !query @@ -1473,12 +1173,7 @@ SELECT cast('2017-12-12 09:30:00.0' as timestamp) FROM t UNION SELECT cast(2 as struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with timestamp at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with timestamp at same column of first table -- !query @@ -1505,12 +1200,7 @@ SELECT cast('2017-12-12 09:30:00' as date) FROM t UNION SELECT cast(2 as tinyint struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is tinyint type which is not compatible with date at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. 
The first column of the second table is tinyint type which is not compatible with date at same column of first table -- !query @@ -1519,12 +1209,7 @@ SELECT cast('2017-12-12 09:30:00' as date) FROM t UNION SELECT cast(2 as smallin struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is smallint type which is not compatible with date at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is smallint type which is not compatible with date at same column of first table -- !query @@ -1533,12 +1218,7 @@ SELECT cast('2017-12-12 09:30:00' as date) FROM t UNION SELECT cast(2 as int) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is int type which is not compatible with date at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is int type which is not compatible with date at same column of first table -- !query @@ -1547,12 +1227,7 @@ SELECT cast('2017-12-12 09:30:00' as date) FROM t UNION SELECT cast(2 as bigint) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is bigint type which is not compatible with date at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is bigint type which is not compatible with date at same column of first table -- !query @@ -1561,12 +1236,7 @@ SELECT cast('2017-12-12 09:30:00' as date) FROM t UNION SELECT cast(2 as float) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is float type which is not compatible with date at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is float type which is not compatible with date at same column of first table -- !query @@ -1575,12 +1245,7 @@ SELECT cast('2017-12-12 09:30:00' as date) FROM t UNION SELECT cast(2 as double) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is double type which is not compatible with date at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. 
The first column of the second table is double type which is not compatible with date at same column of first table -- !query @@ -1589,12 +1254,7 @@ SELECT cast('2017-12-12 09:30:00' as date) FROM t UNION SELECT cast(2 as decimal struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is decimal(10,0) type which is not compatible with date at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is decimal(10,0) type which is not compatible with date at same column of first table -- !query @@ -1612,12 +1272,7 @@ SELECT cast('2017-12-12 09:30:00' as date) FROM t UNION SELECT cast('2' as binar struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with date at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. The first column of the second table is binary type which is not compatible with date at same column of first table -- !query @@ -1626,12 +1281,7 @@ SELECT cast('2017-12-12 09:30:00' as date) FROM t UNION SELECT cast(2 as boolean struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Union can only be performed on tables with the compatible column types. The first column of the second table is boolean type which is not compatible with date at same column of first table" - } -} +Union can only be performed on tables with the compatible column types. 
The first column of the second table is boolean type which is not compatible with date at same column of first table -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/windowFrameCoercion.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/windowFrameCoercion.sql.out index 130c8b8def05e..103465004829d 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/windowFrameCoercion.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/windowFrameCoercion.sql.out @@ -165,12 +165,7 @@ SELECT COUNT(*) OVER (PARTITION BY 1 ORDER BY cast(1 as string) DESC RANGE BETWE struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'RANGE BETWEEN CURRENT ROW AND CAST(1 AS STRING) FOLLOWING' due to data type mismatch: The data type of the upper bound 'string' does not match the expected data type '(numeric or interval day to second or interval year to month or interval)'.; line 1 pos 21" - } -} +cannot resolve 'RANGE BETWEEN CURRENT ROW AND CAST(1 AS STRING) FOLLOWING' due to data type mismatch: The data type of the upper bound 'string' does not match the expected data type '(numeric or interval day to second or interval year to month or interval)'.; line 1 pos 21 -- !query @@ -179,12 +174,7 @@ SELECT COUNT(*) OVER (PARTITION BY 1 ORDER BY cast('1' as binary) DESC RANGE BET struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'RANGE BETWEEN CURRENT ROW AND CAST(1 AS BINARY) FOLLOWING' due to data type mismatch: The data type of the upper bound 'binary' does not match the expected data type '(numeric or interval day to second or interval year to month or interval)'.; line 1 pos 21" - } -} +cannot resolve 'RANGE BETWEEN CURRENT ROW AND CAST(1 AS BINARY) FOLLOWING' due to data type mismatch: The data type of the upper bound 'binary' does not match the expected data type '(numeric or interval day to second or interval year to month or interval)'.; line 1 pos 21 -- !query @@ -193,12 +183,7 @@ SELECT COUNT(*) OVER (PARTITION BY 1 ORDER BY cast(1 as boolean) DESC RANGE BETW struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'RANGE BETWEEN CURRENT ROW AND CAST(1 AS BOOLEAN) FOLLOWING' due to data type mismatch: The data type of the upper bound 'boolean' does not match the expected data type '(numeric or interval day to second or interval year to month or interval)'.; line 1 pos 21" - } -} +cannot resolve 'RANGE BETWEEN CURRENT ROW AND CAST(1 AS BOOLEAN) FOLLOWING' due to data type mismatch: The data type of the upper bound 'boolean' does not match the expected data type '(numeric or interval day to second or interval year to month or interval)'.; line 1 pos 21 -- !query @@ -207,12 +192,7 @@ SELECT COUNT(*) OVER (PARTITION BY 1 ORDER BY cast('2017-12-11 09:30:00.0' as ti struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(PARTITION BY 1 ORDER BY CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) DESC NULLS LAST RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: The data type 'timestamp' used in the order specification does not match the data type 'int' which is used in the range frame.; line 1 pos 21" - } -} 
+cannot resolve '(PARTITION BY 1 ORDER BY CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) DESC NULLS LAST RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: The data type 'timestamp' used in the order specification does not match the data type 'int' which is used in the range frame.; line 1 pos 21 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udaf.sql.out b/sql/core/src/test/resources/sql-tests/results/udaf.sql.out index 3d1a4804c0a45..0605af1c808db 100644 --- a/sql/core/src/test/resources/sql-tests/results/udaf.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udaf.sql.out @@ -31,12 +31,7 @@ SELECT default.myDoubleAvg(int_col1, 3) as my_avg from t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Invalid number of arguments for function spark_catalog.default.mydoubleavg. Expected: 1; Found: 2; line 1 pos 7" - } -} +Invalid number of arguments for function spark_catalog.default.mydoubleavg. Expected: 1; Found: 2; line 1 pos 7 -- !query @@ -53,12 +48,7 @@ SELECT default.udaf1(int_col1) as udaf1 from t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Can not load class 'test.non.existent.udaf' when registering the function 'spark_catalog.default.udaf1', please make sure it is on the classpath; line 1 pos 7" - } -} +Can not load class 'test.non.existent.udaf' when registering the function 'spark_catalog.default.udaf1', please make sure it is on the classpath; line 1 pos 7 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-aggregates_part1.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-aggregates_part1.sql.out index c6e76cf9993ac..f3b4c72a4880e 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-aggregates_part1.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-aggregates_part1.sql.out @@ -475,12 +475,7 @@ having exists (select 1 from onek b struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Found an aggregate function in a correlated predicate that has both outer and local references, which is not supported: sum(DISTINCT (outer(a.four) + b.four))" - } -} +Found an aggregate function in a correlated predicate that has both outer and local references, which is not supported: sum(DISTINCT (outer(a.four) + b.four)) -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-aggregates_part3.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-aggregates_part3.sql.out index a50daf0487700..f44b69eaadcb6 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-aggregates_part3.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-aggregates_part3.sql.out @@ -5,12 +5,7 @@ select udf(max(min(unique1))) from tenk1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "It is not allowed to use an aggregate function in the argument of another aggregate function. Please use the inner aggregate function in a sub-query." - } -} +It is not allowed to use an aggregate function in the argument of another aggregate function. Please use the inner aggregate function in a sub-query. 
-- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-join.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-join.sql.out index 7292ed6fd6dd6..5ba048597973d 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-join.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-join.sql.out @@ -546,12 +546,7 @@ SELECT udf('') AS `xxx`, udf(i) AS i, udf(k), udf(t) AS t struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Reference 'i' is ambiguous, could be: spark_catalog.default.j1_tbl.i, spark_catalog.default.j2_tbl.i.; line 1 pos 29" - } -} +Reference 'i' is ambiguous, could be: spark_catalog.default.j1_tbl.i, spark_catalog.default.j2_tbl.i.; line 1 pos 29 -- !query @@ -3268,12 +3263,7 @@ select * from struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Reference 'f1' is ambiguous, could be: j.f1, j.f1.; line 2 pos 72" - } -} +Reference 'f1' is ambiguous, could be: j.f1, j.f1.; line 2 pos 72 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_having.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_having.sql.out index b00019a667d07..37ab12fbf7fdd 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_having.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_having.sql.out @@ -140,12 +140,7 @@ SELECT udf(a) FROM test_having HAVING udf(min(a)) < udf(max(a)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "grouping expressions sequence is empty, and 'spark_catalog.default.test_having.a' is not an aggregate function. Wrap '(min(spark_catalog.default.test_having.a) AS `min(a#x)`, max(spark_catalog.default.test_having.a) AS `max(a#x)`)' in windowing function(s) or wrap 'spark_catalog.default.test_having.a' in first() (or first_value) if you don't care which value you get." - } -} +grouping expressions sequence is empty, and 'spark_catalog.default.test_having.a' is not an aggregate function. Wrap '(min(spark_catalog.default.test_having.a) AS `min(a#x)`, max(spark_catalog.default.test_having.a) AS `max(a#x)`)' in windowing function(s) or wrap 'spark_catalog.default.test_having.a' in first() (or first_value) if you don't care which value you get. 
-- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_implicit.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_implicit.sql.out index 20bd12c156225..762800748760e 100755 --- a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_implicit.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_implicit.sql.out @@ -210,12 +210,7 @@ SELECT udf(c), udf(count(*)) FROM test_missing_target GROUP BY 3 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "GROUP BY position 3 is not in select list (valid range is [1, 2]); line 1 pos 63" - } -} +GROUP BY position 3 is not in select list (valid range is [1, 2]); line 1 pos 63 -- !query @@ -226,12 +221,7 @@ SELECT udf(count(*)) FROM test_missing_target x, test_missing_target y struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Reference 'b' is ambiguous, could be: x.b, y.b.; line 3 pos 14" - } -} +Reference 'b' is ambiguous, could be: x.b, y.b.; line 3 pos 14 -- !query @@ -414,12 +404,7 @@ SELECT udf(count(udf(x.a))) FROM test_missing_target x, test_missing_target y struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Reference 'b' is ambiguous, could be: x.b, y.b.; line 3 pos 14" - } -} +Reference 'b' is ambiguous, could be: x.b, y.b.; line 3 pos 14 -- !query @@ -444,12 +429,7 @@ SELECT udf(count(udf(b))) FROM test_missing_target x, test_missing_target y struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Reference 'b' is ambiguous, could be: x.b, y.b.; line 1 pos 21" - } -} +Reference 'b' is ambiguous, could be: x.b, y.b.; line 1 pos 21 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-except-all.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-except-all.sql.out index 36f18dab161d7..997308bdbf67a 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/udf-except-all.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-except-all.sql.out @@ -138,12 +138,7 @@ SELECT array(1) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "ExceptAll can only be performed on tables with the compatible column types. The first column of the second table is array type which is not compatible with int at same column of first table" - } -} +ExceptAll can only be performed on tables with the compatible column types. 
The first column of the second table is array type which is not compatible with int at same column of first table -- !query @@ -215,12 +210,7 @@ SELECT k, v FROM tab4 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "ExceptAll can only be performed on tables with the same number of columns, but the first table has 1 columns and the second table has 2 columns" - } -} +ExceptAll can only be performed on tables with the same number of columns, but the first table has 1 columns and the second table has 2 columns -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-except.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-except.sql.out index e355bf541d0a4..0134ad74f5239 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/udf-except.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-except.sql.out @@ -100,9 +100,12 @@ WHERE udf(t1.v) >= (SELECT min(udf(t2.v)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Correlated column is not allowed in predicate (CAST(udf(cast(k as string)) AS STRING) = CAST(udf(cast(outer(k#x) as string)) AS STRING)):\nAggregate [cast(udf(cast(max(cast(udf(cast(v#x as string)) as int)) as string)) as int) AS udf(max(udf(v)))#x]\n+- Filter (cast(udf(cast(k#x as string)) as string) = cast(udf(cast(outer(k#x) as string)) as string))\n +- SubqueryAlias t2\n +- View (`t2`, [k#x,v#x])\n +- Project [cast(k#x as string) AS k#x, cast(v#x as int) AS v#x]\n +- Project [k#x, v#x]\n +- SubqueryAlias t2\n +- LocalRelation [k#x, v#x]\n" - } -} +Correlated column is not allowed in predicate (CAST(udf(cast(k as string)) AS STRING) = CAST(udf(cast(outer(k#x) as string)) AS STRING)): +Aggregate [cast(udf(cast(max(cast(udf(cast(v#x as string)) as int)) as string)) as int) AS udf(max(udf(v)))#x] ++- Filter (cast(udf(cast(k#x as string)) as string) = cast(udf(cast(outer(k#x) as string)) as string)) + +- SubqueryAlias t2 + +- View (`t2`, [k#x,v#x]) + +- Project [cast(k#x as string) AS k#x, cast(v#x as int) AS v#x] + +- Project [k#x, v#x] + +- SubqueryAlias t2 + +- LocalRelation [k#x, v#x] diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-group-analytics.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-group-analytics.sql.out index 04d91422171dd..ba9b85bd5afaa 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/udf-group-analytics.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-group-analytics.sql.out @@ -207,12 +207,7 @@ SELECT course, udf(year), GROUPING(course) FROM courseSales GROUP BY course, udf struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "grouping() can only be used with GroupingSets/Cube/Rollup" - } -} +grouping() can only be used with GroupingSets/Cube/Rollup -- !query @@ -221,12 +216,7 @@ SELECT course, udf(year), GROUPING_ID(course, year) FROM courseSales GROUP BY ud struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "grouping_id() can only be used with GroupingSets/Cube/Rollup" - } -} +grouping_id() can only be used with GroupingSets/Cube/Rollup -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-group-by.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-group-by.sql.out index 
12251c84e9857..cf1cc0dce0170 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/udf-group-by.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-group-by.sql.out @@ -15,12 +15,7 @@ SELECT udf(a), udf(COUNT(b)) FROM testData struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "grouping expressions sequence is empty, and 'testdata.a' is not an aggregate function. Wrap '(CAST(udf(cast(count(b) as string)) AS BIGINT) AS `udf(count(b))`)' in windowing function(s) or wrap 'testdata.a' in first() (or first_value) if you don't care which value you get." - } -} +grouping expressions sequence is empty, and 'testdata.a' is not an aggregate function. Wrap '(CAST(udf(cast(count(b) as string)) AS BIGINT) AS `udf(count(b))`)' in windowing function(s) or wrap 'testdata.a' in first() (or first_value) if you don't care which value you get. -- !query @@ -48,12 +43,7 @@ SELECT udf(a), udf(COUNT(udf(b))) FROM testData GROUP BY b struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "expression 'testdata.a' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get." - } -} +expression 'testdata.a' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get. -- !query @@ -117,12 +107,7 @@ SELECT udf(a + 2), udf(COUNT(b)) FROM testData GROUP BY a + 1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "expression 'testdata.a' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get." - } -} +expression 'testdata.a' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get. -- !query @@ -179,12 +164,7 @@ SELECT udf(COUNT(b)) AS k FROM testData GROUP BY k struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "aggregate functions are not allowed in GROUP BY, but found CAST(udf(cast(count(b) as string)) AS BIGINT)" - } -} +aggregate functions are not allowed in GROUP BY, but found CAST(udf(cast(count(b) as string)) AS BIGINT) -- !query @@ -202,12 +182,7 @@ SELECT k AS a, udf(COUNT(udf(v))) FROM testDataHasSameNameWithAlias GROUP BY udf struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "expression 'testdatahassamenamewithalias.k' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get." - } -} +expression 'testdatahassamenamewithalias.k' is neither present in the group by, nor is it an aggregate function. Add to group by or wrap in first() (or first_value) if you don't care which value you get. -- !query @@ -303,12 +278,7 @@ SELECT udf(id) FROM range(10) HAVING id > 0 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "grouping expressions sequence is empty, and 'id' is not an aggregate function. 
Wrap '()' in windowing function(s) or wrap 'id' in first() (or first_value) if you don't care which value you get." - } -} +grouping expressions sequence is empty, and 'id' is not an aggregate function. Wrap '()' in windowing function(s) or wrap 'id' in first() (or first_value) if you don't care which value you get. -- !query @@ -414,12 +384,7 @@ SELECT every(udf(1)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'every(CAST(udf(cast(1 as string)) AS INT))' due to data type mismatch: argument 1 requires boolean type, however, 'CAST(udf(cast(1 as string)) AS INT)' is of int type.; line 1 pos 7" - } -} +cannot resolve 'every(CAST(udf(cast(1 as string)) AS INT))' due to data type mismatch: argument 1 requires boolean type, however, 'CAST(udf(cast(1 as string)) AS INT)' is of int type.; line 1 pos 7 -- !query @@ -428,12 +393,7 @@ SELECT some(udf(1S)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'some(CAST(udf(cast(1 as string)) AS SMALLINT))' due to data type mismatch: argument 1 requires boolean type, however, 'CAST(udf(cast(1 as string)) AS SMALLINT)' is of smallint type.; line 1 pos 7" - } -} +cannot resolve 'some(CAST(udf(cast(1 as string)) AS SMALLINT))' due to data type mismatch: argument 1 requires boolean type, however, 'CAST(udf(cast(1 as string)) AS SMALLINT)' is of smallint type.; line 1 pos 7 -- !query @@ -442,12 +402,7 @@ SELECT any(udf(1L)) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'any(CAST(udf(cast(1 as string)) AS BIGINT))' due to data type mismatch: argument 1 requires boolean type, however, 'CAST(udf(cast(1 as string)) AS BIGINT)' is of bigint type.; line 1 pos 7" - } -} +cannot resolve 'any(CAST(udf(cast(1 as string)) AS BIGINT))' due to data type mismatch: argument 1 requires boolean type, however, 'CAST(udf(cast(1 as string)) AS BIGINT)' is of bigint type.; line 1 pos 7 -- !query @@ -456,12 +411,7 @@ SELECT udf(every("true")) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'every('true')' due to data type mismatch: argument 1 requires boolean type, however, ''true'' is of string type.; line 1 pos 11" - } -} +cannot resolve 'every('true')' due to data type mismatch: argument 1 requires boolean type, however, ''true'' is of string type.; line 1 pos 11 -- !query @@ -547,12 +497,10 @@ SELECT udf(count(*)) FROM test_agg WHERE count(*) > 1L struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nAggregate/Window/Generate expressions are not valid in where clause of the query.\nExpression in where clause: [(count(1) > 1L)]\nInvalid expressions: [count(1)]" - } -} + +Aggregate/Window/Generate expressions are not valid in where clause of the query. 
+Expression in where clause: [(count(1) > 1L)] +Invalid expressions: [count(1)] -- !query @@ -561,12 +509,10 @@ SELECT udf(count(*)) FROM test_agg WHERE count(*) + 1L > 1L struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nAggregate/Window/Generate expressions are not valid in where clause of the query.\nExpression in where clause: [((count(1) + 1L) > 1L)]\nInvalid expressions: [count(1)]" - } -} + +Aggregate/Window/Generate expressions are not valid in where clause of the query. +Expression in where clause: [((count(1) + 1L) > 1L)] +Invalid expressions: [count(1)] -- !query @@ -575,9 +521,7 @@ SELECT udf(count(*)) FROM test_agg WHERE k = 1 or k = 2 or count(*) + 1L > 1L or struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nAggregate/Window/Generate expressions are not valid in where clause of the query.\nExpression in where clause: [(((test_agg.k = 1) OR (test_agg.k = 2)) OR (((count(1) + 1L) > 1L) OR (max(test_agg.k) > 1)))]\nInvalid expressions: [count(1), max(test_agg.k)]" - } -} + +Aggregate/Window/Generate expressions are not valid in where clause of the query. +Expression in where clause: [(((test_agg.k = 1) OR (test_agg.k = 2)) OR (((count(1) + 1L) > 1L) OR (max(test_agg.k) > 1)))] +Invalid expressions: [count(1), max(test_agg.k)] diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-inline-table.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-inline-table.sql.out index 6713104b299e7..b6c017118a695 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/udf-inline-table.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-inline-table.sql.out @@ -94,12 +94,7 @@ select udf(a), b from values ("one", rand(5)), ("two", 3.0D) as data(a, b) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot evaluate expression rand(5) in inline table definition; line 1 pos 37" - } -} +cannot evaluate expression rand(5) in inline table definition; line 1 pos 37 -- !query @@ -108,12 +103,7 @@ select udf(a), udf(b) from values ("one", 2.0), ("two") as data(a, b) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "expected 2 columns but found 1 columns in row 1; line 1 pos 27" - } -} +expected 2 columns but found 1 columns in row 1; line 1 pos 27 -- !query @@ -122,12 +112,7 @@ select udf(a), udf(b) from values ("one", array(0, 1)), ("two", struct(1, 2)) as struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "incompatible types found in column b for inline table; line 1 pos 27" - } -} +incompatible types found in column b for inline table; line 1 pos 27 -- !query @@ -136,12 +121,7 @@ select udf(a), udf(b) from values ("one"), ("two") as data(a, b) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "expected 2 columns but found 1 columns in row 0; line 1 pos 27" - } -} +expected 2 columns but found 1 columns in row 0; line 1 pos 27 -- !query @@ -150,12 +130,7 @@ select udf(a), udf(b) from values ("one", random_not_exist_func(1)), ("two", 2) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - 
"messageParameters" : { - "message" : "Undefined function: random_not_exist_func. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.random_not_exist_func.; line 1 pos 42" - } -} +Undefined function: random_not_exist_func. This function is neither a built-in/temporary function, nor a persistent function that is qualified as spark_catalog.default.random_not_exist_func.; line 1 pos 42 -- !query @@ -164,12 +139,7 @@ select udf(a), udf(b) from values ("one", count(1)), ("two", 2) as data(a, b) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot evaluate expression count(1) in inline table definition; line 1 pos 42" - } -} +cannot evaluate expression count(1) in inline table definition; line 1 pos 42 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-intersect-all.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-intersect-all.sql.out index 917302bfd9cf7..29febc747ea13 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/udf-intersect-all.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-intersect-all.sql.out @@ -95,12 +95,7 @@ SELECT array(1), udf(2) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "IntersectAll can only be performed on tables with the compatible column types. The first column of the second table is array type which is not compatible with int at same column of first table" - } -} +IntersectAll can only be performed on tables with the compatible column types. The first column of the second table is array type which is not compatible with int at same column of first table -- !query @@ -111,12 +106,7 @@ SELECT udf(k), udf(v) FROM tab2 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "IntersectAll can only be performed on tables with the same number of columns, but the first table has 1 columns and the second table has 2 columns" - } -} +IntersectAll can only be performed on tables with the same number of columns, but the first table has 1 columns and the second table has 2 columns -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-pivot.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-pivot.sql.out index c4a932580bd29..c097acf18b165 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/udf-pivot.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-pivot.sql.out @@ -199,12 +199,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Aggregate expression required for pivot, but 'coursesales.earnings' did not appear in any aggregate function." - } -} +Aggregate expression required for pivot, but 'coursesales.earnings' did not appear in any aggregate function. -- !query @@ -219,12 +214,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Aggregate expression required for pivot, but '__auto_generated_subquery_name.year' did not appear in any aggregate function." - } -} +Aggregate expression required for pivot, but '__auto_generated_subquery_name.year' did not appear in any aggregate function. 
-- !query @@ -276,12 +266,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "It is not allowed to use an aggregate function in the argument of another aggregate function. Please use the inner aggregate function in a sub-query." - } -} +It is not allowed to use an aggregate function in the argument of another aggregate function. Please use the inner aggregate function in a sub-query. -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-udaf.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-udaf.sql.out index 15f70c3489b99..80a3d9af94269 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/udf-udaf.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-udaf.sql.out @@ -31,12 +31,7 @@ SELECT default.myDoubleAvg(udf(int_col1), udf(3)) as my_avg from t1 struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Invalid number of arguments for function spark_catalog.default.mydoubleavg. Expected: 1; Found: 2; line 1 pos 7" - } -} +Invalid number of arguments for function spark_catalog.default.mydoubleavg. Expected: 1; Found: 2; line 1 pos 7 -- !query @@ -53,12 +48,7 @@ SELECT default.udaf1(udf(int_col1)) as udaf1, udf(default.udaf1(udf(int_col1))) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Can not load class 'test.non.existent.udaf' when registering the function 'spark_catalog.default.udaf1', please make sure it is on the classpath; line 1 pos 94" - } -} +Can not load class 'test.non.existent.udaf' when registering the function 'spark_catalog.default.udaf1', please make sure it is on the classpath; line 1 pos 94 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-window.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-window.sql.out index 5779dde1e4c27..076c7dd6a1562 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/udf-window.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-window.sql.out @@ -58,12 +58,7 @@ ROWS BETWEEN CURRENT ROW AND 2147483648 FOLLOWING) FROM testData ORDER BY udf(ca struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'ROWS BETWEEN CURRENT ROW AND 2147483648L FOLLOWING' due to data type mismatch: The data type of the upper bound 'bigint' does not match the expected data type 'int'.; line 1 pos 46" - } -} +cannot resolve 'ROWS BETWEEN CURRENT ROW AND 2147483648L FOLLOWING' due to data type mismatch: The data type of the upper bound 'bigint' does not match the expected data type 'int'.; line 1 pos 46 -- !query @@ -193,12 +188,7 @@ ROWS BETWEEN UNBOUNDED FOLLOWING AND 1 FOLLOWING) FROM testData ORDER BY cate, u struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'ROWS BETWEEN UNBOUNDED FOLLOWING AND 1 FOLLOWING' due to data type mismatch: Window frame upper bound '1' does not follow the lower bound 'unboundedfollowing$()'.; line 1 pos 38" - } -} +cannot resolve 'ROWS BETWEEN UNBOUNDED FOLLOWING AND 1 FOLLOWING' due to data type mismatch: Window frame upper bound '1' does not follow the lower bound 'unboundedfollowing$()'.; line 1 pos 38 -- !query @@ -208,12 +198,7 @@ RANGE BETWEEN CURRENT 
ROW AND 1 FOLLOWING) FROM testData ORDER BY cate, udf(val) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(PARTITION BY CAST(udf(cast(cate as string)) AS STRING) RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: A range window frame cannot be used in an unordered window specification.; line 1 pos 38" - } -} +cannot resolve '(PARTITION BY CAST(udf(cast(cate as string)) AS STRING) RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: A range window frame cannot be used in an unordered window specification.; line 1 pos 38 -- !query @@ -223,12 +208,7 @@ RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING) FROM testData ORDER BY cate, udf(val) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(PARTITION BY CAST(udf(cast(cate as string)) AS STRING) ORDER BY CAST(udf(cast(val as string)) AS INT) ASC NULLS FIRST, testdata.cate ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: A range window frame with value boundaries cannot be used in a window specification with multiple order by expressions: cast(udf(cast(val#x as string)) as int) ASC NULLS FIRST,cate#x ASC NULLS FIRST; line 1 pos 38" - } -} +cannot resolve '(PARTITION BY CAST(udf(cast(cate as string)) AS STRING) ORDER BY CAST(udf(cast(val as string)) AS INT) ASC NULLS FIRST, testdata.cate ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: A range window frame with value boundaries cannot be used in a window specification with multiple order by expressions: cast(udf(cast(val#x as string)) as int) ASC NULLS FIRST,cate#x ASC NULLS FIRST; line 1 pos 38 -- !query @@ -238,12 +218,7 @@ RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING) FROM testData ORDER BY cate, udf(val) struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(PARTITION BY CAST(udf(cast(cate as string)) AS STRING) ORDER BY current_timestamp() ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: The data type 'timestamp' used in the order specification does not match the data type 'int' which is used in the range frame.; line 1 pos 38" - } -} +cannot resolve '(PARTITION BY CAST(udf(cast(cate as string)) AS STRING) ORDER BY current_timestamp() ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: The data type 'timestamp' used in the order specification does not match the data type 'int' which is used in the range frame.; line 1 pos 38 -- !query @@ -253,12 +228,7 @@ RANGE BETWEEN 1 FOLLOWING AND 1 PRECEDING) FROM testData ORDER BY udf(cate), val struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'RANGE BETWEEN 1 FOLLOWING AND 1 PRECEDING' due to data type mismatch: The lower bound of a window frame must be less than or equal to the upper bound; line 1 pos 38" - } -} +cannot resolve 'RANGE BETWEEN 1 FOLLOWING AND 1 PRECEDING' due to data type mismatch: The lower bound of a window frame must be less than or equal to the upper bound; line 1 pos 38 -- !query @@ -268,12 +238,13 @@ RANGE BETWEEN CURRENT ROW AND current_date PRECEDING) FROM testData ORDER BY cat struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException 
-{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nFrame bound value must be a literal.(line 2, pos 30)\n\n== SQL ==\nSELECT udf(val), cate, count(val) OVER(PARTITION BY udf(cate) ORDER BY udf(val)\nRANGE BETWEEN CURRENT ROW AND current_date PRECEDING) FROM testData ORDER BY cate, val(val)\n------------------------------^^^\n" - } -} + +Frame bound value must be a literal.(line 2, pos 30) + +== SQL == +SELECT udf(val), cate, count(val) OVER(PARTITION BY udf(cate) ORDER BY udf(val) +RANGE BETWEEN CURRENT ROW AND current_date PRECEDING) FROM testData ORDER BY cate, val(val) +------------------------------^^^ -- !query @@ -350,12 +321,7 @@ SELECT udf(val), cate, row_number() OVER(PARTITION BY cate) FROM testData ORDER struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Window function row_number() requires window to be ordered, please add ORDER BY clause. For example SELECT row_number()(value_expr) OVER (PARTITION BY window_partition ORDER BY window_ordering) from table" - } -} +Window function row_number() requires window to be ordered, please add ORDER BY clause. For example SELECT row_number()(value_expr) OVER (PARTITION BY window_partition ORDER BY window_ordering) from table -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/window.sql.out b/sql/core/src/test/resources/sql-tests/results/window.sql.out index d533505a0a454..dd7a159936e29 100644 --- a/sql/core/src/test/resources/sql-tests/results/window.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/window.sql.out @@ -102,12 +102,7 @@ ROWS BETWEEN CURRENT ROW AND 2147483648 FOLLOWING) FROM testData ORDER BY cate, struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'ROWS BETWEEN CURRENT ROW AND 2147483648L FOLLOWING' due to data type mismatch: The data type of the upper bound 'bigint' does not match the expected data type 'int'.; line 1 pos 41" - } -} +cannot resolve 'ROWS BETWEEN CURRENT ROW AND 2147483648L FOLLOWING' due to data type mismatch: The data type of the upper bound 'bigint' does not match the expected data type 'int'.; line 1 pos 41 -- !query @@ -329,12 +324,7 @@ ORDER BY cate, val_date struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(PARTITION BY testdata.cate ORDER BY testdata.val_date ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND INTERVAL '1 02:03:04.001' DAY TO SECOND FOLLOWING)' due to data type mismatch: The data type 'date' used in the order specification does not match the data type 'interval day to second' which is used in the range frame.; line 1 pos 46" - } -} +cannot resolve '(PARTITION BY testdata.cate ORDER BY testdata.val_date ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND INTERVAL '1 02:03:04.001' DAY TO SECOND FOLLOWING)' due to data type mismatch: The data type 'date' used in the order specification does not match the data type 'interval day to second' which is used in the range frame.; line 1 pos 46 -- !query @@ -361,12 +351,7 @@ ROWS BETWEEN UNBOUNDED FOLLOWING AND 1 FOLLOWING) FROM testData ORDER BY cate, v struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'ROWS BETWEEN UNBOUNDED FOLLOWING AND 1 FOLLOWING' due to data type mismatch: Window frame upper bound '1' does not follow 
the lower bound 'unboundedfollowing$()'.; line 1 pos 33" - } -} +cannot resolve 'ROWS BETWEEN UNBOUNDED FOLLOWING AND 1 FOLLOWING' due to data type mismatch: Window frame upper bound '1' does not follow the lower bound 'unboundedfollowing$()'.; line 1 pos 33 -- !query @@ -376,12 +361,7 @@ RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING) FROM testData ORDER BY cate, val struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(PARTITION BY testdata.cate RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: A range window frame cannot be used in an unordered window specification.; line 1 pos 33" - } -} +cannot resolve '(PARTITION BY testdata.cate RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: A range window frame cannot be used in an unordered window specification.; line 1 pos 33 -- !query @@ -391,12 +371,7 @@ RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING) FROM testData ORDER BY cate, val struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(PARTITION BY testdata.cate ORDER BY testdata.val ASC NULLS FIRST, testdata.cate ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: A range window frame with value boundaries cannot be used in a window specification with multiple order by expressions: val#x ASC NULLS FIRST,cate#x ASC NULLS FIRST; line 1 pos 33" - } -} +cannot resolve '(PARTITION BY testdata.cate ORDER BY testdata.val ASC NULLS FIRST, testdata.cate ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: A range window frame with value boundaries cannot be used in a window specification with multiple order by expressions: val#x ASC NULLS FIRST,cate#x ASC NULLS FIRST; line 1 pos 33 -- !query @@ -406,12 +381,7 @@ RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING) FROM testData ORDER BY cate, val struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve '(PARTITION BY testdata.cate ORDER BY current_timestamp() ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: The data type 'timestamp' used in the order specification does not match the data type 'int' which is used in the range frame.; line 1 pos 33" - } -} +cannot resolve '(PARTITION BY testdata.cate ORDER BY current_timestamp() ASC NULLS FIRST RANGE BETWEEN CURRENT ROW AND 1 FOLLOWING)' due to data type mismatch: The data type 'timestamp' used in the order specification does not match the data type 'int' which is used in the range frame.; line 1 pos 33 -- !query @@ -421,12 +391,7 @@ RANGE BETWEEN 1 FOLLOWING AND 1 PRECEDING) FROM testData ORDER BY cate, val struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "cannot resolve 'RANGE BETWEEN 1 FOLLOWING AND 1 PRECEDING' due to data type mismatch: The lower bound of a window frame must be less than or equal to the upper bound; line 1 pos 33" - } -} +cannot resolve 'RANGE BETWEEN 1 FOLLOWING AND 1 PRECEDING' due to data type mismatch: The lower bound of a window frame must be less than or equal to the upper bound; line 1 pos 33 -- !query @@ -436,12 +401,13 @@ RANGE BETWEEN CURRENT ROW AND current_date PRECEDING) FROM testData ORDER BY cat struct<> -- !query output 
org.apache.spark.sql.catalyst.parser.ParseException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "\nFrame bound value must be a literal.(line 2, pos 30)\n\n== SQL ==\nSELECT val, cate, count(val) OVER(PARTITION BY cate ORDER BY val\nRANGE BETWEEN CURRENT ROW AND current_date PRECEDING) FROM testData ORDER BY cate, val\n------------------------------^^^\n" - } -} + +Frame bound value must be a literal.(line 2, pos 30) + +== SQL == +SELECT val, cate, count(val) OVER(PARTITION BY cate ORDER BY val +RANGE BETWEEN CURRENT ROW AND current_date PRECEDING) FROM testData ORDER BY cate, val +------------------------------^^^ -- !query @@ -518,12 +484,7 @@ SELECT val, cate, row_number() OVER(PARTITION BY cate) FROM testData ORDER BY ca struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Window function row_number() requires window to be ordered, please add ORDER BY clause. For example SELECT row_number()(value_expr) OVER (PARTITION BY window_partition ORDER BY window_ordering) from table" - } -} +Window function row_number() requires window to be ordered, please add ORDER BY clause. For example SELECT row_number()(value_expr) OVER (PARTITION BY window_partition ORDER BY window_ordering) from table -- !query @@ -595,12 +556,7 @@ FROM testData ORDER BY cate, val struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "window aggregate function with filter predicate is not supported yet." - } -} +window aggregate function with filter predicate is not supported yet. -- !query @@ -1241,12 +1197,7 @@ FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Window specification w is not defined in the WINDOW clause." - } -} +Window specification w is not defined in the WINDOW clause. -- !query @@ -1258,9 +1209,4 @@ FROM struct<> -- !query output org.apache.spark.sql.AnalysisException -{ - "errorClass" : "LEGACY", - "messageParameters" : { - "message" : "Window specification w is not defined in the WINDOW clause." - } -} +Window specification w is not defined in the WINDOW clause. diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestHelper.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestHelper.scala index 32a2f71d5f727..dd20c416f5251 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestHelper.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestHelper.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql import scala.util.control.NonFatal -import org.apache.spark.{ErrorMessageFormat, SparkException, SparkThrowable} +import org.apache.spark.{SparkException, SparkThrowable} import org.apache.spark.ErrorMessageFormat.MINIMAL import org.apache.spark.SparkThrowableHelper.getMessage import org.apache.spark.sql.catalyst.planning.PhysicalOperation @@ -73,14 +73,6 @@ trait SQLQueryTestHelper { if (isSorted(df.queryExecution.analyzed)) (schema, answer) else (schema, answer.sorted) } - private def toLegacyJson(msg: String, format: ErrorMessageFormat.Value): String = { - val e = new Throwable with SparkThrowable { - override val getErrorClass = null - override val getMessage = msg - } - getMessage(e, format) - } - /** * This method handles exceptions occurred during query execution as they may need special care * to become comparable to the expected output. 
@@ -99,7 +91,7 @@ trait SQLQueryTestHelper { // Also implement a crude way of masking expression IDs in the error message // with a generic pattern "###". val msg = if (a.plan.nonEmpty) a.getSimpleMessage else a.getMessage - (emptySchema, Seq(a.getClass.getName, toLegacyJson(msg.replaceAll("#\\d+", "#x"), format))) + (emptySchema, Seq(a.getClass.getName, msg.replaceAll("#\\d+", "#x"))) case s: SparkException if s.getCause != null => // For a runtime exception, it is hard to match because its message contains // information of stage, task ID, etc. @@ -108,11 +100,11 @@ trait SQLQueryTestHelper { case e: SparkThrowable with Throwable if e.getErrorClass != null => (emptySchema, Seq(e.getClass.getName, getMessage(e, format))) case cause => - (emptySchema, Seq(cause.getClass.getName, toLegacyJson(cause.getMessage, format))) + (emptySchema, Seq(cause.getClass.getName, cause.getMessage)) } case NonFatal(e) => // If there is an exception, put the exception class followed by the message. - (emptySchema, Seq(e.getClass.getName, toLegacyJson(e.getMessage, format))) + (emptySchema, Seq(e.getClass.getName, e.getMessage)) } } }
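
Note: the contract the reworked handleExceptions encodes is easy to state in isolation. Exceptions that carry an error class are still rendered through SparkThrowableHelper.getMessage in the MINIMAL (JSON) format, while legacy exceptions (null error class) now surface their raw message directly — which is exactly what the golden-file hunks above reflect, and why the "errorClass" : "LEGACY" envelope (which carried no information beyond the message itself) disappears. Below is a minimal, self-contained sketch of that dispatch. It uses toy stand-ins (ToyThrowable, renderMinimalJson, GoldenFileRenderingSketch are hypothetical names), not the real SparkThrowable or the real MINIMAL serializer, which also emits messageParameters, query context, and so on:

    object GoldenFileRenderingSketch {
      trait ToyThrowable {
        def getErrorClass: String
        def getMessage: String
      }

      // Stand-in for SparkThrowableHelper.getMessage(e, ErrorMessageFormat.MINIMAL);
      // the real output also includes messageParameters and query context.
      def renderMinimalJson(e: ToyThrowable): String =
        s"""{ "errorClass" : "${e.getErrorClass}", "message" : "${e.getMessage}" }"""

      // The rule this patch enforces: only error-class exceptions are serialized
      // as JSON; legacy exceptions keep their plain message in the golden files.
      def render(e: ToyThrowable): String =
        if (e.getErrorClass != null) renderMinimalJson(e) else e.getMessage

      def main(args: Array[String]): Unit = {
        val legacy = new ToyThrowable {
          val getErrorClass: String = null
          val getMessage: String =
            "Window specification w is not defined in the WINDOW clause."
        }
        val classed = new ToyThrowable {
          val getErrorClass: String = "DIVIDE_BY_ZERO"
          val getMessage: String = "Division by zero."
        }
        println(render(legacy))  // plain text, as in the hunks above
        println(render(classed)) // JSON envelope, unchanged by this patch
      }
    }

Under this sketch, deleting toLegacyJson is the natural consequence: once legacy messages are passed through verbatim, there is no longer any caller that needs to wrap a bare string in a synthetic SparkThrowable just to feed it to getMessage.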