Skip to content

Commit

Permalink
[SPARK-37149][SQL][FOLLOWUP] Update error messages for arithmetic overflow under ANSI mode

Browse files Browse the repository at this point in the history

### What changes were proposed in this pull request?
This is a follow-up PR for SPARK-37149. This PR updates the wording of arithmetic overflow error messages under ANSI mode to make them more informative.

### Why are the changes needed?
Make error messages more informative.

### Does this PR introduce _any_ user-facing change?
Yes. Certain error messages will be changed.

### How was this patch tested?
Existing tests.

Closes #34479 from allisonwang-db/spark-37149-follow-up.

Authored-by: allisonwang-db <allison.wang@databricks.com>
Signed-off-by: Hyukjin Kwon <gurwls223@apache.org>
  • Loading branch information
allisonwang-db authored and HyukjinKwon committed Nov 5, 2021
1 parent d7a608d commit 840ce67
Show file tree
Hide file tree
Showing 16 changed files with 65 additions and 66 deletions.
6 changes: 3 additions & 3 deletions core/src/main/resources/error/error-classes.json
Original file line number Diff line number Diff line change
Expand Up @@ -8,22 +8,22 @@
"sqlState" : "22005"
},
"CANNOT_CHANGE_DECIMAL_PRECISION" : {
"message" : [ "%s cannot be represented as Decimal(%s, %s). You can set %s to false to bypass this error." ],
"message" : [ "%s cannot be represented as Decimal(%s, %s). If necessary set %s to false to bypass this error." ],
"sqlState" : "22005"
},
"CANNOT_PARSE_DECIMAL" : {
"message" : [ "Cannot parse decimal" ],
"sqlState" : "42000"
},
"CAST_CAUSES_OVERFLOW" : {
"message" : [ "Casting %s to %s causes overflow. You can use 'try_cast' or set %s to false to bypass this error." ],
"message" : [ "Casting %s to %s causes overflow. To return NULL instead, use 'try_cast'. If necessary set %s to false to bypass this error." ],
"sqlState" : "22005"
},
"CONCURRENT_QUERY" : {
"message" : [ "Another instance of this query was just started by a concurrent session." ]
},
"DIVIDE_BY_ZERO" : {
"message" : [ "divide by zero. You can use 'try_divide' or set %s to false (except for ANSI interval type) to bypass this error." ],
"message" : [ "divide by zero. To return NULL instead, use 'try_divide'. If necessary set %s to false (except for ANSI interval type) to bypass this error." ],
"sqlState" : "22012"
},
"DUPLICATE_KEY" : {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -125,7 +125,7 @@ class SparkThrowableSuite extends SparkFunSuite {

// Does not fail with too many args (expects 0 args)
assert(getMessage("DIVIDE_BY_ZERO", Array("foo", "bar")) ==
"divide by zero. You can use 'try_divide' or set foo to false " +
"divide by zero. To return NULL instead, use 'try_divide'. If necessary set foo to false " +
"(except for ANSI interval type) to bypass this error.")
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -42,15 +42,15 @@ object MathUtils {

def toIntExact(a: Long): Int = withOverflow(Math.toIntExact(a))

def floorDiv(a: Int, b: Int): Int = withOverflow(Math.floorDiv(a, b), Some("try_divide"))
def floorDiv(a: Int, b: Int): Int = withOverflow(Math.floorDiv(a, b), hint = "try_divide")

def floorDiv(a: Long, b: Long): Long = withOverflow(Math.floorDiv(a, b), Some("try_divide"))
def floorDiv(a: Long, b: Long): Long = withOverflow(Math.floorDiv(a, b), hint = "try_divide")

def floorMod(a: Int, b: Int): Int = withOverflow(Math.floorMod(a, b))

def floorMod(a: Long, b: Long): Long = withOverflow(Math.floorMod(a, b))

private def withOverflow[A](f: => A, hint: Option[String] = None): A = {
private def withOverflow[A](f: => A, hint: String = ""): A = {
try {
f
} catch {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -220,7 +220,7 @@ object QueryExecutionErrors {
}

def overflowInIntegralDivideError(): ArithmeticException = {
arithmeticOverflowError("Overflow in integral divide", Some("try_divide"))
arithmeticOverflowError("Overflow in integral divide", "try_divide")
}

def mapSizeExceedArraySizeWhenZipMapError(size: Int): RuntimeException = {
Expand Down Expand Up @@ -435,11 +435,10 @@ object QueryExecutionErrors {
s"to false to bypass this error.")
}

def arithmeticOverflowError(
message: String, hint: Option[String] = None): ArithmeticException = {
new ArithmeticException(s"$message. You can ${hint.map(x => s"use '$x' or ").getOrElse("")}" +
s"set ${SQLConf.ANSI_ENABLED.key} to false (except for ANSI interval type) " +
"to bypass this error.")
def arithmeticOverflowError(message: String, hint: String = ""): ArithmeticException = {
val alternative = if (hint.nonEmpty) s" To return NULL instead, use '$hint'." else ""
new ArithmeticException(s"$message.$alternative If necessary set " +
s"${SQLConf.ANSI_ENABLED.key} to false (except for ANSI interval type) to bypass this error.")
}

def unaryMinusCauseOverflowError(originValue: AnyVal): ArithmeticException = {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ select (5e36BD + 0.1) + 5e36BD
struct<>
-- !query output
org.apache.spark.SparkArithmeticException
Decimal(expanded,10000000000000000000000000000000000000.1,39,1}) cannot be represented as Decimal(38, 1). You can set spark.sql.ansi.enabled to false to bypass this error.
Decimal(expanded,10000000000000000000000000000000000000.1,39,1}) cannot be represented as Decimal(38, 1). If necessary set spark.sql.ansi.enabled to false to bypass this error.


-- !query
Expand All @@ -85,7 +85,7 @@ select (-4e36BD - 0.1) - 7e36BD
struct<>
-- !query output
org.apache.spark.SparkArithmeticException
Decimal(expanded,-11000000000000000000000000000000000000.1,39,1}) cannot be represented as Decimal(38, 1). You can set spark.sql.ansi.enabled to false to bypass this error.
Decimal(expanded,-11000000000000000000000000000000000000.1,39,1}) cannot be represented as Decimal(38, 1). If necessary set spark.sql.ansi.enabled to false to bypass this error.


-- !query
Expand All @@ -94,7 +94,7 @@ select 12345678901234567890.0 * 12345678901234567890.0
struct<>
-- !query output
org.apache.spark.SparkArithmeticException
Decimal(expanded,152415787532388367501905199875019052100,39,0}) cannot be represented as Decimal(38, 2). You can set spark.sql.ansi.enabled to false to bypass this error.
Decimal(expanded,152415787532388367501905199875019052100,39,0}) cannot be represented as Decimal(38, 2). If necessary set spark.sql.ansi.enabled to false to bypass this error.


-- !query
Expand All @@ -103,7 +103,7 @@ select 1e35BD / 0.1
struct<>
-- !query output
org.apache.spark.SparkArithmeticException
Decimal(expanded,1000000000000000000000000000000000000,37,0}) cannot be represented as Decimal(38, 6). You can set spark.sql.ansi.enabled to false to bypass this error.
Decimal(expanded,1000000000000000000000000000000000000,37,0}) cannot be represented as Decimal(38, 6). If necessary set spark.sql.ansi.enabled to false to bypass this error.


-- !query
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -210,7 +210,7 @@ select interval '2 seconds' / 0
struct<>
-- !query output
org.apache.spark.SparkArithmeticException
divide by zero. You can use 'try_divide' or set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
divide by zero. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.


-- !query
Expand Down Expand Up @@ -243,7 +243,7 @@ select interval '2' year / 0
struct<>
-- !query output
org.apache.spark.SparkArithmeticException
divide by zero. You can use 'try_divide' or set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
divide by zero. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.


-- !query
Expand Down Expand Up @@ -1750,7 +1750,7 @@ select -(a) from values (interval '-2147483648 months', interval '2147483647 mon
struct<>
-- !query output
java.lang.ArithmeticException
integer overflow. You can set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
integer overflow. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.


-- !query
Expand All @@ -1759,7 +1759,7 @@ select a - b from values (interval '-2147483648 months', interval '2147483647 mo
struct<>
-- !query output
java.lang.ArithmeticException
integer overflow. You can set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
integer overflow. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.


-- !query
Expand All @@ -1768,7 +1768,7 @@ select b + interval '1 month' from values (interval '-2147483648 months', interv
struct<>
-- !query output
java.lang.ArithmeticException
integer overflow. You can set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
integer overflow. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.


-- !query
Expand Down Expand Up @@ -1997,7 +1997,7 @@ SELECT (INTERVAL '-178956970-8' YEAR TO MONTH) / -1
struct<>
-- !query output
java.lang.ArithmeticException
Overflow in integral divide. You can use 'try_divide' or set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
Overflow in integral divide. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.


-- !query
Expand All @@ -2006,7 +2006,7 @@ SELECT (INTERVAL '-178956970-8' YEAR TO MONTH) / -1L
struct<>
-- !query output
java.lang.ArithmeticException
Overflow in integral divide. You can use 'try_divide' or set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
Overflow in integral divide. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.


-- !query
Expand Down Expand Up @@ -2049,7 +2049,7 @@ SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1
struct<>
-- !query output
java.lang.ArithmeticException
Overflow in integral divide. You can use 'try_divide' or set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
Overflow in integral divide. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.


-- !query
Expand All @@ -2058,7 +2058,7 @@ SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1L
struct<>
-- !query output
java.lang.ArithmeticException
Overflow in integral divide. You can use 'try_divide' or set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
Overflow in integral divide. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.


-- !query
Expand Down
18 changes: 9 additions & 9 deletions sql/core/src/test/resources/sql-tests/results/interval.sql.out
Original file line number Diff line number Diff line change
Expand Up @@ -204,7 +204,7 @@ select interval '2 seconds' / 0
struct<>
-- !query output
org.apache.spark.SparkArithmeticException
divide by zero. You can use 'try_divide' or set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
divide by zero. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.


-- !query
Expand Down Expand Up @@ -237,7 +237,7 @@ select interval '2' year / 0
struct<>
-- !query output
org.apache.spark.SparkArithmeticException
divide by zero. You can use 'try_divide' or set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
divide by zero. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.


-- !query
Expand Down Expand Up @@ -1739,7 +1739,7 @@ select -(a) from values (interval '-2147483648 months', interval '2147483647 mon
struct<>
-- !query output
java.lang.ArithmeticException
integer overflow. You can set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
integer overflow. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.


-- !query
Expand All @@ -1748,7 +1748,7 @@ select a - b from values (interval '-2147483648 months', interval '2147483647 mo
struct<>
-- !query output
java.lang.ArithmeticException
integer overflow. You can set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
integer overflow. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.


-- !query
Expand All @@ -1757,7 +1757,7 @@ select b + interval '1 month' from values (interval '-2147483648 months', interv
struct<>
-- !query output
java.lang.ArithmeticException
integer overflow. You can set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
integer overflow. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.


-- !query
Expand Down Expand Up @@ -1986,7 +1986,7 @@ SELECT (INTERVAL '-178956970-8' YEAR TO MONTH) / -1
struct<>
-- !query output
java.lang.ArithmeticException
Overflow in integral divide. You can use 'try_divide' or set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
Overflow in integral divide. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.


-- !query
Expand All @@ -1995,7 +1995,7 @@ SELECT (INTERVAL '-178956970-8' YEAR TO MONTH) / -1L
struct<>
-- !query output
java.lang.ArithmeticException
Overflow in integral divide. You can use 'try_divide' or set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
Overflow in integral divide. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.


-- !query
Expand Down Expand Up @@ -2038,7 +2038,7 @@ SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1
struct<>
-- !query output
java.lang.ArithmeticException
Overflow in integral divide. You can use 'try_divide' or set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
Overflow in integral divide. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.


-- !query
Expand All @@ -2047,7 +2047,7 @@ SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1L
struct<>
-- !query output
java.lang.ArithmeticException
Overflow in integral divide. You can use 'try_divide' or set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
Overflow in integral divide. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.


-- !query
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -179,7 +179,7 @@ SELECT CASE WHEN 1=0 THEN 1/0 WHEN 1=1 THEN 1 ELSE 2/0 END
struct<>
-- !query output
org.apache.spark.SparkArithmeticException
divide by zero. You can use 'try_divide' or set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
divide by zero. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.


-- !query
Expand All @@ -188,7 +188,7 @@ SELECT CASE 1 WHEN 0 THEN 1/0 WHEN 1 THEN 1 ELSE 2/0 END
struct<>
-- !query output
org.apache.spark.SparkArithmeticException
divide by zero. You can use 'try_divide' or set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
divide by zero. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.


-- !query
Expand All @@ -197,7 +197,7 @@ SELECT CASE WHEN i > 100 THEN 1/0 ELSE 0 END FROM case_tbl
struct<>
-- !query output
org.apache.spark.SparkArithmeticException
divide by zero. You can use 'try_divide' or set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
divide by zero. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.


-- !query
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -325,7 +325,7 @@ SELECT int(float('2147483647'))
struct<>
-- !query output
org.apache.spark.SparkArithmeticException
Casting 2.14748365E9 to int causes overflow. You can use 'try_cast' or set spark.sql.ansi.enabled to false to bypass this error.
Casting 2.14748365E9 to int causes overflow. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.


-- !query
Expand All @@ -342,7 +342,7 @@ SELECT int(float('-2147483900'))
struct<>
-- !query output
org.apache.spark.SparkArithmeticException
Casting -2.1474839E9 to int causes overflow. You can use 'try_cast' or set spark.sql.ansi.enabled to false to bypass this error.
Casting -2.1474839E9 to int causes overflow. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.


-- !query
Expand Down Expand Up @@ -375,7 +375,7 @@ SELECT bigint(float('-9223380000000000000'))
struct<>
-- !query output
org.apache.spark.SparkArithmeticException
Casting -9.22338E18 to int causes overflow. You can use 'try_cast' or set spark.sql.ansi.enabled to false to bypass this error.
Casting -9.22338E18 to int causes overflow. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.


-- !query
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -833,7 +833,7 @@ SELECT bigint(double('-9223372036854780000'))
struct<>
-- !query output
org.apache.spark.SparkArithmeticException
Casting -9.22337203685478E18 to long causes overflow. You can use 'try_cast' or set spark.sql.ansi.enabled to false to bypass this error.
Casting -9.22337203685478E18 to long causes overflow. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.


-- !query
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -200,7 +200,7 @@ SELECT '' AS five, i.f1, i.f1 * smallint('2') AS x FROM INT4_TBL i
struct<>
-- !query output
java.lang.ArithmeticException
integer overflow. You can set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
integer overflow. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.


-- !query
Expand All @@ -220,7 +220,7 @@ SELECT '' AS five, i.f1, i.f1 * int('2') AS x FROM INT4_TBL i
struct<>
-- !query output
java.lang.ArithmeticException
integer overflow. You can set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
integer overflow. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.


-- !query
Expand All @@ -240,7 +240,7 @@ SELECT '' AS five, i.f1, i.f1 + smallint('2') AS x FROM INT4_TBL i
struct<>
-- !query output
java.lang.ArithmeticException
integer overflow. You can set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
integer overflow. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.


-- !query
Expand All @@ -261,7 +261,7 @@ SELECT '' AS five, i.f1, i.f1 + int('2') AS x FROM INT4_TBL i
struct<>
-- !query output
java.lang.ArithmeticException
integer overflow. You can set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
integer overflow. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.


-- !query
Expand All @@ -282,7 +282,7 @@ SELECT '' AS five, i.f1, i.f1 - smallint('2') AS x FROM INT4_TBL i
struct<>
-- !query output
java.lang.ArithmeticException
integer overflow. You can set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
integer overflow. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.


-- !query
Expand All @@ -303,7 +303,7 @@ SELECT '' AS five, i.f1, i.f1 - int('2') AS x FROM INT4_TBL i
struct<>
-- !query output
java.lang.ArithmeticException
integer overflow. You can set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
integer overflow. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.


-- !query
Expand Down
Loading

0 comments on commit 840ce67

Please sign in to comment.