diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index 301f8d0b99935..cc4ecc6efebca 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -8,7 +8,7 @@
     "sqlState" : "22005"
   },
   "CANNOT_CHANGE_DECIMAL_PRECISION" : {
-    "message" : [ "%s cannot be represented as Decimal(%s, %s)." ],
+    "message" : [ "%s cannot be represented as Decimal(%s, %s). You can set %s to false to bypass this error." ],
     "sqlState" : "22005"
   },
   "CANNOT_PARSE_DECIMAL" : {
@@ -16,14 +16,14 @@
     "sqlState" : "42000"
   },
   "CAST_CAUSES_OVERFLOW" : {
-    "message" : [ "Casting %s to %s causes overflow" ],
+    "message" : [ "Casting %s to %s causes overflow. You can use 'try_cast' or set %s to false to bypass this error." ],
     "sqlState" : "22005"
   },
   "CONCURRENT_QUERY" : {
     "message" : [ "Another instance of this query was just started by a concurrent session." ]
   },
   "DIVIDE_BY_ZERO" : {
-    "message" : [ "divide by zero" ],
+    "message" : [ "divide by zero. You can use 'try_divide' or set %s to false (except for ANSI interval type) to bypass this error." ],
     "sqlState" : "22012"
   },
   "DUPLICATE_KEY" : {
diff --git a/core/src/test/scala/org/apache/spark/SparkThrowableSuite.scala b/core/src/test/scala/org/apache/spark/SparkThrowableSuite.scala
index 1cd3ba3b916d8..84353a5f23b99 100644
--- a/core/src/test/scala/org/apache/spark/SparkThrowableSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkThrowableSuite.scala
@@ -124,7 +124,9 @@ class SparkThrowableSuite extends SparkFunSuite {
     }
 
     // Does not fail with too many args (expects 0 args)
-    assert(getMessage("DIVIDE_BY_ZERO", Array("foo", "bar")) == "divide by zero")
+    assert(getMessage("DIVIDE_BY_ZERO", Array("foo", "bar")) ==
+      "divide by zero. You can use 'try_divide' or set foo to false " +
+        "(except for ANSI interval type) to bypass this error.")
   }
 
   test("Error message is formatted") {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
index 06e8982f64ef5..6cd4ae9dab813 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
@@ -23,7 +23,7 @@ import org.apache.spark.sql.catalyst.expressions.codegen._
 import org.apache.spark.sql.catalyst.expressions.codegen.Block._
 import org.apache.spark.sql.catalyst.trees.TreePattern.{BINARY_ARITHMETIC, TreePattern, UNARY_POSITIVE}
-import org.apache.spark.sql.catalyst.util.{IntervalUtils, TypeUtils}
+import org.apache.spark.sql.catalyst.util.{IntervalUtils, MathUtils, TypeUtils}
 import org.apache.spark.sql.errors.QueryExecutionErrors
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types._
 
@@ -69,9 +69,9 @@ case class UnaryMinus(
           """.stripMargin
       })
     case IntegerType | LongType if failOnError =>
+      val mathUtils = MathUtils.getClass.getCanonicalName.stripSuffix("$")
       nullSafeCodeGen(ctx, ev, eval => {
-        val mathClass = classOf[Math].getName
-        s"${ev.value} = $mathClass.negateExact($eval);"
+        s"${ev.value} = $mathUtils.negateExact($eval);"
       })
     case dt: NumericType => nullSafeCodeGen(ctx, ev, eval => {
       val originValue = ctx.freshName("origin")
@@ -87,8 +87,8 @@ case class UnaryMinus(
       defineCodeGen(ctx, ev, c => s"$iu.$method($c)")
     case _: AnsiIntervalType =>
       nullSafeCodeGen(ctx, ev, eval => {
-        val mathClass = classOf[Math].getName
-        s"${ev.value} = $mathClass.negateExact($eval);"
+        val mathUtils = MathUtils.getClass.getCanonicalName.stripSuffix("$")
+        s"${ev.value} = $mathUtils.negateExact($eval);"
       })
   }
 
@@ -96,8 +96,8 @@ case class UnaryMinus(
     case CalendarIntervalType if failOnError =>
      IntervalUtils.negateExact(input.asInstanceOf[CalendarInterval])
     case CalendarIntervalType => IntervalUtils.negate(input.asInstanceOf[CalendarInterval])
-    case _: DayTimeIntervalType => Math.negateExact(input.asInstanceOf[Long])
-    case _: YearMonthIntervalType => Math.negateExact(input.asInstanceOf[Int])
+    case _: DayTimeIntervalType => MathUtils.negateExact(input.asInstanceOf[Long])
+    case _: YearMonthIntervalType => MathUtils.negateExact(input.asInstanceOf[Int])
     case _ => numeric.negate(input)
   }
 
@@ -191,10 +191,12 @@ case class Abs(child: Expression, failOnError: Boolean = SQLConf.get.ansiEnabled
           |""".stripMargin)
 
     case IntegerType | LongType if failOnError =>
-      defineCodeGen(ctx, ev, c => s"$c < 0 ? java.lang.Math.negateExact($c) : $c")
+      val mathUtils = MathUtils.getClass.getCanonicalName.stripSuffix("$")
+      defineCodeGen(ctx, ev, c => s"$c < 0 ? $mathUtils.negateExact($c) : $c")
 
     case _: AnsiIntervalType =>
-      defineCodeGen(ctx, ev, c => s"$c < 0 ? java.lang.Math.negateExact($c) : $c")
+      val mathUtils = MathUtils.getClass.getCanonicalName.stripSuffix("$")
+      defineCodeGen(ctx, ev, c => s"$c < 0 ? $mathUtils.negateExact($c) : $c")
 
     case dt: NumericType =>
       defineCodeGen(ctx, ev, c => s"(${CodeGenerator.javaType(dt)})(java.lang.Math.abs($c))")
@@ -241,8 +243,8 @@ abstract class BinaryArithmetic extends BinaryOperator with NullIntolerant {
       assert(exactMathMethod.isDefined,
         s"The expression '$nodeName' must override the exactMathMethod() method " +
           "if it is supposed to operate over interval types.")
-      val mathClass = classOf[Math].getName
-      defineCodeGen(ctx, ev, (eval1, eval2) => s"$mathClass.${exactMathMethod.get}($eval1, $eval2)")
+      val mathUtils = MathUtils.getClass.getCanonicalName.stripSuffix("$")
+      defineCodeGen(ctx, ev, (eval1, eval2) => s"$mathUtils.${exactMathMethod.get}($eval1, $eval2)")
     // byte and short are casted into int when add, minus, times or divide
     case ByteType | ShortType =>
       nullSafeCodeGen(ctx, ev, (eval1, eval2) => {
@@ -267,8 +269,8 @@ abstract class BinaryArithmetic extends BinaryOperator with NullIntolerant {
     case IntegerType | LongType =>
       nullSafeCodeGen(ctx, ev, (eval1, eval2) => {
         val operation = if (failOnError && exactMathMethod.isDefined) {
-          val mathClass = classOf[Math].getName
-          s"$mathClass.${exactMathMethod.get}($eval1, $eval2)"
+          val mathUtils = MathUtils.getClass.getCanonicalName.stripSuffix("$")
+          s"$mathUtils.${exactMathMethod.get}($eval1, $eval2)"
         } else {
           s"$eval1 $symbol $eval2"
         }
@@ -326,9 +328,9 @@ case class Add(
       IntervalUtils.add(
         input1.asInstanceOf[CalendarInterval], input2.asInstanceOf[CalendarInterval])
     case _: DayTimeIntervalType =>
-      Math.addExact(input1.asInstanceOf[Long], input2.asInstanceOf[Long])
+      MathUtils.addExact(input1.asInstanceOf[Long], input2.asInstanceOf[Long])
     case _: YearMonthIntervalType =>
-      Math.addExact(input1.asInstanceOf[Int], input2.asInstanceOf[Int])
+      MathUtils.addExact(input1.asInstanceOf[Int], input2.asInstanceOf[Int])
     case _ => numeric.plus(input1, input2)
   }
 
@@ -372,9 +374,9 @@ case class Subtract(
       IntervalUtils.subtract(
         input1.asInstanceOf[CalendarInterval], input2.asInstanceOf[CalendarInterval])
     case _: DayTimeIntervalType =>
-      Math.subtractExact(input1.asInstanceOf[Long], input2.asInstanceOf[Long])
+      MathUtils.subtractExact(input1.asInstanceOf[Long], input2.asInstanceOf[Long])
     case _: YearMonthIntervalType =>
-      Math.subtractExact(input1.asInstanceOf[Int], input2.asInstanceOf[Int])
+      MathUtils.subtractExact(input1.asInstanceOf[Int], input2.asInstanceOf[Int])
     case _ => numeric.minus(input1, input2)
   }
 
@@ -441,8 +443,8 @@ trait DivModLike extends BinaryArithmetic {
         null
       } else {
         if (isZero(input2)) {
-          // when we reach here, failOnError must bet true.
-          throw QueryExecutionErrors.divideByZeroError
+          // when we reach here, failOnError must be true.
+          throw QueryExecutionErrors.divideByZeroError()
         }
         if (checkDivideOverflow && input1 == Long.MinValue && input2 == -1) {
           throw QueryExecutionErrors.overflowInIntegralDivideError()
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
index 62a2657264790..019d5963958c7 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
@@ -616,8 +616,8 @@ object IntervalUtils {
       monthsWithFraction: Double,
       daysWithFraction: Double,
       microsWithFraction: Double): CalendarInterval = {
-    val truncatedMonths = Math.toIntExact(monthsWithFraction.toLong)
-    val truncatedDays = Math.toIntExact(daysWithFraction.toLong)
+    val truncatedMonths = MathUtils.toIntExact(monthsWithFraction.toLong)
+    val truncatedDays = MathUtils.toIntExact(daysWithFraction.toLong)
     val micros = microsWithFraction + MICROS_PER_DAY * (daysWithFraction - truncatedDays)
     new CalendarInterval(truncatedMonths, truncatedDays, micros.round)
   }
@@ -644,9 +644,9 @@ object IntervalUtils {
    * @throws ArithmeticException if the result overflows any field value
    */
   def negateExact(interval: CalendarInterval): CalendarInterval = {
-    val months = Math.negateExact(interval.months)
-    val days = Math.negateExact(interval.days)
-    val microseconds = Math.negateExact(interval.microseconds)
+    val months = MathUtils.negateExact(interval.months)
+    val days = MathUtils.negateExact(interval.days)
+    val microseconds = MathUtils.negateExact(interval.microseconds)
     new CalendarInterval(months, days, microseconds)
   }
 
@@ -663,9 +663,9 @@ object IntervalUtils {
    * @throws ArithmeticException if the result overflows any field value
    */
   def addExact(left: CalendarInterval, right: CalendarInterval): CalendarInterval = {
-    val months = Math.addExact(left.months, right.months)
-    val days = Math.addExact(left.days, right.days)
-    val microseconds = Math.addExact(left.microseconds, right.microseconds)
+    val months = MathUtils.addExact(left.months, right.months)
+    val days = MathUtils.addExact(left.days, right.days)
+    val microseconds = MathUtils.addExact(left.microseconds, right.microseconds)
     new CalendarInterval(months, days, microseconds)
   }
 
@@ -685,9 +685,9 @@ object IntervalUtils {
    * @throws ArithmeticException if the result overflows any field value
    */
   def subtractExact(left: CalendarInterval, right: CalendarInterval): CalendarInterval = {
-    val months = Math.subtractExact(left.months, right.months)
-    val days = Math.subtractExact(left.days, right.days)
-    val microseconds = Math.subtractExact(left.microseconds, right.microseconds)
+    val months = MathUtils.subtractExact(left.months, right.months)
+    val days = MathUtils.subtractExact(left.days, right.days)
+    val microseconds = MathUtils.subtractExact(left.microseconds, right.microseconds)
     new CalendarInterval(months, days, microseconds)
   }
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/MathUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/MathUtils.scala
new file mode 100644
index 0000000000000..da0db71aa22e2
--- /dev/null
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/MathUtils.scala
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.catalyst.util
+
+import org.apache.spark.sql.errors.QueryExecutionErrors
+
+/**
+ * Helper functions for arithmetic operations with overflow.
+ */
+object MathUtils {
+
+  def addExact(a: Int, b: Int): Int = withOverflow(Math.addExact(a, b))
+
+  def addExact(a: Long, b: Long): Long = withOverflow(Math.addExact(a, b))
+
+  def subtractExact(a: Int, b: Int): Int = withOverflow(Math.subtractExact(a, b))
+
+  def subtractExact(a: Long, b: Long): Long = withOverflow(Math.subtractExact(a, b))
+
+  def multiplyExact(a: Int, b: Int): Int = withOverflow(Math.multiplyExact(a, b))
+
+  def multiplyExact(a: Long, b: Long): Long = withOverflow(Math.multiplyExact(a, b))
+
+  def negateExact(a: Int): Int = withOverflow(Math.negateExact(a))
+
+  def negateExact(a: Long): Long = withOverflow(Math.negateExact(a))
+
+  def toIntExact(a: Long): Int = withOverflow(Math.toIntExact(a))
+
+  def floorDiv(a: Int, b: Int): Int = withOverflow(Math.floorDiv(a, b), Some("try_divide"))
+
+  def floorDiv(a: Long, b: Long): Long = withOverflow(Math.floorDiv(a, b), Some("try_divide"))
+
+  def floorMod(a: Int, b: Int): Int = withOverflow(Math.floorMod(a, b))
+
+  def floorMod(a: Long, b: Long): Long = withOverflow(Math.floorMod(a, b))
+
+  private def withOverflow[A](f: => A, hint: Option[String] = None): A = {
+    try {
+      f
+    } catch {
+      case e: ArithmeticException =>
+        throw QueryExecutionErrors.arithmeticOverflowError(e.getMessage, hint)
+    }
+  }
+}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 54e4e22b083ca..dc266503af403 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -93,15 +93,15 @@ object QueryExecutionErrors {
   }
 
   def castingCauseOverflowError(t: Any, targetType: String): ArithmeticException = {
-    new SparkArithmeticException (errorClass = "CAST_CAUSES_OVERFLOW",
-      messageParameters = Array(t.toString, targetType))
+    new SparkArithmeticException(errorClass = "CAST_CAUSES_OVERFLOW",
+      messageParameters = Array(t.toString, targetType, SQLConf.ANSI_ENABLED.key))
   }
 
   def cannotChangeDecimalPrecisionError(
       value: Decimal, decimalPrecision: Int, decimalScale: Int): ArithmeticException = {
     new SparkArithmeticException(errorClass = "CANNOT_CHANGE_DECIMAL_PRECISION",
       messageParameters = Array(value.toDebugString,
-        decimalPrecision.toString, decimalScale.toString))
+        decimalPrecision.toString, decimalScale.toString, SQLConf.ANSI_ENABLED.key))
   }
 
   def invalidInputSyntaxForNumericError(s: UTF8String): NumberFormatException = {
@@ -152,7 +152,8 @@ object QueryExecutionErrors {
   }
 
   def divideByZeroError(): ArithmeticException = {
-    new SparkArithmeticException(errorClass = "DIVIDE_BY_ZERO", messageParameters = Array.empty)
+    new SparkArithmeticException(
+      errorClass = "DIVIDE_BY_ZERO", messageParameters = Array(SQLConf.ANSI_ENABLED.key))
   }
 
   def invalidArrayIndexError(index: Int, numElements: Int): ArrayIndexOutOfBoundsException = {
@@ -179,11 +180,11 @@ object QueryExecutionErrors {
   }
 
   def overflowInSumOfDecimalError(): ArithmeticException = {
-    new ArithmeticException("Overflow in sum of decimals.")
+    arithmeticOverflowError("Overflow in sum of decimals")
   }
 
   def overflowInIntegralDivideError(): ArithmeticException = {
-    new ArithmeticException("Overflow in integral divide.")
+    arithmeticOverflowError("Overflow in integral divide", Some("try_divide"))
   }
 
   def mapSizeExceedArraySizeWhenZipMapError(size: Int): RuntimeException = {
@@ -392,13 +393,20 @@ object QueryExecutionErrors {
     new IllegalStateException("table stats must be specified.")
   }
 
+  def arithmeticOverflowError(
+      message: String, hint: Option[String] = None): ArithmeticException = {
+    new ArithmeticException(s"$message. You can ${hint.map(x => s"use '$x' or ").getOrElse("")}" +
+      s"set ${SQLConf.ANSI_ENABLED.key} to false (except for ANSI interval type) " +
+      "to bypass this error.")
+  }
+
   def unaryMinusCauseOverflowError(originValue: AnyVal): ArithmeticException = {
-    new ArithmeticException(s"- $originValue caused overflow.")
+    arithmeticOverflowError(s"- $originValue caused overflow")
   }
 
   def binaryArithmeticCauseOverflowError(
       eval1: Short, symbol: String, eval2: Short): ArithmeticException = {
-    new ArithmeticException(s"$eval1 $symbol $eval2 caused overflow.")
+    arithmeticOverflowError(s"$eval1 $symbol $eval2 caused overflow")
   }
 
   def failedSplitSubExpressionMsg(length: Int): String = {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/numerics.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/numerics.scala
index a3e76797b6bf2..6811e50ccdf94 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/numerics.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/numerics.scala
@@ -20,7 +20,7 @@ package org.apache.spark.sql.types
 import scala.math.Numeric._
 import scala.math.Ordering
 
-import org.apache.spark.sql.catalyst.util.SQLOrderingUtil
+import org.apache.spark.sql.catalyst.util.{MathUtils, SQLOrderingUtil}
 import org.apache.spark.sql.errors.QueryExecutionErrors
 import org.apache.spark.sql.types.Decimal.DecimalIsConflicted
 
@@ -93,23 +93,23 @@ private[sql] object ShortExactNumeric extends ShortIsIntegral with Ordering.Short
 
 private[sql] object IntegerExactNumeric extends IntIsIntegral with Ordering.IntOrdering {
-  override def plus(x: Int, y: Int): Int = Math.addExact(x, y)
+  override def plus(x: Int, y: Int): Int = MathUtils.addExact(x, y)
 
-  override def minus(x: Int, y: Int): Int = Math.subtractExact(x, y)
+  override def minus(x: Int, y: Int): Int = MathUtils.subtractExact(x, y)
 
-  override def times(x: Int, y: Int): Int = Math.multiplyExact(x, y)
+  override def times(x: Int, y: Int): Int = MathUtils.multiplyExact(x, y)
 
-  override def negate(x: Int): Int = Math.negateExact(x)
+  override def negate(x: Int): Int = MathUtils.negateExact(x)
 }
 
 private[sql] object LongExactNumeric extends LongIsIntegral with Ordering.LongOrdering {
-  override def plus(x: Long, y: Long): Long = Math.addExact(x, y)
+  override def plus(x: Long, y: Long): Long = MathUtils.addExact(x, y)
 
-  override def minus(x: Long, y: Long): Long = Math.subtractExact(x, y)
+  override def minus(x: Long, y: Long): Long = MathUtils.subtractExact(x, y)
 
-  override def times(x: Long, y: Long): Long = Math.multiplyExact(x, y)
+  override def times(x: Long, y: Long): Long = MathUtils.multiplyExact(x, y)
 
-  override def negate(x: Long): Long = Math.negateExact(x)
+  override def negate(x: Long): Long = MathUtils.negateExact(x)
 
   override def toInt(x: Long): Int =
     if (x == x.toInt) {
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/decimalArithmeticOperations.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/decimalArithmeticOperations.sql.out
index ca5423b3d1078..a34c1d4aa9bdf 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/decimalArithmeticOperations.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/decimalArithmeticOperations.sql.out
@@ -76,7 +76,7 @@ select (5e36BD + 0.1) + 5e36BD
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-Decimal(expanded,10000000000000000000000000000000000000.1,39,1}) cannot be represented as Decimal(38, 1).
+Decimal(expanded,10000000000000000000000000000000000000.1,39,1}) cannot be represented as Decimal(38, 1). You can set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -85,7 +85,7 @@ select (-4e36BD - 0.1) - 7e36BD
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-Decimal(expanded,-11000000000000000000000000000000000000.1,39,1}) cannot be represented as Decimal(38, 1).
+Decimal(expanded,-11000000000000000000000000000000000000.1,39,1}) cannot be represented as Decimal(38, 1). You can set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -94,7 +94,7 @@ select 12345678901234567890.0 * 12345678901234567890.0
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-Decimal(expanded,152415787532388367501905199875019052100,39,0}) cannot be represented as Decimal(38, 2).
+Decimal(expanded,152415787532388367501905199875019052100,39,0}) cannot be represented as Decimal(38, 2). You can set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -103,7 +103,7 @@ select 1e35BD / 0.1
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-Decimal(expanded,1000000000000000000000000000000000000,37,0}) cannot be represented as Decimal(38, 6).
+Decimal(expanded,1000000000000000000000000000000000000,37,0}) cannot be represented as Decimal(38, 6). You can set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
index ecfa299fef2dc..7aacc4507a1fc 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
@@ -210,7 +210,7 @@ select interval '2 seconds' / 0
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-divide by zero
+divide by zero. You can use 'try_divide' or set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
 
 
 -- !query
@@ -243,7 +243,7 @@ select interval '2' year / 0
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-divide by zero
+divide by zero. You can use 'try_divide' or set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
 
 
 -- !query
@@ -1750,7 +1750,7 @@ select -(a) from values (interval '-2147483648 months', interval '2147483647 mon
 struct<>
 -- !query output
 java.lang.ArithmeticException
-integer overflow
+integer overflow. You can set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
 
 
 -- !query
@@ -1759,7 +1759,7 @@ select a - b from values (interval '-2147483648 months', interval '2147483647 mo
 struct<>
 -- !query output
 java.lang.ArithmeticException
-integer overflow
+integer overflow. You can set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
 
 
 -- !query
@@ -1768,7 +1768,7 @@ select b + interval '1 month' from values (interval '-2147483648 months', interv
 struct<>
 -- !query output
 java.lang.ArithmeticException
-integer overflow
+integer overflow. You can set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
 
 
 -- !query
@@ -1997,7 +1997,7 @@ SELECT (INTERVAL '-178956970-8' YEAR TO MONTH) / -1
 struct<>
 -- !query output
 java.lang.ArithmeticException
-Overflow in integral divide.
+Overflow in integral divide. You can use 'try_divide' or set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
 
 
 -- !query
@@ -2006,7 +2006,7 @@ SELECT (INTERVAL '-178956970-8' YEAR TO MONTH) / -1L
 struct<>
 -- !query output
 java.lang.ArithmeticException
-Overflow in integral divide.
+Overflow in integral divide. You can use 'try_divide' or set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
 
 
 -- !query
@@ -2049,7 +2049,7 @@ SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1
 struct<>
 -- !query output
 java.lang.ArithmeticException
-Overflow in integral divide.
+Overflow in integral divide. You can use 'try_divide' or set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
 
 
 -- !query
@@ -2058,7 +2058,7 @@ SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1L
 struct<>
 -- !query output
 java.lang.ArithmeticException
-Overflow in integral divide.
+Overflow in integral divide. You can use 'try_divide' or set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/interval.sql.out
index be53288dbb14b..0e9b25eb14915 100644
--- a/sql/core/src/test/resources/sql-tests/results/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/interval.sql.out
@@ -204,7 +204,7 @@ select interval '2 seconds' / 0
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-divide by zero
+divide by zero. You can use 'try_divide' or set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
 
 
 -- !query
@@ -237,7 +237,7 @@ select interval '2' year / 0
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-divide by zero
+divide by zero. You can use 'try_divide' or set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
 
 
 -- !query
@@ -1739,7 +1739,7 @@ select -(a) from values (interval '-2147483648 months', interval '2147483647 mon
 struct<>
 -- !query output
 java.lang.ArithmeticException
-integer overflow
+integer overflow. You can set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
 
 
 -- !query
@@ -1748,7 +1748,7 @@ select a - b from values (interval '-2147483648 months', interval '2147483647 mo
 struct<>
 -- !query output
 java.lang.ArithmeticException
-integer overflow
+integer overflow. You can set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
 
 
 -- !query
@@ -1757,7 +1757,7 @@ select b + interval '1 month' from values (interval '-2147483648 months', interv
 struct<>
 -- !query output
 java.lang.ArithmeticException
-integer overflow
+integer overflow. You can set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
 
 
 -- !query
@@ -1986,7 +1986,7 @@ SELECT (INTERVAL '-178956970-8' YEAR TO MONTH) / -1
 struct<>
 -- !query output
 java.lang.ArithmeticException
-Overflow in integral divide.
+Overflow in integral divide. You can use 'try_divide' or set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
 
 
 -- !query
@@ -1995,7 +1995,7 @@ SELECT (INTERVAL '-178956970-8' YEAR TO MONTH) / -1L
 struct<>
 -- !query output
 java.lang.ArithmeticException
-Overflow in integral divide.
+Overflow in integral divide. You can use 'try_divide' or set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
 
 
 -- !query
@@ -2038,7 +2038,7 @@ SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1
 struct<>
 -- !query output
 java.lang.ArithmeticException
-Overflow in integral divide.
+Overflow in integral divide. You can use 'try_divide' or set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
 
 
 -- !query
@@ -2047,7 +2047,7 @@ SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1L
 struct<>
 -- !query output
 java.lang.ArithmeticException
-Overflow in integral divide.
+Overflow in integral divide. You can use 'try_divide' or set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/case.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/case.sql.out
index 3e8c26339fcd1..122e530f702b2 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/case.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/case.sql.out
@@ -179,7 +179,7 @@ SELECT CASE WHEN 1=0 THEN 1/0 WHEN 1=1 THEN 1 ELSE 2/0 END
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-divide by zero
+divide by zero. You can use 'try_divide' or set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
 
 
 -- !query
@@ -188,7 +188,7 @@ SELECT CASE 1 WHEN 0 THEN 1/0 WHEN 1 THEN 1 ELSE 2/0 END
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-divide by zero
+divide by zero. You can use 'try_divide' or set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
 
 
 -- !query
@@ -197,7 +197,7 @@ SELECT CASE WHEN i > 100 THEN 1/0 ELSE 0 END FROM case_tbl
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-divide by zero
+divide by zero. You can use 'try_divide' or set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/float4.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/float4.sql.out
index c6753d61212eb..41b59219a2f05 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/float4.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/float4.sql.out
@@ -325,7 +325,7 @@ SELECT int(float('2147483647'))
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-Casting 2.14748365E9 to int causes overflow
+Casting 2.14748365E9 to int causes overflow. You can use 'try_cast' or set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -342,7 +342,7 @@ SELECT int(float('-2147483900'))
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-Casting -2.1474839E9 to int causes overflow
+Casting -2.1474839E9 to int causes overflow. You can use 'try_cast' or set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -375,7 +375,7 @@ SELECT bigint(float('-9223380000000000000'))
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-Casting -9.22338E18 to int causes overflow
+Casting -9.22338E18 to int causes overflow. You can use 'try_cast' or set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/float8.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/float8.sql.out
index 68acd3196cc4e..778dc3c484d2f 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/float8.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/float8.sql.out
@@ -833,7 +833,7 @@ SELECT bigint(double('-9223372036854780000'))
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-Casting -9.22337203685478E18 to long causes overflow
+Casting -9.22337203685478E18 to long causes overflow. You can use 'try_cast' or set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int4.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int4.sql.out
index 2bb71645af0e5..aee6cf88082d1 100755
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int4.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int4.sql.out
@@ -200,7 +200,7 @@ SELECT '' AS five, i.f1, i.f1 * smallint('2') AS x FROM INT4_TBL i
 struct<>
 -- !query output
 java.lang.ArithmeticException
-integer overflow
+integer overflow. You can set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
 
 
 -- !query
@@ -220,7 +220,7 @@ SELECT '' AS five, i.f1, i.f1 * int('2') AS x FROM INT4_TBL i
 struct<>
 -- !query output
 java.lang.ArithmeticException
-integer overflow
+integer overflow. You can set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
 
 
 -- !query
@@ -240,7 +240,7 @@ SELECT '' AS five, i.f1, i.f1 + smallint('2') AS x FROM INT4_TBL i
 struct<>
 -- !query output
 java.lang.ArithmeticException
-integer overflow
+integer overflow. You can set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
 
 
 -- !query
@@ -261,7 +261,7 @@ SELECT '' AS five, i.f1, i.f1 + int('2') AS x FROM INT4_TBL i
 struct<>
 -- !query output
 java.lang.ArithmeticException
-integer overflow
+integer overflow. You can set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
 
 
 -- !query
@@ -282,7 +282,7 @@ SELECT '' AS five, i.f1, i.f1 - smallint('2') AS x FROM INT4_TBL i
 struct<>
 -- !query output
 java.lang.ArithmeticException
-integer overflow
+integer overflow. You can set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
 
 
 -- !query
@@ -303,7 +303,7 @@ SELECT '' AS five, i.f1, i.f1 - int('2') AS x FROM INT4_TBL i
 struct<>
 -- !query output
 java.lang.ArithmeticException
-integer overflow
+integer overflow. You can set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out
index 81851ede898ab..6ed71a085be7d 100755
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out
@@ -392,7 +392,7 @@ SELECT '' AS three, q1, q2, q1 * q2 AS multiply FROM INT8_TBL
 struct<>
 -- !query output
 java.lang.ArithmeticException
-long overflow
+long overflow. You can set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
 
 
 -- !query
@@ -572,7 +572,7 @@ select bigint('9223372036854775800') / bigint('0')
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-divide by zero
+divide by zero. You can use 'try_divide' or set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
 
 
 -- !query
@@ -581,7 +581,7 @@ select bigint('-9223372036854775808') / smallint('0')
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-divide by zero
+divide by zero. You can use 'try_divide' or set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
 
 
 -- !query
@@ -590,7 +590,7 @@ select smallint('100') / bigint('0')
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-divide by zero
+divide by zero. You can use 'try_divide' or set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
 
 
 -- !query
@@ -607,7 +607,7 @@ SELECT CAST(q1 AS int) FROM int8_tbl WHERE q2 <> 456
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-Casting 4567890123456789 to int causes overflow
+Casting 4567890123456789 to int causes overflow. You can use 'try_cast' or set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -624,7 +624,7 @@ SELECT CAST(q1 AS smallint) FROM int8_tbl WHERE q2 <> 456
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-Casting 4567890123456789 to smallint causes overflow
+Casting 4567890123456789 to smallint causes overflow. You can use 'try_cast' or set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -661,7 +661,7 @@ SELECT CAST(double('922337203685477580700.0') AS bigint)
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-Casting 9.223372036854776E20 to long causes overflow
+Casting 9.223372036854776E20 to long causes overflow. You can use 'try_cast' or set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -733,7 +733,7 @@ SELECT string(int(shiftleft(bigint(-1), 63))+1)
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-Casting -9223372036854775808 to int causes overflow
+Casting -9223372036854775808 to int causes overflow. You can use 'try_cast' or set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -742,7 +742,7 @@ SELECT bigint((-9223372036854775808)) * bigint((-1))
 struct<>
 -- !query output
 java.lang.ArithmeticException
-long overflow
+long overflow. You can set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
 
 
 -- !query
@@ -767,7 +767,7 @@ SELECT bigint((-9223372036854775808)) * int((-1))
 struct<>
 -- !query output
 java.lang.ArithmeticException
-long overflow
+long overflow. You can set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
 
 
 -- !query
@@ -792,7 +792,7 @@ SELECT bigint((-9223372036854775808)) * smallint((-1))
 struct<>
 -- !query output
 java.lang.ArithmeticException
-long overflow
+long overflow. You can set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_having.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_having.sql.out
index 1d096b5a5cfc8..9c8fb7d874db0 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_having.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_having.sql.out
@@ -177,7 +177,7 @@ SELECT 1 AS one FROM test_having WHERE 1/a = 1 HAVING 1 < 2
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-divide by zero
+divide by zero. You can use 'try_divide' or set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out
index 3d07d9f155bed..c57cd7eea6bb8 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out
@@ -225,7 +225,7 @@ from range(9223372036854775804, 9223372036854775807) x
 struct<>
 -- !query output
 java.lang.ArithmeticException
-long overflow
+long overflow. You can set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
 
 
 -- !query
@@ -235,7 +235,7 @@ from range(-9223372036854775806, -9223372036854775805) x
 struct<>
 -- !query output
 java.lang.ArithmeticException
-long overflow
+long overflow. You can set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-case.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-case.sql.out
index 7c2897a9da924..7b978eaa31132 100755
--- a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-case.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-case.sql.out
@@ -179,7 +179,7 @@ SELECT CASE WHEN udf(1=0) THEN 1/0 WHEN 1=1 THEN 1 ELSE 2/0 END
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-divide by zero
+divide by zero. You can use 'try_divide' or set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
 
 
 -- !query
@@ -188,7 +188,7 @@ SELECT CASE 1 WHEN 0 THEN 1/udf(0) WHEN 1 THEN 1 ELSE 2/0 END
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-divide by zero
+divide by zero. You can use 'try_divide' or set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
 
 
 -- !query
@@ -197,7 +197,7 @@ SELECT CASE WHEN i > 100 THEN udf(1/0) ELSE udf(0) END FROM case_tbl
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-divide by zero
+divide by zero. You can use 'try_divide' or set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_having.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_having.sql.out
index 3d6f34932c573..112b8dd435ce0 100644
--- a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_having.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_having.sql.out
@@ -177,7 +177,7 @@ SELECT 1 AS one FROM test_having WHERE 1/udf(a) = 1 HAVING 1 < 2
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-divide by zero
+divide by zero. You can use 'try_divide' or set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
 
 
 -- !query
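
Reviewer note: the core of this patch is that every overflow-capable java.lang.Math call is routed through MathUtils.withOverflow, which catches the JDK's terse ArithmeticException ("long overflow", "integer overflow") and rethrows it via QueryExecutionErrors.arithmeticOverflowError with the ANSI-mode hint appended. Below is a minimal, self-contained sketch of that pattern outside Spark; it is not part of the patch, and the names MathUtilsSketch and ansiFlagKey are illustrative stand-ins (ansiFlagKey plays the role of SQLConf.ANSI_ENABLED.key).

// Sketch only (not part of the patch): delegate to Math's *Exact methods
// and rethrow any ArithmeticException with the configuration hint appended.
object MathUtilsSketch {
  // Stand-in for SQLConf.ANSI_ENABLED.key.
  private val ansiFlagKey = "spark.sql.ansi.enabled"

  private def withOverflow[A](f: => A, hint: Option[String] = None): A = {
    try {
      f
    } catch {
      case e: ArithmeticException =>
        // Mirrors QueryExecutionErrors.arithmeticOverflowError in the patch.
        throw new ArithmeticException(
          s"${e.getMessage}. You can ${hint.map(h => s"use '$h' or ").getOrElse("")}" +
            s"set $ansiFlagKey to false (except for ANSI interval type) to bypass this error.")
    }
  }

  def addExact(a: Long, b: Long): Long = withOverflow(Math.addExact(a, b))

  // Division-like operations carry an extra hint naming the try_* alternative.
  def floorDiv(a: Long, b: Long): Long = withOverflow(Math.floorDiv(a, b), Some("try_divide"))

  def main(args: Array[String]): Unit = {
    // Math.addExact(Long.MaxValue, 1L) throws a plain "long overflow";
    // the wrapper rethrows it with the hint, matching the golden-file
    // output updated above.
    try addExact(Long.MaxValue, 1L) catch {
      case e: ArithmeticException => println(e.getMessage)
    }
  }
}

Keeping the hint text in one place (arithmeticOverflowError) means the interpreted paths (nullSafeEval), the generated code (which calls the MathUtils object by its canonical class name), and the error-classes.json templates all produce the same wording, which is why the golden files change uniformly.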