From af705421db172471d57d92fc7feee3179169a68e Mon Sep 17 00:00:00 2001
From: Kent Yao
Date: Tue, 21 Jan 2020 20:51:10 +0800
Subject: [PATCH] [SPARK-30593][SQL] Revert interval ISO/ANSI SQL Standard
 output since we decide not to follow ANSI and no round trip

### What changes were proposed in this pull request?

This reverts https://github.com/apache/spark/pull/26418 and files a new ticket
under https://issues.apache.org/jira/browse/SPARK-30546 for better tracking of
interval behavior.

### Why are the changes needed?

Revert the interval ISO/ANSI SQL Standard output styles, since we have decided
not to follow the ANSI standard here and the output does not round-trip (the
strings produced cannot be parsed back into the same intervals).

### Does this PR introduce any user-facing change?

No; the reverted behavior has not been released yet.

### How was this patch tested?

Existing unit tests.

Closes #27304 from yaooqinn/SPARK-30593.

Authored-by: Kent Yao
Signed-off-by: Wenchen Fan
---
 .../unsafe/types/CalendarIntervalSuite.java   | 30 +++++++
 .../spark/sql/catalyst/expressions/Cast.scala | 20 +----
 .../sql/catalyst/expressions/literals.scala   |  2 +-
 .../sql/catalyst/util/IntervalUtils.scala     | 79 -------------------
 .../apache/spark/sql/internal/SQLConf.scala   | 19 -----
 .../catalyst/util/IntervalUtilsSuite.scala    | 64 ---------------
 .../spark/sql/execution/HiveResult.scala      |  9 +--
 .../inputs/interval-display-iso_8601.sql      |  3 -
 .../inputs/interval-display-sql_standard.sql  |  3 -
 .../sql-tests/inputs/interval-display.sql     | 14 ----
 .../sql-tests/inputs/postgreSQL/interval.sql  | 10 +--
 .../results/interval-display-iso_8601.sql.out | 21 -----
 .../interval-display-sql_standard.sql.out     | 21 -----
 .../results/interval-display.sql.out          | 21 -----
 .../results/postgreSQL/interval.sql.out       | 29 +------
 .../apache/spark/sql/DateFunctionsSuite.scala |  8 +-
 16 files changed, 44 insertions(+), 309 deletions(-)
 delete mode 100644 sql/core/src/test/resources/sql-tests/inputs/interval-display-iso_8601.sql
 delete mode 100644 sql/core/src/test/resources/sql-tests/inputs/interval-display-sql_standard.sql
 delete mode 100644 sql/core/src/test/resources/sql-tests/inputs/interval-display.sql
 delete mode 100644 sql/core/src/test/resources/sql-tests/results/interval-display-iso_8601.sql.out
 delete mode 100644 sql/core/src/test/resources/sql-tests/results/interval-display-sql_standard.sql.out
 delete mode 100644 sql/core/src/test/resources/sql-tests/results/interval-display.sql.out

diff --git a/common/unsafe/src/test/java/org/apache/spark/unsafe/types/CalendarIntervalSuite.java b/common/unsafe/src/test/java/org/apache/spark/unsafe/types/CalendarIntervalSuite.java
index 01bf7eb2438ad..6397f26c02f3a 100644
--- a/common/unsafe/src/test/java/org/apache/spark/unsafe/types/CalendarIntervalSuite.java
+++ b/common/unsafe/src/test/java/org/apache/spark/unsafe/types/CalendarIntervalSuite.java
@@ -46,6 +46,36 @@ public void equalsTest() {
     assertEquals(i1, i6);
   }

+  @Test
+  public void toStringTest() {
+    CalendarInterval i;
+
+    i = new CalendarInterval(0, 0, 0);
+    assertEquals("0 seconds", i.toString());
+
+    i = new CalendarInterval(34, 0, 0);
+    assertEquals("2 years 10 months", i.toString());
+
+    i = new CalendarInterval(-34, 0, 0);
+    assertEquals("-2 years -10 months", i.toString());
+
+    i = new CalendarInterval(0, 31, 0);
+    assertEquals("31 days", i.toString());
+
+    i = new CalendarInterval(0, -31, 0);
+    assertEquals("-31 days", i.toString());
+
+    i = new CalendarInterval(0, 0, 3 * MICROS_PER_HOUR + 13 * MICROS_PER_MINUTE + 123);
+    assertEquals("3 hours 13 minutes 0.000123 seconds", i.toString());
+
+    i = new CalendarInterval(0, 0, -3 * MICROS_PER_HOUR - 13 * MICROS_PER_MINUTE - 123);
+    assertEquals("-3 hours -13 minutes -0.000123 seconds", i.toString());
+
+    i = new CalendarInterval(34, 31, 3 * MICROS_PER_HOUR + 13 * MICROS_PER_MINUTE + 123);
+    assertEquals("2 years 10 months 31 days 3 hours 13 minutes 0.000123 seconds",
+      i.toString());
+  }
+
   @Test
   public void periodAndDurationTest() {
     CalendarInterval interval = new CalendarInterval(120, -40, 123456);
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
index 4fd74a4e4658b..05b4fbef2b697 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
@@ -30,9 +30,7 @@ import org.apache.spark.sql.catalyst.expressions.codegen.Block._
 import org.apache.spark.sql.catalyst.util._
 import org.apache.spark.sql.catalyst.util.DateTimeConstants._
 import org.apache.spark.sql.catalyst.util.DateTimeUtils._
-import org.apache.spark.sql.catalyst.util.IntervalUtils._
 import org.apache.spark.sql.internal.SQLConf
-import org.apache.spark.sql.internal.SQLConf.IntervalStyle._
 import org.apache.spark.sql.types._
 import org.apache.spark.unsafe.UTF8StringBuilder
 import org.apache.spark.unsafe.types.{CalendarInterval, UTF8String}
@@ -283,14 +281,8 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit

   // UDFToString
   private[this] def castToString(from: DataType): Any => Any = from match {
-    case CalendarIntervalType => SQLConf.get.intervalOutputStyle match {
-      case SQL_STANDARD =>
-        buildCast[CalendarInterval](_, i => UTF8String.fromString(toSqlStandardString(i)))
-      case ISO_8601 =>
-        buildCast[CalendarInterval](_, i => UTF8String.fromString(toIso8601String(i)))
-      case MULTI_UNITS =>
-        buildCast[CalendarInterval](_, i => UTF8String.fromString(toMultiUnitsString(i)))
-    }
+    case CalendarIntervalType =>
+      buildCast[CalendarInterval](_, i => UTF8String.fromString(i.toString))
     case BinaryType => buildCast[Array[Byte]](_, UTF8String.fromBytes)
     case DateType => buildCast[Int](_, d => UTF8String.fromString(dateFormatter.format(d)))
     case TimestampType => buildCast[Long](_,
@@ -1021,13 +1013,7 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
       (c, evPrim, evNull) => code"""$evPrim = UTF8String.fromString(
         org.apache.spark.sql.catalyst.util.DateTimeUtils.timestampToString($tf, $c));"""
     case CalendarIntervalType =>
-      val iu = IntervalUtils.getClass.getCanonicalName.stripSuffix("$")
-      val funcName = SQLConf.get.intervalOutputStyle match {
-        case SQL_STANDARD => "toSqlStandardString"
-        case ISO_8601 => "toIso8601String"
-        case MULTI_UNITS => "toMultiUnitsString"
-      }
-      (c, evPrim, _) => code"""$evPrim = UTF8String.fromString($iu.$funcName($c));"""
+      (c, evPrim, _) => code"""$evPrim = UTF8String.fromString($c.toString());"""
     case ArrayType(et, _) =>
       (c, evPrim, evNull) => {
         val buffer = ctx.freshVariable("buffer", classOf[UTF8StringBuilder])
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
index 48b8c9c0fbf8b..371a29ecf83ab 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
@@ -409,7 +409,7 @@ case class Literal (value: Any, dataType: DataType) extends LeafExpression {
         DateTimeUtils.getZoneId(SQLConf.get.sessionLocalTimeZone))
       s"TIMESTAMP '${formatter.format(v)}'"
     case (i: CalendarInterval, CalendarIntervalType) =>
-      s"INTERVAL '${IntervalUtils.toMultiUnitsString(i)}'"
+      s"INTERVAL '${i.toString}'"
     case (v: Array[Byte], BinaryType) => s"X'${DatatypeConverter.printHexBinary(v)}'"
     case _ => value.toString
   }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
index 8763f24b05edc..7692299a46ef5 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
@@ -497,85 +497,6 @@ object IntervalUtils {
     fromDoubles(interval.months / num, interval.days / num, interval.microseconds / num)
   }

-  // `toString` implementation in CalendarInterval is the multi-units format currently.
-  def toMultiUnitsString(interval: CalendarInterval): String = interval.toString
-
-  def toSqlStandardString(interval: CalendarInterval): String = {
-    val yearMonthPart = if (interval.months < 0) {
-      val ma = math.abs(interval.months)
-      "-" + ma / 12 + "-" + ma % 12
-    } else if (interval.months > 0) {
-      "+" + interval.months / 12 + "-" + interval.months % 12
-    } else {
-      ""
-    }
-
-    val dayPart = if (interval.days < 0) {
-      interval.days.toString
-    } else if (interval.days > 0) {
-      "+" + interval.days
-    } else {
-      ""
-    }
-
-    val timePart = if (interval.microseconds != 0) {
-      val sign = if (interval.microseconds > 0) "+" else "-"
-      val sb = new StringBuilder(sign)
-      var rest = math.abs(interval.microseconds)
-      sb.append(rest / MICROS_PER_HOUR)
-      sb.append(':')
-      rest %= MICROS_PER_HOUR
-      val minutes = rest / MICROS_PER_MINUTE;
-      if (minutes < 10) {
-        sb.append(0)
-      }
-      sb.append(minutes)
-      sb.append(':')
-      rest %= MICROS_PER_MINUTE
-      val bd = BigDecimal.valueOf(rest, 6)
-      if (bd.compareTo(new BigDecimal(10)) < 0) {
-        sb.append(0)
-      }
-      val s = bd.stripTrailingZeros().toPlainString
-      sb.append(s)
-      sb.toString()
-    } else {
-      ""
-    }
-
-    val intervalList = Seq(yearMonthPart, dayPart, timePart).filter(_.nonEmpty)
-    if (intervalList.nonEmpty) intervalList.mkString(" ") else "0"
-  }
-
-  def toIso8601String(interval: CalendarInterval): String = {
-    val sb = new StringBuilder("P")
-
-    val year = interval.months / 12
-    if (year != 0) sb.append(year + "Y")
-    val month = interval.months % 12
-    if (month != 0) sb.append(month + "M")
-
-    if (interval.days != 0) sb.append(interval.days + "D")
-
-    if (interval.microseconds != 0) {
-      sb.append('T')
-      var rest = interval.microseconds
-      val hour = rest / MICROS_PER_HOUR
-      if (hour != 0) sb.append(hour + "H")
-      rest %= MICROS_PER_HOUR
-      val minute = rest / MICROS_PER_MINUTE
-      if (minute != 0) sb.append(minute + "M")
-      rest %= MICROS_PER_MINUTE
-      if (rest != 0) {
-        val bd = BigDecimal.valueOf(rest, 6)
-        sb.append(bd.stripTrailingZeros().toPlainString + "S")
-      }
-    } else if (interval.days == 0 && interval.months == 0) {
-      sb.append("T0S")
-    }
-    sb.toString()
-  }
-
   private object ParseState extends Enumeration {
     type ParseState = Value

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
index 279c79ff14080..45ca022c36f72 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
@@ -1799,23 +1799,6 @@ object SQLConf {
     .checkValues(StoreAssignmentPolicy.values.map(_.toString))
     .createWithDefault(StoreAssignmentPolicy.ANSI.toString)

-  object IntervalStyle extends Enumeration {
-    type IntervalStyle = Value
-    val SQL_STANDARD, ISO_8601, MULTI_UNITS = Value
-  }
-
-  val INTERVAL_STYLE = buildConf("spark.sql.intervalOutputStyle")
-    .doc("When converting interval values to strings (i.e. for display), this config decides the" +
-      " interval string format. The value SQL_STANDARD will produce output matching SQL standard" +
-      " interval literals (i.e. '+3-2 +10 -00:00:01'). The value ISO_8601 will produce output" +
-      " matching the ISO 8601 standard (i.e. 'P3Y2M10DT-1S'). The value MULTI_UNITS (which is the" +
-      " default) will produce output in form of value unit pairs, (i.e. '3 year 2 months 10 days" +
-      " -1 seconds'")
-    .stringConf
-    .transform(_.toUpperCase(Locale.ROOT))
-    .checkValues(IntervalStyle.values.map(_.toString))
-    .createWithDefault(IntervalStyle.MULTI_UNITS.toString)
-
   val ANSI_ENABLED = buildConf("spark.sql.ansi.enabled")
     .doc("When true, Spark tries to conform to the ANSI SQL specification: 1. Spark will " +
       "throw a runtime exception if an overflow occurs in any operation on integral/decimal " +
@@ -2667,8 +2650,6 @@ class SQLConf extends Serializable with Logging {
   def storeAssignmentPolicy: StoreAssignmentPolicy.Value =
     StoreAssignmentPolicy.withName(getConf(STORE_ASSIGNMENT_POLICY))

-  def intervalOutputStyle: IntervalStyle.Value = IntervalStyle.withName(getConf(INTERVAL_STYLE))
-
   def ansiEnabled: Boolean = getConf(ANSI_ENABLED)

   def nestedSchemaPruningEnabled: Boolean = getConf(NESTED_SCHEMA_PRUNING_ENABLED)
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/IntervalUtilsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/IntervalUtilsSuite.scala
index a0b4387331e5d..514804cbda16c 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/IntervalUtilsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/IntervalUtilsSuite.scala
@@ -304,70 +304,6 @@ class IntervalUtilsSuite extends SparkFunSuite with SQLHelper {
     }
   }

-  test("to ansi sql standard string") {
-    val i1 = new CalendarInterval(0, 0, 0)
-    assert(IntervalUtils.toSqlStandardString(i1) === "0")
-    val i2 = new CalendarInterval(34, 0, 0)
-    assert(IntervalUtils.toSqlStandardString(i2) === "+2-10")
-    val i3 = new CalendarInterval(-34, 0, 0)
-    assert(IntervalUtils.toSqlStandardString(i3) === "-2-10")
-    val i4 = new CalendarInterval(0, 31, 0)
-    assert(IntervalUtils.toSqlStandardString(i4) === "+31")
-    val i5 = new CalendarInterval(0, -31, 0)
-    assert(IntervalUtils.toSqlStandardString(i5) === "-31")
-    val i6 = new CalendarInterval(0, 0, 3 * MICROS_PER_HOUR + 13 * MICROS_PER_MINUTE + 123)
-    assert(IntervalUtils.toSqlStandardString(i6) === "+3:13:00.000123")
-    val i7 = new CalendarInterval(0, 0, -3 * MICROS_PER_HOUR - 13 * MICROS_PER_MINUTE - 123)
-    assert(IntervalUtils.toSqlStandardString(i7) === "-3:13:00.000123")
-    val i8 = new CalendarInterval(-34, 31, 3 * MICROS_PER_HOUR + 13 * MICROS_PER_MINUTE + 123)
-    assert(IntervalUtils.toSqlStandardString(i8) === "-2-10 +31 +3:13:00.000123")
-    val i9 = new CalendarInterval(0, 0, -3000 * MICROS_PER_HOUR)
-    assert(IntervalUtils.toSqlStandardString(i9) === "-3000:00:00")
-  }
-
-  test("to iso 8601 string") {
-    val i1 = new CalendarInterval(0, 0, 0)
-    assert(IntervalUtils.toIso8601String(i1) === "PT0S")
-    val i2 = new CalendarInterval(34, 0, 0)
-    assert(IntervalUtils.toIso8601String(i2) === "P2Y10M")
-    val i3 = new CalendarInterval(-34, 0, 0)
-    assert(IntervalUtils.toIso8601String(i3) === "P-2Y-10M")
-    val i4 = new CalendarInterval(0, 31, 0)
-    assert(IntervalUtils.toIso8601String(i4) === "P31D")
-    val i5 = new CalendarInterval(0, -31, 0)
-    assert(IntervalUtils.toIso8601String(i5) === "P-31D")
-    val i6 = new CalendarInterval(0, 0, 3 * MICROS_PER_HOUR + 13 * MICROS_PER_MINUTE + 123)
-    assert(IntervalUtils.toIso8601String(i6) === "PT3H13M0.000123S")
-    val i7 = new CalendarInterval(0, 0, -3 * MICROS_PER_HOUR - 13 * MICROS_PER_MINUTE - 123)
-    assert(IntervalUtils.toIso8601String(i7) === "PT-3H-13M-0.000123S")
-    val i8 = new CalendarInterval(-34, 31, 3 * MICROS_PER_HOUR + 13 * MICROS_PER_MINUTE + 123)
-    assert(IntervalUtils.toIso8601String(i8) === "P-2Y-10M31DT3H13M0.000123S")
-    val i9 = new CalendarInterval(0, 0, -3000 * MICROS_PER_HOUR)
-    assert(IntervalUtils.toIso8601String(i9) === "PT-3000H")
-  }
-
-  test("to multi units string") {
-    val i1 = new CalendarInterval(0, 0, 0)
-    assert(IntervalUtils.toMultiUnitsString(i1) === "0 seconds")
-    val i2 = new CalendarInterval(34, 0, 0)
-    assert(IntervalUtils.toMultiUnitsString(i2) === "2 years 10 months")
-    val i3 = new CalendarInterval(-34, 0, 0)
-    assert(IntervalUtils.toMultiUnitsString(i3) === "-2 years -10 months")
-    val i4 = new CalendarInterval(0, 31, 0)
-    assert(IntervalUtils.toMultiUnitsString(i4) === "31 days")
-    val i5 = new CalendarInterval(0, -31, 0)
-    assert(IntervalUtils.toMultiUnitsString(i5) === "-31 days")
-    val i6 = new CalendarInterval(0, 0, 3 * MICROS_PER_HOUR + 13 * MICROS_PER_MINUTE + 123)
-    assert(IntervalUtils.toMultiUnitsString(i6) === "3 hours 13 minutes 0.000123 seconds")
-    val i7 = new CalendarInterval(0, 0, -3 * MICROS_PER_HOUR - 13 * MICROS_PER_MINUTE - 123)
-    assert(IntervalUtils.toMultiUnitsString(i7) === "-3 hours -13 minutes -0.000123 seconds")
-    val i8 = new CalendarInterval(-34, 31, 3 * MICROS_PER_HOUR + 13 * MICROS_PER_MINUTE + 123)
-    assert(IntervalUtils.toMultiUnitsString(i8) ===
-      "-2 years -10 months 31 days 3 hours 13 minutes 0.000123 seconds")
-    val i9 = new CalendarInterval(0, 0, -3000 * MICROS_PER_HOUR)
-    assert(IntervalUtils.toMultiUnitsString(i9) === "-3000 hours")
-  }
-
   test("from day-time string") {
     def check(input: String, from: IntervalUnit, to: IntervalUnit, expected: String): Unit = {
       withClue(s"from = $from, to = $to") {
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/HiveResult.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/HiveResult.scala
index c92b10cc03645..bbe47a63f4d61 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/HiveResult.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/HiveResult.scala
@@ -22,10 +22,8 @@ import java.sql.{Date, Timestamp}

 import org.apache.spark.sql.Row
 import org.apache.spark.sql.catalyst.util.{DateFormatter, DateTimeUtils, TimestampFormatter}
-import org.apache.spark.sql.catalyst.util.IntervalUtils._
 import org.apache.spark.sql.execution.command.{DescribeCommandBase, ExecutedCommandExec, ShowTablesCommand}
 import org.apache.spark.sql.internal.SQLConf
-import org.apache.spark.sql.internal.SQLConf.IntervalStyle._
 import org.apache.spark.sql.types._
 import org.apache.spark.unsafe.types.CalendarInterval

@@ -75,12 +73,7 @@ object HiveResult {
     case (decimal: java.math.BigDecimal, DecimalType()) => decimal.toPlainString
     case (n, _: NumericType) => n.toString
     case (s: String, StringType) => if (nested) "\"" + s + "\"" else s
-    case (interval: CalendarInterval, CalendarIntervalType) =>
-      SQLConf.get.intervalOutputStyle match {
-        case SQL_STANDARD => toSqlStandardString(interval)
-        case ISO_8601 => toIso8601String(interval)
-        case MULTI_UNITS => toMultiUnitsString(interval)
-      }
+    case (interval: CalendarInterval, CalendarIntervalType) => interval.toString
     case (seq: Seq[_], ArrayType(typ, _)) =>
       seq.map(v => (v, typ)).map(e => toHiveString(e, true)).mkString("[", ",", "]")
     case (m: Map[_, _], MapType(kType, vType, _)) =>
diff --git a/sql/core/src/test/resources/sql-tests/inputs/interval-display-iso_8601.sql b/sql/core/src/test/resources/sql-tests/inputs/interval-display-iso_8601.sql
deleted file mode 100644
index 3b63c715a6aa1..0000000000000
--- a/sql/core/src/test/resources/sql-tests/inputs/interval-display-iso_8601.sql
+++ /dev/null
@@ -1,3 +0,0 @@
--- tests for interval output style with iso_8601 format
---SET spark.sql.intervalOutputStyle = ISO_8601
---IMPORT interval-display.sql
diff --git a/sql/core/src/test/resources/sql-tests/inputs/interval-display-sql_standard.sql b/sql/core/src/test/resources/sql-tests/inputs/interval-display-sql_standard.sql
deleted file mode 100644
index d96865b160bb6..0000000000000
--- a/sql/core/src/test/resources/sql-tests/inputs/interval-display-sql_standard.sql
+++ /dev/null
@@ -1,3 +0,0 @@
--- tests for interval output style with sql standard format
---SET spark.sql.intervalOutputStyle = SQL_STANDARD
---IMPORT interval-display.sql
diff --git a/sql/core/src/test/resources/sql-tests/inputs/interval-display.sql b/sql/core/src/test/resources/sql-tests/inputs/interval-display.sql
deleted file mode 100644
index ae19f1b6374ba..0000000000000
--- a/sql/core/src/test/resources/sql-tests/inputs/interval-display.sql
+++ /dev/null
@@ -1,14 +0,0 @@
--- tests for interval output style
-
-SELECT
-  cast(null as interval), -- null
-  interval '0 day', -- 0
-  interval '1 year', -- year only
-  interval '1 month', -- month only
-  interval '1 year 2 month', -- year month only
-  interval '1 day -1 hours',
-  interval '-1 day -1 hours',
-  interval '-1 day 1 hours',
-  interval '-1 days +1 hours',
-  interval '1 years 2 months -3 days 4 hours 5 minutes 6.789 seconds',
-  - interval '1 years 2 months -3 days 4 hours 5 minutes 6.789 seconds';
diff --git a/sql/core/src/test/resources/sql-tests/inputs/postgreSQL/interval.sql b/sql/core/src/test/resources/sql-tests/inputs/postgreSQL/interval.sql
index 7edcae59c49ff..eb8cc34419519 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/postgreSQL/interval.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/postgreSQL/interval.sql
@@ -272,12 +272,10 @@ SELECT interval '1 2:03:04' minute to second;
 -- test output of couple non-standard interval values in the sql style
 -- [SPARK-29406] Interval output styles
 -- SET IntervalStyle TO sql_standard;
-set spark.sql.intervalOutputStyle=SQL_STANDARD;
-SELECT interval '1 day -1 hours',
-       interval '-1 days +1 hours',
-       interval '1 years 2 months -3 days 4 hours 5 minutes 6.789 seconds',
-       - interval '1 years 2 months -3 days 4 hours 5 minutes 6.789 seconds';
-set spark.sql.intervalOutputStyle=MULTI_UNITS;
+-- SELECT interval '1 day -1 hours',
+--        interval '-1 days +1 hours',
+--        interval '1 years 2 months -3 days 4 hours 5 minutes 6.789 seconds',
+--        - interval '1 years 2 months -3 days 4 hours 5 minutes 6.789 seconds';

 -- test outputting iso8601 intervals
 -- [SPARK-29406] Interval output styles
diff --git a/sql/core/src/test/resources/sql-tests/results/interval-display-iso_8601.sql.out b/sql/core/src/test/resources/sql-tests/results/interval-display-iso_8601.sql.out
deleted file mode 100644
index 2da098f17e168..0000000000000
--- a/sql/core/src/test/resources/sql-tests/results/interval-display-iso_8601.sql.out
+++ /dev/null
@@ -1,21 +0,0 @@
--- Automatically generated by SQLQueryTestSuite
--- Number of queries: 1
-
-
--- !query 0
-SELECT
-  cast(null as interval), -- null
-  interval '0 day', -- 0
-  interval '1 year', -- year only
-  interval '1 month', -- month only
-  interval '1 year 2 month', -- year month only
-  interval '1 day -1 hours',
-  interval '-1 day -1 hours',
-  interval '-1 day 1 hours',
-  interval '-1 days +1 hours',
-  interval '1 years 2 months -3 days 4 hours 5 minutes 6.789 seconds',
-  - interval '1 years 2 months -3 days 4 hours 5 minutes 6.789 seconds'
--- !query 0 schema
-struct
--- !query 0 output
-NULL	PT0S	P1Y	P1M	P1Y2M	P1DT-1H	P-1DT-1H	P-1DT1H	P-1DT1H	P1Y2M-3DT4H5M6.789S	P-1Y-2M3DT-4H-5M-6.789S
diff --git a/sql/core/src/test/resources/sql-tests/results/interval-display-sql_standard.sql.out b/sql/core/src/test/resources/sql-tests/results/interval-display-sql_standard.sql.out
deleted file mode 100644
index 7ef2f9c51218e..0000000000000
--- a/sql/core/src/test/resources/sql-tests/results/interval-display-sql_standard.sql.out
+++ /dev/null
@@ -1,21 +0,0 @@
--- Automatically generated by SQLQueryTestSuite
--- Number of queries: 1
-
-
--- !query 0
-SELECT
-  cast(null as interval), -- null
-  interval '0 day', -- 0
-  interval '1 year', -- year only
-  interval '1 month', -- month only
-  interval '1 year 2 month', -- year month only
-  interval '1 day -1 hours',
-  interval '-1 day -1 hours',
-  interval '-1 day 1 hours',
-  interval '-1 days +1 hours',
-  interval '1 years 2 months -3 days 4 hours 5 minutes 6.789 seconds',
-  - interval '1 years 2 months -3 days 4 hours 5 minutes 6.789 seconds'
--- !query 0 schema
-struct
--- !query 0 output
-NULL	0	+1-0	+0-1	+1-2	+1 -1:00:00	-1 -1:00:00	-1 +1:00:00	-1 +1:00:00	+1-2 -3 +4:05:06.789	-1-2 +3 -4:05:06.789
diff --git a/sql/core/src/test/resources/sql-tests/results/interval-display.sql.out b/sql/core/src/test/resources/sql-tests/results/interval-display.sql.out
deleted file mode 100644
index a292fe3c83028..0000000000000
--- a/sql/core/src/test/resources/sql-tests/results/interval-display.sql.out
+++ /dev/null
@@ -1,21 +0,0 @@
--- Automatically generated by SQLQueryTestSuite
--- Number of queries: 1
-
-
--- !query 0
-SELECT
-  cast(null as interval), -- null
-  interval '0 day', -- 0
-  interval '1 year', -- year only
-  interval '1 month', -- month only
-  interval '1 year 2 month', -- year month only
-  interval '1 day -1 hours',
-  interval '-1 day -1 hours',
-  interval '-1 day 1 hours',
-  interval '-1 days +1 hours',
-  interval '1 years 2 months -3 days 4 hours 5 minutes 6.789 seconds',
-  - interval '1 years 2 months -3 days 4 hours 5 minutes 6.789 seconds'
--- !query 0 schema
-struct
--- !query 0 output
-NULL	0 seconds	1 years	1 months	1 years 2 months	1 days -1 hours	-1 days -1 hours	-1 days 1 hours	-1 days 1 hours	1 years 2 months -3 days 4 hours 5 minutes 6.789 seconds	-1 years -2 months 3 days -4 hours -5 minutes -6.789 seconds
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out
index 6a6aed1d9bf68..cb906c53c84e7 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out
@@ -1,5 +1,5 @@
 -- Automatically generated by SQLQueryTestSuite
--- Number of queries: 27
+-- Number of queries: 24


 -- !query 0
@@ -252,30 +252,3 @@ requirement failed: Interval string must match day-time format of '^(?[+|-
 == SQL ==
 SELECT interval '1 2:03:04' minute to second
 ----------------^^^
-
-
--- !query 24
-set spark.sql.intervalOutputStyle=SQL_STANDARD
--- !query 24 schema
-struct
--- !query 24 output
-spark.sql.intervalOutputStyle	SQL_STANDARD
-
-
--- !query 25
-SELECT interval '1 day -1 hours',
-       interval '-1 days +1 hours',
-       interval '1 years 2 months -3 days 4 hours 5 minutes 6.789 seconds',
-       - interval '1 years 2 months -3 days 4 hours 5 minutes 6.789 seconds'
--- !query 25 schema
-struct
--- !query 25 output
-+1 -1:00:00	-1 +1:00:00	+1-2 -3 +4:05:06.789	-1-2 +3 -4:05:06.789
-
-
--- !query 26
-set spark.sql.intervalOutputStyle=MULTI_UNITS
--- !query 26 schema
-struct
--- !query 26 output
-spark.sql.intervalOutputStyle	MULTI_UNITS
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala
index 7b6b935496678..72d323e88406e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala
@@ -293,10 +293,10 @@ class DateFunctionsSuite extends QueryTest with SharedSparkSession {
     val i = new CalendarInterval(2, 2, 2000000L)
     val df = Seq((1, t1, d1), (3, t2, d2)).toDF("n", "t", "d")
     checkAnswer(
-      df.selectExpr(s"d + INTERVAL'${IntervalUtils.toMultiUnitsString(i)}'"),
+      df.selectExpr(s"d + INTERVAL'${i.toString}'"),
       Seq(Row(Date.valueOf("2015-10-02")), Row(Date.valueOf("2016-03-02"))))
     checkAnswer(
-      df.selectExpr(s"t + INTERVAL'${IntervalUtils.toMultiUnitsString(i)}'"),
+      df.selectExpr(s"t + INTERVAL'${i.toString}'"),
       Seq(Row(Timestamp.valueOf("2015-10-03 00:00:01")),
         Row(Timestamp.valueOf("2016-03-02 00:00:02"))))
   }
@@ -309,10 +309,10 @@ class DateFunctionsSuite extends QueryTest with SharedSparkSession {
     val i = new CalendarInterval(2, 2, 2000000L)
     val df = Seq((1, t1, d1), (3, t2, d2)).toDF("n", "t", "d")
     checkAnswer(
-      df.selectExpr(s"d - INTERVAL'${IntervalUtils.toMultiUnitsString(i)}'"),
+      df.selectExpr(s"d - INTERVAL'${i.toString}'"),
       Seq(Row(Date.valueOf("2015-07-27")), Row(Date.valueOf("2015-12-26"))))
     checkAnswer(
-      df.selectExpr(s"t - INTERVAL'${IntervalUtils.toMultiUnitsString(i)}'"),
+      df.selectExpr(s"t - INTERVAL'${i.toString}'"),
       Seq(Row(Timestamp.valueOf("2015-07-29 23:59:59")),
         Row(Timestamp.valueOf("2015-12-27 00:00:00"))))
   }
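For reference, a minimal sketch of the behavior this patch settles on, derived from the `toStringTest` expectations above: after the revert, every interval-to-string path (`Cast`, `Literal.sql`, `HiveResult`) delegates to `CalendarInterval.toString`, which always produces the multi-units format. The snippet below is illustrative only; the demo object name is hypothetical, while `CalendarInterval(months, days, microseconds)` is the real constructor in the `spark-unsafe` module at this commit.

```scala
import org.apache.spark.unsafe.types.CalendarInterval

// Hypothetical demo, not part of the patch: prints the multi-units
// strings that CalendarIntervalSuite.toStringTest pins down above.
object IntervalToStringDemo {
  def main(args: Array[String]): Unit = {
    println(new CalendarInterval(34, 0, 0))     // 2 years 10 months
    println(new CalendarInterval(0, -31, 0))    // -31 days
    // 3 hours, 13 minutes, 123 microseconds, built from plain literals:
    val micros = (3 * 3600L + 13 * 60L) * 1000000L + 123
    println(new CalendarInterval(0, 0, micros)) // 3 hours 13 minutes 0.000123 seconds
  }
}
```

Since `spark.sql.intervalOutputStyle` no longer exists after this revert, the multi-units output is unconditional rather than one of three configurable styles.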