From 0b69a1f21d758ba079ed5e73072caca413398361 Mon Sep 17 00:00:00 2001
From: Daoyuan Wang
Date: Wed, 24 Jun 2015 05:06:13 -0700
Subject: [PATCH 1/7] udf current

---
 .../expressions/datetimeFunctions.scala | 53 +++++++++++++++++++
 1 file changed, 53 insertions(+)
 create mode 100644 sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeFunctions.scala

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeFunctions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeFunctions.scala
new file mode 100644
index 0000000000000..00e269419020c
--- /dev/null
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeFunctions.scala
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.catalyst.expressions
+
+import org.apache.spark.sql.catalyst.util.DateTimeUtils
+import org.apache.spark.sql.types._
+
+/**
+ * Adds a number of days to startdate: date_add('2008-12-31', 1) = '2009-01-01'.
+ */
+case class CurrentTimestamp() extends Expression {
+  override def children: Seq[Expression] = Nil
+
+  override def foldable: Boolean = true
+  override def nullable: Boolean = false
+
+  override def dataType: DataType = TimestampType
+
+  override def eval(input: InternalRow): Any = {
+    (new java.util.Date()).getTime * 10000L
+  }
+}
+
+/**
+ * Subtracts a number of days to startdate: date_sub('2008-12-31', 1) = '2008-12-30'.
+ */
+case class CurrentDate() extends Expression {
+  override def children: Seq[Expression] = Nil
+
+  override def foldable: Boolean = true
+  override def nullable: Boolean = false
+
+  override def dataType: DataType = DateType
+
+  override def eval(input: InternalRow): Any = {
+    DateTimeUtils.millisToDays((new java.util.Date()).getTime)
+  }
+}

From 427d9dc88df88449b127788702826acc685a1d5e Mon Sep 17 00:00:00 2001
From: Daoyuan Wang
Date: Thu, 2 Jul 2015 02:46:42 -0700
Subject: [PATCH 2/7] add tests and codegen

---
 .../catalyst/analysis/FunctionRegistry.scala | 6 ++-
 .../expressions/datetimeFunctions.scala | 39 +++++++++++-----
 .../expressions/DatetimeFunctionsSuite.scala | 40 ++++++++++++++++
 .../spark/sql/DatetimeExpressionsSuite.scala | 46 +++++++++++++++++++
 4 files changed, 118 insertions(+), 13 deletions(-)
 create mode 100644 sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DatetimeFunctionsSuite.scala
 create mode 100644 sql/core/src/test/scala/org/apache/spark/sql/DatetimeExpressionsSuite.scala

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
index 6f04298d4711b..c82a2f0879dda 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
@@ -158,7 +158,11 @@ object FunctionRegistry {
     expression[Substring]("substring"),
     expression[Upper]("ucase"),
     expression[UnHex]("unhex"),
-    expression[Upper]("upper")
+    expression[Upper]("upper"),
+
+    //datetime functions
+    expression[CurrentDate]("current_date"),
+    expression[CurrentTimestamp]("current_timestamp")
   )
 
   val builtin: FunctionRegistry = {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeFunctions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeFunctions.scala
index 00e269419020c..571dca88809eb 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeFunctions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeFunctions.scala
@@ -17,37 +17,52 @@
 
 package org.apache.spark.sql.catalyst.expressions
 
+import org.apache.spark.sql.catalyst.expressions.codegen.{CodeGenContext, GeneratedExpressionCode}
 import org.apache.spark.sql.catalyst.util.DateTimeUtils
 import org.apache.spark.sql.types._
 
 /**
- * Adds a number of days to startdate: date_add('2008-12-31', 1) = '2009-01-01'.
+ * Returns the current date at the start of query evaluation.
+ * All calls of current_date within the same query return the same value.
  */
-case class CurrentTimestamp() extends Expression {
-  override def children: Seq[Expression] = Nil
-
+case class CurrentDate() extends LeafExpression {
   override def foldable: Boolean = true
   override def nullable: Boolean = false
 
-  override def dataType: DataType = TimestampType
+  override def dataType: DataType = DateType
 
   override def eval(input: InternalRow): Any = {
-    (new java.util.Date()).getTime * 10000L
+    DateTimeUtils.millisToDays(System.currentTimeMillis())
+  }
+
+  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
+    val datetimeUtils = "org.apache.spark.sql.catalyst.util.DateTimeUtils"
+    s"""
+      boolean ${ev.isNull} = false;
+      ${ctx.javaType(dataType)} ${ev.primitive} =
+        $datetimeUtils.millisToDays(System.currentTimeMillis());
+    """
   }
 }
 
 /**
- * Subtracts a number of days to startdate: date_sub('2008-12-31', 1) = '2008-12-30'.
+ * Returns the current timestamp at the start of query evaluation.
+ * All calls of current_timestamp within the same query return the same value.
  */
-case class CurrentDate() extends Expression {
-  override def children: Seq[Expression] = Nil
-
+case class CurrentTimestamp() extends LeafExpression {
   override def foldable: Boolean = true
   override def nullable: Boolean = false
 
-  override def dataType: DataType = DateType
+  override def dataType: DataType = TimestampType
 
   override def eval(input: InternalRow): Any = {
-    DateTimeUtils.millisToDays((new java.util.Date()).getTime)
+    System.currentTimeMillis() * 10000L
+  }
+
+  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
+    s"""
+      boolean ${ev.isNull} = false;
+      ${ctx.javaType(dataType)} ${ev.primitive} = System.currentTimeMillis() * 10000L;
+    """
   }
 }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DatetimeFunctionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DatetimeFunctionsSuite.scala
new file mode 100644
index 0000000000000..8503f4d2307a9
--- /dev/null
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DatetimeFunctionsSuite.scala
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.catalyst.expressions
+
+import java.sql.Date
+
+import org.apache.spark.SparkFunSuite
+import org.apache.spark.sql.types.DateType
+
+class DatetimeFunctionsSuite extends SparkFunSuite with ExpressionEvalHelper {
+  test("datetime function current_date") {
+    checkEvaluation(
+      CurrentDate(),
+      new Date(System.currentTimeMillis), EmptyRow)
+  }
+
+  test("datetime function current_timestamp") {
+    // By the time we run check, current timestamp has been different.
+    // So we just check the date part.
+    checkEvaluation(
+      Cast(CurrentTimestamp(), DateType),
+      new Date(System.currentTimeMillis), EmptyRow)
+  }
+
+}
\ No newline at end of file
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DatetimeExpressionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DatetimeExpressionsSuite.scala
new file mode 100644
index 0000000000000..158c45743420a
--- /dev/null
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DatetimeExpressionsSuite.scala
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql
+
+import java.sql.Date
+
+import org.apache.spark.sql.catalyst.util.DateTimeUtils
+import org.scalatest.BeforeAndAfterAll
+
+class DatetimeExpressionsSuite extends QueryTest with BeforeAndAfterAll {
+  private lazy val ctx = org.apache.spark.sql.test.TestSQLContext
+
+  test("function current_date") {
+    // Date constructor would keep the original millis, we need to align it with begin of day.
+    checkAnswer(ctx.sql("""SELECT CURRENT_DATE()"""),
+      Row(new Date(DateTimeUtils.daysToMillis(
+        DateTimeUtils.millisToDays(System.currentTimeMillis())))))
+  }
+
+  test("function current_timestamp") {
+    // Execution in one query should return the same value
+    checkAnswer(ctx.sql("""SELECT CURRENT_TIMESTAMP() = CURRENT_TIMESTAMP()"""),
+      Row(true))
+    // By the time we run check, current timestamp has been different.
+    // So we just check the date part.
+    checkAnswer(ctx.sql("""SELECT CAST(CURRENT_TIMESTAMP() AS DATE)"""),
+      Row(new Date(DateTimeUtils.daysToMillis(
+        DateTimeUtils.millisToDays(System.currentTimeMillis())))))
+  }
+
+}
\ No newline at end of file

From 98e8550ea920d484201f3858a8b16b02c7dc12f0 Mon Sep 17 00:00:00 2001
From: Daoyuan Wang
Date: Thu, 2 Jul 2015 03:08:35 -0700
Subject: [PATCH 3/7] fix sytle

---
 .../apache/spark/sql/catalyst/analysis/FunctionRegistry.scala | 2 +-
 .../spark/sql/catalyst/expressions/DatetimeFunctionsSuite.scala | 2 +-
 .../scala/org/apache/spark/sql/DatetimeExpressionsSuite.scala | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
index c82a2f0879dda..9c60408aa83fb 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
@@ -160,7 +160,7 @@ object FunctionRegistry {
     expression[UnHex]("unhex"),
     expression[Upper]("upper"),
 
-    //datetime functions
+    // datetime functions
     expression[CurrentDate]("current_date"),
     expression[CurrentTimestamp]("current_timestamp")
   )
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DatetimeFunctionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DatetimeFunctionsSuite.scala
index 8503f4d2307a9..885f7caa4db2c 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DatetimeFunctionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DatetimeFunctionsSuite.scala
@@ -37,4 +37,4 @@ class DatetimeFunctionsSuite extends SparkFunSuite with ExpressionEvalHelper {
       new Date(System.currentTimeMillis), EmptyRow)
   }
 
-}
\ No newline at end of file
+}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DatetimeExpressionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DatetimeExpressionsSuite.scala
index 158c45743420a..a9c06a773f989 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DatetimeExpressionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DatetimeExpressionsSuite.scala
@@ -43,4 +43,4 @@ class DatetimeExpressionsSuite extends QueryTest with BeforeAndAfterAll {
         DateTimeUtils.millisToDays(System.currentTimeMillis())))))
   }
 
-}
\ No newline at end of file
+}

From 61ed3d51e725cc4ce06a20f1ef9cf3c11b146110 Mon Sep 17 00:00:00 2001
From: Daoyuan Wang
Date: Thu, 2 Jul 2015 09:58:47 -0700
Subject: [PATCH 4/7] add in functions

---
 .../scala/org/apache/spark/sql/functions.scala | 17 +++++++++++++++++
 .../spark/sql/DatetimeExpressionsSuite.scala | 15 +++++++++++++--
 2 files changed, 30 insertions(+), 2 deletions(-)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/functions.scala b/sql/core/src/main/scala/org/apache/spark/sql/functions.scala
index e6f623bdf39eb..215a3e7a451ba 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/functions.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/functions.scala
@@ -34,6 +34,7 @@ import org.apache.spark.util.Utils
  *
  * @groupname udf_funcs UDF functions
  * @groupname agg_funcs Aggregate functions
+ * @groupname datetime_funcs Date time functions
  * @groupname sort_funcs Sorting functions
  * @groupname normal_funcs Non-aggregate functions
  * @groupname math_funcs Math functions
@@ -989,6 +990,22 @@ object functions {
    */
   def cosh(columnName: String): Column = cosh(Column(columnName))
 
+  /**
+   * Returns the current date.
+   *
+   * @group datetime_funcs
+   * @since 1.5.0
+   */
+  def current_date(): Column = CurrentDate()
+
+  /**
+   * Returns the current timestamp.
+   *
+   * @group datetime_funcs
+   * @since 1.5.0
+   */
+  def current_timestamp(): Column = CurrentTimestamp()
+
   /**
    * Computes the exponential of the given value.
    *
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DatetimeExpressionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DatetimeExpressionsSuite.scala
index a9c06a773f989..cb6ddce13c634 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DatetimeExpressionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DatetimeExpressionsSuite.scala
@@ -20,19 +20,30 @@ package org.apache.spark.sql
 import java.sql.Date
 
 import org.apache.spark.sql.catalyst.util.DateTimeUtils
-import org.scalatest.BeforeAndAfterAll
+import org.apache.spark.sql.functions._
 
-class DatetimeExpressionsSuite extends QueryTest with BeforeAndAfterAll {
+class DatetimeExpressionsSuite extends QueryTest {
   private lazy val ctx = org.apache.spark.sql.test.TestSQLContext
 
+  import ctx.implicits._
+
+  val df1 = Seq((1, 2), (3, 1)).toDF("a", "b")
+
   test("function current_date") {
     // Date constructor would keep the original millis, we need to align it with begin of day.
+    checkAnswer(df1.select(current_date()),
+      Seq(
+        Row(new Date(DateTimeUtils.daysToMillis(
+          DateTimeUtils.millisToDays(System.currentTimeMillis())))),
+        Row(new Date(DateTimeUtils.daysToMillis(
+          DateTimeUtils.millisToDays(System.currentTimeMillis())))))))
     checkAnswer(ctx.sql("""SELECT CURRENT_DATE()"""),
       Row(new Date(DateTimeUtils.daysToMillis(
         DateTimeUtils.millisToDays(System.currentTimeMillis())))))
   }
 
   test("function current_timestamp") {
+    checkAnswer(df1.select(countDistinct(current_timestamp())), Row(1))
     // Execution in one query should return the same value
     checkAnswer(ctx.sql("""SELECT CURRENT_TIMESTAMP() = CURRENT_TIMESTAMP()"""),
       Row(true))

From e11ae750686c5240177e5afca4da9f7fe1581269 Mon Sep 17 00:00:00 2001
From: Daoyuan Wang
Date: Thu, 2 Jul 2015 21:31:11 -0700
Subject: [PATCH 5/7] refine tests

---
 .../spark/sql/DatetimeExpressionsSuite.scala | 23 +++++++------------
 1 file changed, 8 insertions(+), 15 deletions(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DatetimeExpressionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DatetimeExpressionsSuite.scala
index cb6ddce13c634..2c019237ff504 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DatetimeExpressionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DatetimeExpressionsSuite.scala
@@ -30,16 +30,12 @@ class DatetimeExpressionsSuite extends QueryTest {
   val df1 = Seq((1, 2), (3, 1)).toDF("a", "b")
 
   test("function current_date") {
-    // Date constructor would keep the original millis, we need to align it with begin of day.
-    checkAnswer(df1.select(current_date()),
-      Seq(
-        Row(new Date(DateTimeUtils.daysToMillis(
-          DateTimeUtils.millisToDays(System.currentTimeMillis())))),
-        Row(new Date(DateTimeUtils.daysToMillis(
-          DateTimeUtils.millisToDays(System.currentTimeMillis())))))))
-    checkAnswer(ctx.sql("""SELECT CURRENT_DATE()"""),
-      Row(new Date(DateTimeUtils.daysToMillis(
-        DateTimeUtils.millisToDays(System.currentTimeMillis())))))
+    val d0 = DateTimeUtils.millisToDays(System.currentTimeMillis())
+    val d1 = DateTimeUtils.fromJavaDate(df1.select(current_date()).collect().head.getDate(0))
+    val d2 = DateTimeUtils.fromJavaDate(
+      ctx.sql("""SELECT CURRENT_DATE()""").collect().head.getDate(0))
+    val d3 = DateTimeUtils.millisToDays(System.currentTimeMillis())
+    assert(d0 <= d1 && d1 <= d2 && d2 <= d3 && d3 - d0 <= 1)
   }
 
   test("function current_timestamp") {
@@ -47,11 +43,8 @@ class DatetimeExpressionsSuite extends QueryTest {
     // Execution in one query should return the same value
     checkAnswer(ctx.sql("""SELECT CURRENT_TIMESTAMP() = CURRENT_TIMESTAMP()"""),
       Row(true))
-    // By the time we run check, current timestamp has been different.
-    // So we just check the date part.
-    checkAnswer(ctx.sql("""SELECT CAST(CURRENT_TIMESTAMP() AS DATE)"""),
-      Row(new Date(DateTimeUtils.daysToMillis(
-        DateTimeUtils.millisToDays(System.currentTimeMillis())))))
+    assert(math.abs(ctx.sql("""SELECT CURRENT_TIMESTAMP()""").collect().head.getTimestamp(
+      0).getTime - System.currentTimeMillis()) < 5000)
   }
 
 }

From 27c9f95d512229b65eed10b0e2b6abc2d6952f11 Mon Sep 17 00:00:00 2001
From: Daoyuan Wang
Date: Thu, 2 Jul 2015 22:40:39 -0700
Subject: [PATCH 6/7] refine tests..

---
 .../expressions/DatetimeFunctionsSuite.scala | 19 ++++++++-----------
 1 file changed, 8 insertions(+), 11 deletions(-)

diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DatetimeFunctionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DatetimeFunctionsSuite.scala
index 885f7caa4db2c..1618c24871c60 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DatetimeFunctionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DatetimeFunctionsSuite.scala
@@ -17,24 +17,21 @@
 
 package org.apache.spark.sql.catalyst.expressions
 
-import java.sql.Date
-
 import org.apache.spark.SparkFunSuite
-import org.apache.spark.sql.types.DateType
+import org.apache.spark.sql.catalyst.util.DateTimeUtils
 
 class DatetimeFunctionsSuite extends SparkFunSuite with ExpressionEvalHelper {
   test("datetime function current_date") {
-    checkEvaluation(
-      CurrentDate(),
-      new Date(System.currentTimeMillis), EmptyRow)
+    val d0 = DateTimeUtils.millisToDays(System.currentTimeMillis())
+    val cd = CurrentDate().eval(EmptyRow).asInstanceOf[Int]
+    val d1 = DateTimeUtils.millisToDays(System.currentTimeMillis())
+    assert(d0 <= cd && cd <= d1 && d1 - d0 <= 1)
  }
 
   test("datetime function current_timestamp") {
-    // By the time we run check, current timestamp has been different.
-    // So we just check the date part.
-    checkEvaluation(
-      Cast(CurrentTimestamp(), DateType),
-      new Date(System.currentTimeMillis), EmptyRow)
+    val ct = DateTimeUtils.toJavaTimestamp(CurrentTimestamp().eval(EmptyRow).asInstanceOf[Long])
+    val t1 = System.currentTimeMillis()
+    assert(math.abs(t1 - ct.getTime) < 5000)
   }
 
 }

From 6a20b64ab169ba31d61cfffa1c6151f34304e8a2 Mon Sep 17 00:00:00 2001
From: Daoyuan Wang
Date: Thu, 2 Jul 2015 23:31:08 -0700
Subject: [PATCH 7/7] remove codegen and add lazy in testsuite

---
 .../catalyst/expressions/datetimeFunctions.scala | 16 ----------------
 .../spark/sql/DatetimeExpressionsSuite.scala | 4 +---
 2 files changed, 1 insertion(+), 19 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeFunctions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeFunctions.scala
index 571dca88809eb..13ba2f2e5d62d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeFunctions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeFunctions.scala
@@ -34,15 +34,6 @@ case class CurrentDate() extends LeafExpression {
   override def eval(input: InternalRow): Any = {
     DateTimeUtils.millisToDays(System.currentTimeMillis())
   }
-
-  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
-    val datetimeUtils = "org.apache.spark.sql.catalyst.util.DateTimeUtils"
-    s"""
-      boolean ${ev.isNull} = false;
-      ${ctx.javaType(dataType)} ${ev.primitive} =
-        $datetimeUtils.millisToDays(System.currentTimeMillis());
-    """
-  }
 }
 
 /**
@@ -58,11 +49,4 @@ case class CurrentTimestamp() extends LeafExpression {
   override def eval(input: InternalRow): Any = {
     System.currentTimeMillis() * 10000L
   }
-
-  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
-    s"""
-      boolean ${ev.isNull} = false;
-      ${ctx.javaType(dataType)} ${ev.primitive} = System.currentTimeMillis() * 10000L;
-    """
-  }
 }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DatetimeExpressionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DatetimeExpressionsSuite.scala
index 2c019237ff504..44b915304533c 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DatetimeExpressionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DatetimeExpressionsSuite.scala
@@ -17,8 +17,6 @@
 
 package org.apache.spark.sql
 
-import java.sql.Date
-
 import org.apache.spark.sql.catalyst.util.DateTimeUtils
 import org.apache.spark.sql.functions._
 
@@ -27,7 +25,7 @@ class DatetimeExpressionsSuite extends QueryTest {
 
   import ctx.implicits._
 
-  val df1 = Seq((1, 2), (3, 1)).toDF("a", "b")
+  lazy val df1 = Seq((1, 2), (3, 1)).toDF("a", "b")
 
   test("function current_date") {
     val d0 = DateTimeUtils.millisToDays(System.currentTimeMillis())
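Usage sketch (not part of the patch series above): once these patches are applied, the new expressions are reachable both through the DataFrame API added to functions.scala and through the SQL names registered in FunctionRegistry. This is only an illustrative sketch; it assumes a Spark 1.5-era application where a SQLContext named `sqlContext` and its implicits are in scope, and the sample DataFrame is made up for the demonstration.

import org.apache.spark.sql.functions.{current_date, current_timestamp}
import sqlContext.implicits._

// Hypothetical sample data, only for demonstration.
val df = Seq((1, "a"), (2, "b")).toDF("id", "value")

// DataFrame API entry points added in functions.scala.
df.select(current_date(), current_timestamp()).show()

// SQL names registered in FunctionRegistry.
sqlContext.sql("SELECT CURRENT_DATE(), CURRENT_TIMESTAMP()").show()

Both expressions are foldable leaf expressions, so the optimizer can constant-fold them to a single literal per query; the CURRENT_TIMESTAMP() = CURRENT_TIMESTAMP() check in DatetimeExpressionsSuite leans on that folding.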