Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions docs/sql-ref-ansi-compliance.md
Original file line number Diff line number Diff line change
Expand Up @@ -384,6 +384,7 @@ When ANSI mode is on, it throws exceptions for invalid operations. You can use t
- `try_make_timestamp_ltz`: identical to the function `make_timestamp_ltz`, except that it returns `NULL` result instead of throwing an exception on error.
- `try_make_timestamp_ntz`: identical to the function `make_timestamp_ntz`, except that it returns `NULL` result instead of throwing an exception on error.
- `try_make_interval`: identical to the function `make_interval`, except that it returns `NULL` result instead of throwing an exception on invalid interval.
- `try_to_time`: identical to the function `to_time`, except that it returns `NULL` result instead of throwing an exception on string parsing error.

### SQL Keywords (optional, disabled by default)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -463,6 +463,7 @@ object FunctionRegistry {
expressionBuilder("try_sum", TrySumExpressionBuilder, setAlias = true),
expression[TryToBinary]("try_to_binary"),
expressionBuilder("try_to_timestamp", TryToTimestampExpressionBuilder, setAlias = true),
expressionBuilder("try_to_time", TryToTimeExpressionBuilder, setAlias = true),
expression[TryAesDecrypt]("try_aes_decrypt"),
expression[TryReflect]("try_reflect"),
expression[TryUrlDecode]("try_url_decode"),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,9 +19,10 @@ package org.apache.spark.sql.catalyst.expressions

import java.time.DateTimeException

import org.apache.spark.sql.catalyst.analysis.ExpressionBuilder
import org.apache.spark.sql.catalyst.expressions.objects.Invoke
import org.apache.spark.sql.catalyst.util.TimeFormatter
import org.apache.spark.sql.errors.QueryExecutionErrors
import org.apache.spark.sql.errors.{QueryCompilationErrors, QueryExecutionErrors}
import org.apache.spark.sql.internal.types.StringTypeWithCollation
import org.apache.spark.sql.types.{AbstractDataType, ObjectType, TimeType}
import org.apache.spark.unsafe.types.UTF8String
Expand Down Expand Up @@ -120,3 +121,42 @@ case class ToTimeParser(fmt: Option[String]) {
}
}
}

/**
 * Parses a column to a time based on the supplied format, returning NULL on parse failure.
 */
// scalastyle:off line.size.limit
@ExpressionDescription(
  usage = """
    _FUNC_(str[, format]) - Parses the `str` expression with the `format` expression to a time.
      If `format` is malformed or its application does not result in a well formed time, the function
      returns NULL. By default, it follows casting rules to a time if the `format` is omitted.
  """,
  arguments = """
    Arguments:
      * str - A string to be parsed to time.
      * format - Time format pattern to follow. See <a href="https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html">Datetime Patterns</a> for valid
                 time format patterns.
  """,
  examples = """
    Examples:
      > SELECT _FUNC_('00:12:00.001');
       00:12:00.001
      > SELECT _FUNC_('12.10.05.999999', 'HH.mm.ss.SSSSSS');
       12:10:05.999999
      > SELECT _FUNC_('foo', 'HH:mm:ss');
       NULL
  """,
  group = "datetime_funcs",
  since = "4.1.0")
// scalastyle:on line.size.limit
object TryToTimeExpressionBuilder extends ExpressionBuilder {
  // Accepts 1 argument (string) or 2 arguments (string, format); any other arity is a
  // compile-time (analysis) error. The null-on-error semantics come from wrapping ToTime
  // in TryEval, mirroring how try_to_timestamp wraps its underlying expression.
  override def build(funcName: String, expressions: Seq[Expression]): Expression = {
    expressions match {
      case Seq(str) => TryEval(ToTime(str, None))
      case Seq(str, format) => TryEval(ToTime(str, Some(format)))
      case _ =>
        throw QueryCompilationErrors.wrongNumArgsError(funcName, Seq(1, 2), expressions.length)
    }
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -367,6 +367,7 @@
| org.apache.spark.sql.catalyst.expressions.TrySubtract | try_subtract | SELECT try_subtract(2, 1) | struct<try_subtract(2, 1):int> |
| org.apache.spark.sql.catalyst.expressions.TryToBinary | try_to_binary | SELECT try_to_binary('abc', 'utf-8') | struct<try_to_binary(abc, utf-8):binary> |
| org.apache.spark.sql.catalyst.expressions.TryToNumber | try_to_number | SELECT try_to_number('454', '999') | struct<try_to_number(454, 999):decimal(3,0)> |
| org.apache.spark.sql.catalyst.expressions.TryToTimeExpressionBuilder | try_to_time | SELECT try_to_time('00:12:00.001') | struct<try_to_time(to_time(00:12:00.001)):time(6)> |
| org.apache.spark.sql.catalyst.expressions.TryToTimestampExpressionBuilder | try_to_timestamp | SELECT try_to_timestamp('2016-12-31 00:12:00') | struct<try_to_timestamp(2016-12-31 00:12:00):timestamp> |
| org.apache.spark.sql.catalyst.expressions.TryUrlDecode | try_url_decode | SELECT try_url_decode('https%3A%2F%2Fspark.apache.org') | struct<try_url_decode(https%3A%2F%2Fspark.apache.org):string> |
| org.apache.spark.sql.catalyst.expressions.TryValidateUTF8 | try_validate_utf8 | SELECT try_validate_utf8('Spark') | struct<try_validate_utf8(Spark):string> |
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,3 +44,78 @@ select to_time("13-60", "HH-mm")
-- !query analysis
Project [to_time(13-60, Some(HH-mm)) AS to_time(13-60, HH-mm)#x]
+- OneRowRelation


-- !query
select try_to_time(null), try_to_time('00:12:00'), try_to_time('01:02:03', 'HH:mm:ss')
-- !query analysis
Project [try_to_time(to_time(null, None)) AS try_to_time(to_time(NULL))#x, try_to_time(to_time(00:12:00, None)) AS try_to_time(to_time(00:12:00))#x, try_to_time(to_time(01:02:03, Some(HH:mm:ss))) AS try_to_time(to_time(01:02:03, HH:mm:ss))#x]
+- OneRowRelation


-- !query
select try_to_time(1)
-- !query analysis
org.apache.spark.sql.catalyst.ExtendedAnalysisException
{
"errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
"sqlState" : "42K09",
"messageParameters" : {
"inputSql" : "\"1\"",
"inputType" : "\"INT\"",
"paramIndex" : "first",
"requiredType" : "\"STRING\"",
"sqlExpr" : "\"to_time(1)\""
},
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 8,
"stopIndex" : 21,
"fragment" : "try_to_time(1)"
} ]
}


-- !query
select try_to_time('12:48:31 abc')
-- !query analysis
Project [try_to_time(to_time(12:48:31 abc, None)) AS try_to_time(to_time(12:48:31 abc))#x]
+- OneRowRelation


-- !query
select try_to_time('10:11:12.', 'HH:mm:ss.SSSSSS')
-- !query analysis
Project [try_to_time(to_time(10:11:12., Some(HH:mm:ss.SSSSSS))) AS try_to_time(to_time(10:11:12., HH:mm:ss.SSSSSS))#x]
+- OneRowRelation


-- !query
select try_to_time("02-69", "HH-mm")
-- !query analysis
Project [try_to_time(to_time(02-69, Some(HH-mm))) AS try_to_time(to_time(02-69, HH-mm))#x]
+- OneRowRelation


-- !query
select try_to_time('11:12:13', 'HH:mm:ss', 'SSSSSS')
-- !query analysis
org.apache.spark.sql.AnalysisException
{
"errorClass" : "WRONG_NUM_ARGS.WITHOUT_SUGGESTION",
"sqlState" : "42605",
"messageParameters" : {
"actualNum" : "3",
"docroot" : "https://spark.apache.org/docs/latest",
"expectedNum" : "[1, 2]",
"functionName" : "`try_to_time`"
},
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 8,
"stopIndex" : 52,
"fragment" : "try_to_time('11:12:13', 'HH:mm:ss', 'SSSSSS')"
} ]
}
7 changes: 7 additions & 0 deletions sql/core/src/test/resources/sql-tests/inputs/time.sql
Original file line number Diff line number Diff line change
Expand Up @@ -11,3 +11,10 @@ select to_time(time_str, fmt_str) from time_view;
select to_time("11", "HH");
-- invalid: there is no 13 hours
select to_time("13-60", "HH-mm");

select try_to_time(null), try_to_time('00:12:00'), try_to_time('01:02:03', 'HH:mm:ss');
select try_to_time(1);
select try_to_time('12:48:31 abc');
select try_to_time('10:11:12.', 'HH:mm:ss.SSSSSS');
select try_to_time("02-69", "HH-mm");
select try_to_time('11:12:13', 'HH:mm:ss', 'SSSSSS');
83 changes: 83 additions & 0 deletions sql/core/src/test/resources/sql-tests/results/time.sql.out
Original file line number Diff line number Diff line change
Expand Up @@ -53,3 +53,86 @@ org.apache.spark.SparkDateTimeException
"input" : "'13-60'"
}
}


-- !query
select try_to_time(null), try_to_time('00:12:00'), try_to_time('01:02:03', 'HH:mm:ss')
-- !query schema
struct<try_to_time(to_time(NULL)):time(6),try_to_time(to_time(00:12:00)):time(6),try_to_time(to_time(01:02:03, HH:mm:ss)):time(6)>
-- !query output
NULL 00:12:00 01:02:03


-- !query
select try_to_time(1)
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.catalyst.ExtendedAnalysisException
{
"errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
"sqlState" : "42K09",
"messageParameters" : {
"inputSql" : "\"1\"",
"inputType" : "\"INT\"",
"paramIndex" : "first",
"requiredType" : "\"STRING\"",
"sqlExpr" : "\"to_time(1)\""
},
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 8,
"stopIndex" : 21,
"fragment" : "try_to_time(1)"
} ]
}


-- !query
select try_to_time('12:48:31 abc')
-- !query schema
struct<try_to_time(to_time(12:48:31 abc)):time(6)>
-- !query output
NULL


-- !query
select try_to_time('10:11:12.', 'HH:mm:ss.SSSSSS')
-- !query schema
struct<try_to_time(to_time(10:11:12., HH:mm:ss.SSSSSS)):time(6)>
-- !query output
NULL


-- !query
select try_to_time("02-69", "HH-mm")
-- !query schema
struct<try_to_time(to_time(02-69, HH-mm)):time(6)>
-- !query output
NULL


-- !query
select try_to_time('11:12:13', 'HH:mm:ss', 'SSSSSS')
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
{
"errorClass" : "WRONG_NUM_ARGS.WITHOUT_SUGGESTION",
"sqlState" : "42605",
"messageParameters" : {
"actualNum" : "3",
"docroot" : "https://spark.apache.org/docs/latest",
"expectedNum" : "[1, 2]",
"functionName" : "`try_to_time`"
},
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
"startIndex" : 8,
"stopIndex" : 52,
"fragment" : "try_to_time('11:12:13', 'HH:mm:ss', 'SSSSSS')"
} ]
}