diff --git a/docs/sql-ref-ansi-compliance.md b/docs/sql-ref-ansi-compliance.md
index 37ec8f4ac8f7c..73a53b6deba5b 100644
--- a/docs/sql-ref-ansi-compliance.md
+++ b/docs/sql-ref-ansi-compliance.md
@@ -384,6 +384,7 @@ When ANSI mode is on, it throws exceptions for invalid operations. You can use t
- `try_make_timestamp_ltz`: identical to the function `make_timestamp_ltz`, except that it returns `NULL` result instead of throwing an exception on error.
- `try_make_timestamp_ntz`: identical to the function `make_timestamp_ntz`, except that it returns `NULL` result instead of throwing an exception on error.
- `try_make_interval`: identical to the function `make_interval`, except that it returns `NULL` result instead of throwing an exception on invalid interval.
+ - `try_to_time`: identical to the function `to_time`, except that it returns `NULL` result instead of throwing an exception on string parsing error.
### SQL Keywords (optional, disabled by default)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
index b3539b2c85c9d..ec9060f8cb8c2 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
@@ -463,6 +463,7 @@ object FunctionRegistry {
expressionBuilder("try_sum", TrySumExpressionBuilder, setAlias = true),
expression[TryToBinary]("try_to_binary"),
expressionBuilder("try_to_timestamp", TryToTimestampExpressionBuilder, setAlias = true),
+ expressionBuilder("try_to_time", TryToTimeExpressionBuilder, setAlias = true),
expression[TryAesDecrypt]("try_aes_decrypt"),
expression[TryReflect]("try_reflect"),
expression[TryUrlDecode]("try_url_decode"),
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/timeExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/timeExpressions.scala
index d66b7fc4ec62f..d02cd7d725ce5 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/timeExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/timeExpressions.scala
@@ -19,9 +19,10 @@ package org.apache.spark.sql.catalyst.expressions
import java.time.DateTimeException
+import org.apache.spark.sql.catalyst.analysis.ExpressionBuilder
import org.apache.spark.sql.catalyst.expressions.objects.Invoke
import org.apache.spark.sql.catalyst.util.TimeFormatter
-import org.apache.spark.sql.errors.QueryExecutionErrors
+import org.apache.spark.sql.errors.{QueryCompilationErrors, QueryExecutionErrors}
import org.apache.spark.sql.internal.types.StringTypeWithCollation
import org.apache.spark.sql.types.{AbstractDataType, ObjectType, TimeType}
import org.apache.spark.unsafe.types.UTF8String
@@ -120,3 +121,42 @@ case class ToTimeParser(fmt: Option[String]) {
}
}
}
+
+/**
+ * Parses a column to a time based on the supplied format.
+ */
+// scalastyle:off line.size.limit
+@ExpressionDescription(
+  usage = """
+    _FUNC_(str[, format]) - Parses the `str` expression with the `format` expression to a time.
+    If `format` is malformed or its application does not result in a well-formed time, the function
+    returns NULL. By default, it follows casting rules to a time if the `format` is omitted.
+  """,
+ arguments = """
+ Arguments:
+ * str - A string to be parsed to time.
+ * format - Time format pattern to follow. See Datetime Patterns for valid
+ time format patterns.
+ """,
+ examples = """
+ Examples:
+ > SELECT _FUNC_('00:12:00.001');
+ 00:12:00.001
+ > SELECT _FUNC_('12.10.05.999999', 'HH.mm.ss.SSSSSS');
+ 12:10:05.999999
+ > SELECT _FUNC_('foo', 'HH:mm:ss');
+ NULL
+ """,
+ group = "datetime_funcs",
+ since = "4.1.0")
+// scalastyle:on line.size.limit
+object TryToTimeExpressionBuilder extends ExpressionBuilder {
+ override def build(funcName: String, expressions: Seq[Expression]): Expression = {
+ val numArgs = expressions.length
+ if (numArgs == 1 || numArgs == 2) {
+ TryEval(ToTime(expressions.head, expressions.drop(1).lastOption))
+ } else {
+ throw QueryCompilationErrors.wrongNumArgsError(funcName, Seq(1, 2), numArgs)
+ }
+ }
+}
diff --git a/sql/core/src/test/resources/sql-functions/sql-expression-schema.md b/sql/core/src/test/resources/sql-functions/sql-expression-schema.md
index 0db551bb286f2..70853c3410f66 100644
--- a/sql/core/src/test/resources/sql-functions/sql-expression-schema.md
+++ b/sql/core/src/test/resources/sql-functions/sql-expression-schema.md
@@ -367,6 +367,7 @@
| org.apache.spark.sql.catalyst.expressions.TrySubtract | try_subtract | SELECT try_subtract(2, 1) | struct |
| org.apache.spark.sql.catalyst.expressions.TryToBinary | try_to_binary | SELECT try_to_binary('abc', 'utf-8') | struct |
| org.apache.spark.sql.catalyst.expressions.TryToNumber | try_to_number | SELECT try_to_number('454', '999') | struct |
+| org.apache.spark.sql.catalyst.expressions.TryToTimeExpressionBuilder | try_to_time | SELECT try_to_time('00:12:00.001') | struct |
| org.apache.spark.sql.catalyst.expressions.TryToTimestampExpressionBuilder | try_to_timestamp | SELECT try_to_timestamp('2016-12-31 00:12:00') | struct |
| org.apache.spark.sql.catalyst.expressions.TryUrlDecode | try_url_decode | SELECT try_url_decode('https%3A%2F%2Fspark.apache.org') | struct |
| org.apache.spark.sql.catalyst.expressions.TryValidateUTF8 | try_validate_utf8 | SELECT try_validate_utf8('Spark') | struct |
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/time.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/time.sql.out
index 6dc6cb817840c..f4c9e504930b8 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/time.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/time.sql.out
@@ -44,3 +44,78 @@ select to_time("13-60", "HH-mm")
-- !query analysis
Project [to_time(13-60, Some(HH-mm)) AS to_time(13-60, HH-mm)#x]
+- OneRowRelation
+
+
+-- !query
+select try_to_time(null), try_to_time('00:12:00'), try_to_time('01:02:03', 'HH:mm:ss')
+-- !query analysis
+Project [try_to_time(to_time(null, None)) AS try_to_time(to_time(NULL))#x, try_to_time(to_time(00:12:00, None)) AS try_to_time(to_time(00:12:00))#x, try_to_time(to_time(01:02:03, Some(HH:mm:ss))) AS try_to_time(to_time(01:02:03, HH:mm:ss))#x]
++- OneRowRelation
+
+
+-- !query
+select try_to_time(1)
+-- !query analysis
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+ "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
+ "sqlState" : "42K09",
+ "messageParameters" : {
+ "inputSql" : "\"1\"",
+ "inputType" : "\"INT\"",
+ "paramIndex" : "first",
+ "requiredType" : "\"STRING\"",
+ "sqlExpr" : "\"to_time(1)\""
+ },
+ "queryContext" : [ {
+ "objectType" : "",
+ "objectName" : "",
+ "startIndex" : 8,
+ "stopIndex" : 21,
+ "fragment" : "try_to_time(1)"
+ } ]
+}
+
+
+-- !query
+select try_to_time('12:48:31 abc')
+-- !query analysis
+Project [try_to_time(to_time(12:48:31 abc, None)) AS try_to_time(to_time(12:48:31 abc))#x]
++- OneRowRelation
+
+
+-- !query
+select try_to_time('10:11:12.', 'HH:mm:ss.SSSSSS')
+-- !query analysis
+Project [try_to_time(to_time(10:11:12., Some(HH:mm:ss.SSSSSS))) AS try_to_time(to_time(10:11:12., HH:mm:ss.SSSSSS))#x]
++- OneRowRelation
+
+
+-- !query
+select try_to_time("02-69", "HH-mm")
+-- !query analysis
+Project [try_to_time(to_time(02-69, Some(HH-mm))) AS try_to_time(to_time(02-69, HH-mm))#x]
++- OneRowRelation
+
+
+-- !query
+select try_to_time('11:12:13', 'HH:mm:ss', 'SSSSSS')
+-- !query analysis
+org.apache.spark.sql.AnalysisException
+{
+ "errorClass" : "WRONG_NUM_ARGS.WITHOUT_SUGGESTION",
+ "sqlState" : "42605",
+ "messageParameters" : {
+ "actualNum" : "3",
+ "docroot" : "https://spark.apache.org/docs/latest",
+ "expectedNum" : "[1, 2]",
+ "functionName" : "`try_to_time`"
+ },
+ "queryContext" : [ {
+ "objectType" : "",
+ "objectName" : "",
+ "startIndex" : 8,
+ "stopIndex" : 52,
+ "fragment" : "try_to_time('11:12:13', 'HH:mm:ss', 'SSSSSS')"
+ } ]
+}
diff --git a/sql/core/src/test/resources/sql-tests/inputs/time.sql b/sql/core/src/test/resources/sql-tests/inputs/time.sql
index 61690221ab9c1..d7dae05e8e8b0 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/time.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/time.sql
@@ -11,3 +11,10 @@ select to_time(time_str, fmt_str) from time_view;
select to_time("11", "HH");
-- invalid: there is no 13 hours
select to_time("13-60", "HH-mm");
+
+select try_to_time(null), try_to_time('00:12:00'), try_to_time('01:02:03', 'HH:mm:ss');
+select try_to_time(1);
+select try_to_time('12:48:31 abc');
+select try_to_time('10:11:12.', 'HH:mm:ss.SSSSSS');
+select try_to_time("02-69", "HH-mm");
+select try_to_time('11:12:13', 'HH:mm:ss', 'SSSSSS');
diff --git a/sql/core/src/test/resources/sql-tests/results/time.sql.out b/sql/core/src/test/resources/sql-tests/results/time.sql.out
index 806cd7262e715..7e22789026308 100644
--- a/sql/core/src/test/resources/sql-tests/results/time.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/time.sql.out
@@ -53,3 +53,86 @@ org.apache.spark.SparkDateTimeException
"input" : "'13-60'"
}
}
+
+
+-- !query
+select try_to_time(null), try_to_time('00:12:00'), try_to_time('01:02:03', 'HH:mm:ss')
+-- !query schema
+struct
+-- !query output
+NULL 00:12:00 01:02:03
+
+
+-- !query
+select try_to_time(1)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+ "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE",
+ "sqlState" : "42K09",
+ "messageParameters" : {
+ "inputSql" : "\"1\"",
+ "inputType" : "\"INT\"",
+ "paramIndex" : "first",
+ "requiredType" : "\"STRING\"",
+ "sqlExpr" : "\"to_time(1)\""
+ },
+ "queryContext" : [ {
+ "objectType" : "",
+ "objectName" : "",
+ "startIndex" : 8,
+ "stopIndex" : 21,
+ "fragment" : "try_to_time(1)"
+ } ]
+}
+
+
+-- !query
+select try_to_time('12:48:31 abc')
+-- !query schema
+struct
+-- !query output
+NULL
+
+
+-- !query
+select try_to_time('10:11:12.', 'HH:mm:ss.SSSSSS')
+-- !query schema
+struct
+-- !query output
+NULL
+
+
+-- !query
+select try_to_time("02-69", "HH-mm")
+-- !query schema
+struct
+-- !query output
+NULL
+
+
+-- !query
+select try_to_time('11:12:13', 'HH:mm:ss', 'SSSSSS')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+{
+ "errorClass" : "WRONG_NUM_ARGS.WITHOUT_SUGGESTION",
+ "sqlState" : "42605",
+ "messageParameters" : {
+ "actualNum" : "3",
+ "docroot" : "https://spark.apache.org/docs/latest",
+ "expectedNum" : "[1, 2]",
+ "functionName" : "`try_to_time`"
+ },
+ "queryContext" : [ {
+ "objectType" : "",
+ "objectName" : "",
+ "startIndex" : 8,
+ "stopIndex" : 52,
+ "fragment" : "try_to_time('11:12:13', 'HH:mm:ss', 'SSSSSS')"
+ } ]
+}