Skip to content

Commit

Permalink
[SPARK-21957][SQL][FOLLOWUP] Support CURRENT_USER without trailing par…
Browse files Browse the repository at this point in the history
…entheses

### What changes were proposed in this pull request?

A followup for 345d35e. In this PR we support CURRENT_USER without trailing parentheses in default mode. In ANSI mode, CURRENT_USER can only be used without trailing parentheses, because it is a reserved keyword and therefore cannot be used as a function name.

### Why are the changes needed?

1. make it the same as current_date/current_timestamp
2. better ANSI compliance
### Does this PR introduce _any_ user-facing change?

no, just a followup

### How was this patch tested?

new tests

Closes apache#32770 from yaooqinn/SPARK-21957-F.

Authored-by: Kent Yao <yao@apache.org>
Signed-off-by: Wenchen Fan <wenchen@databricks.com>
  • Loading branch information
yaooqinn authored and cloud-fan committed Jun 4, 2021
1 parent 6ce5f24 commit dc3317f
Show file tree
Hide file tree
Showing 5 changed files with 36 additions and 8 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -816,7 +816,7 @@ valueExpression
;

primaryExpression
: name=(CURRENT_DATE | CURRENT_TIMESTAMP) #currentDatetime
: name=(CURRENT_DATE | CURRENT_TIMESTAMP | CURRENT_USER) #currentLike
| CASE whenClause+ (ELSE elseExpression=expression)? END #searchedCase
| CASE value=expression whenClause+ (ELSE elseExpression=expression)? END #simpleCase
| name=(CAST | TRY_CAST) '(' expression AS dataType ')' #cast
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1652,6 +1652,7 @@ class Analyzer(override val catalogManager: CatalogManager)
private val literalFunctions: Seq[(String, () => Expression, Expression => String)] = Seq(
(CurrentDate().prettyName, () => CurrentDate(), toPrettySQL(_)),
(CurrentTimestamp().prettyName, () => CurrentTimestamp(), toPrettySQL(_)),
(CurrentUser().prettyName, () => CurrentUser(), toPrettySQL),
(VirtualColumn.hiveGroupingIdName, () => GroupingID(Nil), _ => VirtualColumn.hiveGroupingIdName)
)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1683,13 +1683,15 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with SQLConfHelper with Logg
}
}

override def visitCurrentDatetime(ctx: CurrentDatetimeContext): Expression = withOrigin(ctx) {
override def visitCurrentLike(ctx: CurrentLikeContext): Expression = withOrigin(ctx) {
if (conf.ansiEnabled) {
ctx.name.getType match {
case SqlBaseParser.CURRENT_DATE =>
CurrentDate()
case SqlBaseParser.CURRENT_TIMESTAMP =>
CurrentTimestamp()
case SqlBaseParser.CURRENT_USER =>
CurrentUser()
}
} else {
// If the parser is not in ansi mode, we should return `UnresolvedAttribute`, in case there
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,8 @@
package org.apache.spark.sql

import org.apache.spark.{SPARK_REVISION, SPARK_VERSION_SHORT}
import org.apache.spark.sql.catalyst.parser.ParseException
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSparkSession

class MiscFunctionsSuite extends QueryTest with SharedSparkSession {
Expand All @@ -42,8 +44,17 @@ class MiscFunctionsSuite extends QueryTest with SharedSparkSession {
}

test("SPARK-21957: get current_user in normal spark apps") {
val df = sql("select current_user()")
checkAnswer(df, Row(spark.sparkContext.sparkUser))
val user = spark.sparkContext.sparkUser
withSQLConf(SQLConf.ANSI_ENABLED.key -> "false") {
val df = sql("select current_user(), current_user")
checkAnswer(df, Row(user, user))
}
withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") {
val df = sql("select current_user")
checkAnswer(df, Row(spark.sparkContext.sparkUser))
val e = intercept[ParseException](sql("select current_user()"))
assert(e.getMessage.contains("current_user"))
}
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ package org.apache.spark.sql.hive.thriftserver
import java.sql.SQLException
import java.util.concurrent.atomic.AtomicBoolean

import org.apache.hive.service.cli.HiveSQLException
import org.apache.hive.service.cli.{HiveSQLException, OperationHandle}

import org.apache.spark.TaskKilled
import org.apache.spark.scheduler.{SparkListener, SparkListenerTaskEnd}
Expand Down Expand Up @@ -125,9 +125,23 @@ trait ThriftServerWithSparkContextSuite extends SharedThriftServer {
withCLIServiceClient(clientUser) { client =>
val sessionHandle = client.openSession(clientUser, "")
val confOverlay = new java.util.HashMap[java.lang.String, java.lang.String]
val opHandle = client.executeStatement(sessionHandle, sql, confOverlay)
val rowSet = client.fetchResults(opHandle)
assert(rowSet.toTRowSet.getColumns.get(0).getStringVal.getValues.get(0) === clientUser)
val exec: String => OperationHandle = client.executeStatement(sessionHandle, _, confOverlay)

exec(s"set ${SQLConf.ANSI_ENABLED.key}=false")

val opHandle1 = exec("select current_user(), current_user")
val rowSet1 = client.fetchResults(opHandle1)
rowSet1.toTRowSet.getColumns.forEach { col =>
assert(col.getStringVal.getValues.get(0) === clientUser)
}

exec(s"set ${SQLConf.ANSI_ENABLED.key}=true")
val opHandle2 = exec("select current_user")
assert(client.fetchResults(opHandle2).toTRowSet.getColumns.get(0)
.getStringVal.getValues.get(0) === clientUser)

val e = intercept[HiveSQLException](exec("select current_user()"))
assert(e.getMessage.contains("current_user"))
}
}
}
Expand Down

0 comments on commit dc3317f

Please sign in to comment.