Skip to content

Commit

Permalink
[SPARK-48601][SQL] Give a more user friendly error message when setti…
Browse files Browse the repository at this point in the history
…ng a null value for JDBC Option

### What changes were proposed in this pull request?
In this PR, the proposed changes add a check validating that JDBC Option values are not null, and throw a user-friendly error if they are.

### Why are the changes needed?
When setting a `null` value for a JDBC Option, a Spark-internal exception is thrown due to a `java.lang.NullPointerException`. A more user-friendly message should be thrown in such cases.

### Does this PR introduce _any_ user-facing change?
No

### How was this patch tested?
A new test in this PR.

### Was this patch authored or co-authored using generative AI tooling?
No

Closes #46955 from stevomitric/stevomitric/fix-jdbcoptions.

Authored-by: Stevo Mitric <stevo.mitric@databricks.com>
Signed-off-by: Wenchen Fan <wenchen@databricks.com>
  • Loading branch information
stevomitric authored and cloud-fan committed Jun 19, 2024
1 parent 5e28e95 commit 878dd6a
Show file tree
Hide file tree
Showing 4 changed files with 36 additions and 2 deletions.
6 changes: 6 additions & 0 deletions common/utils/src/main/resources/error/error-conditions.json
Original file line number Diff line number Diff line change
Expand Up @@ -3328,6 +3328,12 @@
],
"sqlState" : "42000"
},
"NULL_DATA_SOURCE_OPTION" : {
"message" : [
"Data source read/write option <option> cannot have null value."
],
"sqlState" : "22024"
},
"NULL_MAP_KEY" : {
"message" : [
"Cannot use null as map key."
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -175,6 +175,13 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase with Compilat
"functionName" -> toSQLId(funcName)))
}

/**
 * Error raised when a data source read/write option is given a `null` value.
 *
 * @param option the name of the offending option, interpolated into the message
 * @return an [[AnalysisException]] with error class `NULL_DATA_SOURCE_OPTION`
 */
def nullDataSourceOption(option: String): Throwable =
  new AnalysisException(
    errorClass = "NULL_DATA_SOURCE_OPTION",
    messageParameters = Map("option" -> option))

def unorderablePivotColError(pivotCol: Expression): Throwable = {
new AnalysisException(
errorClass = "INCOMPARABLE_PIVOT_COLUMN",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ import org.apache.commons.io.FilenameUtils
import org.apache.spark.SparkFiles
import org.apache.spark.internal.Logging
import org.apache.spark.sql.catalyst.util.CaseInsensitiveMap
import org.apache.spark.sql.errors.QueryExecutionErrors
import org.apache.spark.sql.errors.{QueryCompilationErrors, QueryExecutionErrors}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types.TimestampNTZType
import org.apache.spark.util.Utils
Expand All @@ -52,7 +52,14 @@ class JDBCOptions(
*/
// A java.util.Properties view of the original (case-preserving) option map,
// for handing off to JDBC drivers. Fails fast with a user-friendly error if
// any option value is null, since Properties.setProperty would otherwise
// throw a raw NullPointerException.
val asProperties: Properties = {
  val props = new Properties()
  for ((key, value) <- parameters.originalMap) {
    // Keys cannot be null here: Scala's Map implementations prohibit null keys.
    if (value == null) {
      throw QueryCompilationErrors.nullDataSourceOption(key)
    }
    props.setProperty(key, value)
  }
  props
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -369,6 +369,20 @@ class JDBCV2Suite extends QueryTest with SharedSparkSession with ExplainSuiteHel
}
}

test("null value for option exception") {
  // A null option value must surface NULL_DATA_SOURCE_OPTION instead of an
  // internal NullPointerException. The error fires lazily, on execution.
  val reader = spark.read
    .option("pushDownOffset", null)
    .table("h2.test.employee")
  val error = intercept[AnalysisException] {
    reader.collect()
  }
  checkError(
    exception = error,
    errorClass = "NULL_DATA_SOURCE_OPTION",
    parameters = Map("option" -> "pushDownOffset"))
}

test("simple scan with OFFSET") {
val df1 = spark.read
.table("h2.test.employee")
Expand Down

0 comments on commit 878dd6a

Please sign in to comment.