[SPARK-40426][SQL] Return a map from SparkThrowable.getMessageParameters
### What changes were proposed in this pull request?
In this PR, I propose to change the `SparkThrowable` interface:
1. Return a map of parameter names to their values from `getMessageParameters()` (see the caller-side sketch below).
2. Remove `getParameterNames()`, because the names can now be retrieved from `getMessageParameters()`.
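
For illustration only (not part of this PR's diff), a minimal caller-side sketch in Scala of consuming the new map-based accessor; the helper object and its name are invented:

```scala
import scala.collection.JavaConverters._

import org.apache.spark.SparkThrowable

// Hypothetical helper: render the parameters of any SparkThrowable as
// "name=value" pairs via the new map-returning accessor.
object ThrowableParams {
  def format(e: SparkThrowable): String =
    e.getMessageParameters.asScala      // java.util.Map -> Scala map view
      .toSeq.sortBy(_._1)               // deterministic ordering by parameter name
      .map { case (name, value) => s"$name=$value" }
      .mkString(", ")
}
```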

### Why are the changes needed?
To simplify the implementation and improve code maintenance.

### Does this PR introduce _any_ user-facing change?
No.

### How was this patch tested?
By running the affected test suites:
```
$ build/sbt "core/testOnly *SparkThrowableSuite"
$ build/sbt "sql/testOnly org.apache.spark.sql.SQLQueryTestSuite"
```

Closes #37871 from MaxGekk/getMessageParameters-map.

Authored-by: Max Gekk <max.gekk@gmail.com>
Signed-off-by: Hyukjin Kwon <gurwls223@apache.org>
MaxGekk authored and HyukjinKwon committed Sep 15, 2022
1 parent c134c75 commit ea6857a
Showing 7 changed files with 34 additions and 85 deletions.
12 changes: 5 additions & 7 deletions core/src/main/java/org/apache/spark/SparkThrowable.java
@@ -19,6 +19,9 @@

import org.apache.spark.annotation.Evolving;

+ import java.util.HashMap;
+ import java.util.Map;

/**
* Interface mixed into Throwables thrown from Spark.
*
@@ -51,13 +54,8 @@ default boolean isInternalError() {
return SparkThrowableHelper.isInternalError(this.getErrorClass());
}

- default String[] getMessageParameters() {
- return new String[]{};
- }
-
- // Returns a string array of all parameters that need to be passed to this error message.
- default String[] getParameterNames() {
- return SparkThrowableHelper.getParameterNames(this.getErrorClass(), this.getErrorSubClass());
+ default Map<String, String> getMessageParameters() {
+ return new HashMap();
}

default QueryContext[] getQueryContext() { return new QueryContext[0]; }
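
For orientation while reading the diff, here is a hypothetical Scala implementor of the updated interface; the class and its fields are invented, and it assumes `getErrorClass` (plus, defensively, `getErrorSubClass`) are the only members that need overriding. It mirrors the `messageParameters.asJava` pattern the Spark exception classes below adopt:

```scala
import scala.collection.JavaConverters._

import org.apache.spark.SparkThrowable

// Hypothetical class, not part of this commit: parameters are held in a Scala
// map and exposed through the new java.util.Map-based contract.
class DemoSparkError(
    errorClass: String,
    messageParameters: Map[String, String])
  extends Exception with SparkThrowable {

  override def getErrorClass: String = errorClass
  override def getErrorSubClass: String = null  // no sub-class in this sketch
  override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava
}
```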
SparkOutOfMemoryError.java
@@ -47,8 +47,8 @@ public SparkOutOfMemoryError(String errorClass, Map<String, String> messageParam
}

@Override
- public String[] getMessageParameters() {
- return SparkThrowableHelper.getMessageParameters(errorClass, null, messageParameters);
+ public Map<String, String> getMessageParameters() {
+ return messageParameters;
}

@Override
68 changes: 18 additions & 50 deletions core/src/main/scala/org/apache/spark/SparkException.scala
@@ -22,6 +22,8 @@ import java.sql.{SQLException, SQLFeatureNotSupportedException}
import java.time.DateTimeException
import java.util.ConcurrentModificationException

+ import scala.collection.JavaConverters._

import org.apache.hadoop.fs.FileAlreadyExistsException

class SparkException(
@@ -86,11 +88,7 @@ class SparkException(
errorSubClass = Some(errorSubClass),
messageParameters = messageParameters)

- override def getMessageParameters: Array[String] = {
- errorClass.map { ec =>
- SparkThrowableHelper.getMessageParameters(ec, errorSubClass.orNull, messageParameters)
- }.getOrElse(Array.empty)
- }
+ override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

override def getErrorClass: String = errorClass.orNull
override def getErrorSubClass: String = errorSubClass.orNull
@@ -146,9 +144,7 @@ private[spark] class SparkUpgradeException(
SparkThrowableHelper.getMessage(errorClass, errorSubClass.orNull, messageParameters), cause)
with SparkThrowable {

- override def getMessageParameters: Array[String] = {
- SparkThrowableHelper.getMessageParameters(errorClass, errorSubClass.orNull, messageParameters)
- }
+ override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

override def getErrorClass: String = errorClass
override def getErrorSubClass: String = errorSubClass.orNull}
@@ -166,9 +162,7 @@ private[spark] class SparkArithmeticException(
SparkThrowableHelper.getMessage(errorClass, errorSubClass.orNull, messageParameters, summary))
with SparkThrowable {

- override def getMessageParameters: Array[String] = {
- SparkThrowableHelper.getMessageParameters(errorClass, errorSubClass.orNull, messageParameters)
- }
+ override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

override def getErrorClass: String = errorClass
override def getErrorSubClass: String = errorSubClass.orNull
@@ -195,9 +189,7 @@ private[spark] class SparkUnsupportedOperationException(
errorSubClass = Some(errorSubClass),
messageParameters = messageParameters)

- override def getMessageParameters: Array[String] = {
- SparkThrowableHelper.getMessageParameters(errorClass, errorSubClass.orNull, messageParameters)
- }
+ override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

override def getErrorClass: String = errorClass
override def getErrorSubClass: String = errorSubClass.orNull
@@ -215,9 +207,7 @@ private[spark] class SparkClassNotFoundException(
SparkThrowableHelper.getMessage(errorClass, errorSubClass.orNull, messageParameters), cause)
with SparkThrowable {

- override def getMessageParameters: Array[String] = {
- SparkThrowableHelper.getMessageParameters(errorClass, errorSubClass.orNull, messageParameters)
- }
+ override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

override def getErrorClass: String = errorClass
override def getErrorSubClass: String = errorSubClass.orNull}
@@ -234,9 +224,7 @@ private[spark] class SparkConcurrentModificationException(
SparkThrowableHelper.getMessage(errorClass, errorSubClass.orNull, messageParameters), cause)
with SparkThrowable {

- override def getMessageParameters: Array[String] = {
- SparkThrowableHelper.getMessageParameters(errorClass, errorSubClass.orNull, messageParameters)
- }
+ override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

override def getErrorClass: String = errorClass
override def getErrorSubClass: String = errorSubClass.orNull}
@@ -254,9 +242,7 @@ private[spark] class SparkDateTimeException(
SparkThrowableHelper.getMessage(errorClass, errorSubClass.orNull, messageParameters, summary))
with SparkThrowable {

- override def getMessageParameters: Array[String] = {
- SparkThrowableHelper.getMessageParameters(errorClass, errorSubClass.orNull, messageParameters)
- }
+ override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

override def getErrorClass: String = errorClass
override def getErrorSubClass: String = errorSubClass.orNull
@@ -274,9 +260,7 @@ private[spark] class SparkFileAlreadyExistsException(
SparkThrowableHelper.getMessage(errorClass, errorSubClass.orNull, messageParameters))
with SparkThrowable {

- override def getMessageParameters: Array[String] = {
- SparkThrowableHelper.getMessageParameters(errorClass, errorSubClass.orNull, messageParameters)
- }
+ override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

override def getErrorClass: String = errorClass
override def getErrorSubClass: String = errorSubClass.orNull}
@@ -292,9 +276,7 @@ private[spark] class SparkFileNotFoundException(
SparkThrowableHelper.getMessage(errorClass, errorSubClass.orNull, messageParameters))
with SparkThrowable {

- override def getMessageParameters: Array[String] = {
- SparkThrowableHelper.getMessageParameters(errorClass, errorSubClass.orNull, messageParameters)
- }
+ override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

override def getErrorClass: String = errorClass
override def getErrorSubClass: String = errorSubClass.orNull}
@@ -312,9 +294,7 @@ private[spark] class SparkNumberFormatException(
SparkThrowableHelper.getMessage(errorClass, errorSubClass.orNull, messageParameters, summary))
with SparkThrowable {

- override def getMessageParameters: Array[String] = {
- SparkThrowableHelper.getMessageParameters(errorClass, errorSubClass.orNull, messageParameters)
- }
+ override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

override def getErrorClass: String = errorClass
override def getErrorSubClass: String = errorSubClass.orNull
@@ -334,9 +314,7 @@ private[spark] class SparkIllegalArgumentException(
SparkThrowableHelper.getMessage(errorClass, errorSubClass.orNull, messageParameters, summary))
with SparkThrowable {

- override def getMessageParameters: Array[String] = {
- SparkThrowableHelper.getMessageParameters(errorClass, errorSubClass.orNull, messageParameters)
- }
+ override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

override def getErrorClass: String = errorClass
override def getErrorSubClass: String = errorSubClass.orNull
@@ -379,9 +357,7 @@ private[spark] class SparkRuntimeException(
cause = null,
context = Array.empty[QueryContext])

- override def getMessageParameters: Array[String] = {
- SparkThrowableHelper.getMessageParameters(errorClass, errorSubClass.orNull, messageParameters)
- }
+ override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

override def getErrorClass: String = errorClass
override def getErrorSubClass: String = errorSubClass.orNull
@@ -399,9 +375,7 @@ private[spark] class SparkSecurityException(
SparkThrowableHelper.getMessage(errorClass, errorSubClass.orNull, messageParameters))
with SparkThrowable {

- override def getMessageParameters: Array[String] = {
- SparkThrowableHelper.getMessageParameters(errorClass, errorSubClass.orNull, messageParameters)
- }
+ override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

override def getErrorClass: String = errorClass
override def getErrorSubClass: String = errorSubClass.orNull
@@ -420,9 +394,7 @@ private[spark] class SparkArrayIndexOutOfBoundsException(
SparkThrowableHelper.getMessage(errorClass, errorSubClass.orNull, messageParameters, summary))
with SparkThrowable {

- override def getMessageParameters: Array[String] = {
- SparkThrowableHelper.getMessageParameters(errorClass, errorSubClass.orNull, messageParameters)
- }
+ override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

override def getErrorClass: String = errorClass
override def getErrorSubClass: String = errorSubClass.orNull
@@ -446,9 +418,7 @@ private[spark] class SparkSQLException(
errorSubClass = None,
messageParameters = messageParameters)

- override def getMessageParameters: Array[String] = {
- SparkThrowableHelper.getMessageParameters(errorClass, errorSubClass.orNull, messageParameters)
- }
+ override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

override def getErrorClass: String = errorClass
override def getErrorSubClass: String = errorSubClass.orNull
@@ -474,9 +444,7 @@ private[spark] class SparkSQLFeatureNotSupportedException(
errorSubClass = Some(errorSubClass),
messageParameters = messageParameters)

- override def getMessageParameters: Array[String] = {
- SparkThrowableHelper.getMessageParameters(errorClass, errorSubClass.orNull, messageParameters)
- }
+ override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

override def getErrorClass: String = errorClass
override def getErrorSubClass: String = errorSubClass.orNull
22 changes: 3 additions & 19 deletions core/src/main/scala/org/apache/spark/SparkThrowableHelper.scala
@@ -101,20 +101,6 @@ private[spark] object SparkThrowableHelper {
parameterNames
}

- def getMessageParameters(
- errorClass: String,
- errorSubCLass: String,
- params: Map[String, String]): Array[String] = {
- getParameterNames(errorClass, errorSubCLass).map(params.getOrElse(_, "?"))
- }
-
- def getMessageParameters(
- errorClass: String,
- errorSubCLass: String,
- params: java.util.Map[String, String]): Array[String] = {
- getParameterNames(errorClass, errorSubCLass).map(params.getOrDefault(_, "?"))
- }

def getMessage(
errorClass: String,
errorSubClass: String,
@@ -185,8 +171,6 @@ private[spark] object SparkThrowableHelper {
}
case MINIMAL | STANDARD =>
val errorClass = e.getErrorClass
- assert(e.getParameterNames.size == e.getMessageParameters.size,
- "Number of message parameter names and values must be the same")
toJsonString { generator =>
val g = generator.useDefaultPrettyPrinter()
g.writeStartObject()
@@ -200,10 +184,10 @@
}
val sqlState = e.getSqlState
if (sqlState != null) g.writeStringField("sqlState", sqlState)
- val parameterNames = e.getParameterNames
- if (!parameterNames.isEmpty) {
+ val messageParameters = e.getMessageParameters
+ if (!messageParameters.isEmpty) {
g.writeObjectFieldStart("messageParameters")
- (parameterNames zip e.getMessageParameters).sortBy(_._1).foreach { case (name, value) =>
+ messageParameters.asScala.toSeq.sortBy(_._1).foreach { case (name, value) =>
g.writeStringField(name, value)
}
g.writeEndObject()
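
As a standalone, hypothetical illustration of the convert-and-sort step used in the rewritten loop above (plain Scala plus `JavaConverters`, no Spark dependency; the parameter names and values are made up):

```scala
import scala.collection.JavaConverters._

object SortedParamsDemo {
  def main(args: Array[String]): Unit = {
    // A java.util.Map like the one getMessageParameters now returns.
    val params = new java.util.HashMap[String, String]()
    params.put("objectName", "`v`")
    params.put("proposal", "`t.c`")

    // Same shape as the JSON-writing loop: Java map -> Scala pairs, sorted by name.
    params.asScala.toSeq.sortBy(_._1).foreach { case (name, value) =>
      println(s"$name=$value")
    }
  }
}
```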
3 changes: 2 additions & 1 deletion core/src/test/scala/org/apache/spark/SparkFunSuite.scala
@@ -23,6 +23,7 @@ import java.nio.file.{Files, Path}
import java.util.{Locale, TimeZone}

import scala.annotation.tailrec
+ import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer

import org.apache.commons.io.FileUtils
@@ -305,7 +306,7 @@ abstract class SparkFunSuite
assert(exception.getErrorSubClass === errorSubClass.get)
}
sqlState.foreach(state => assert(exception.getSqlState === state))
- val expectedParameters = (exception.getParameterNames zip exception.getMessageParameters).toMap
+ val expectedParameters = exception.getMessageParameters.asScala
if (matchPVals == true) {
assert(expectedParameters.size === parameters.size)
expectedParameters.foreach(
SparkThrowableSuite.scala
@@ -161,7 +161,7 @@ class SparkThrowableSuite extends SparkFunSuite {
getMessage("UNRESOLVED_COLUMN", "WITHOUT_SUGGESTION", Map.empty[String, String])
}
assert(e.getErrorClass === "INTERNAL_ERROR")
- assert(e.getMessageParameters.head.contains("Undefined an error message parameter"))
+ assert(e.getMessageParameters().get("message").contains("Undefined an error message parameter"))

// Does not fail with too many args (expects 0 args)
assert(getMessage("DIVIDE_BY_ZERO", null, Map("config" -> "foo", "a" -> "bar")) ==
AnalysisException.scala
@@ -17,6 +17,8 @@

package org.apache.spark.sql

+ import scala.collection.JavaConverters._

import org.apache.spark.{QueryContext, SparkThrowable, SparkThrowableHelper}
import org.apache.spark.annotation.Stable
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
@@ -165,11 +167,7 @@ class AnalysisException protected[sql] (
message
}

- override def getMessageParameters: Array[String] = {
- errorClass.map { ec =>
- SparkThrowableHelper.getMessageParameters(ec, errorSubClass.orNull, messageParameters)
- }.getOrElse(Array.empty)
- }
+ override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava

override def getErrorClass: String = errorClass.orNull
override def getErrorSubClass: String = errorSubClass.orNull