[SPARK-41580][SQL] Assign name to _LEGACY_ERROR_TEMP_2137 #39305

10 changes: 5 additions & 5 deletions core/src/main/resources/error/error-classes.json
@@ -755,6 +755,11 @@
       "The identifier <ident> is invalid. Please, consider quoting it with back-quotes as `<ident>`."
     ]
   },
+  "INVALID_JSON_ROOT_FIELD" : {
+    "message" : [
+      "Cannot convert JSON root field to target Spark type."
+    ]
+  },
   "INVALID_JSON_SCHEMA_MAP_TYPE" : {
     "message" : [
       "Input schema <jsonSchema> can only contain STRING as a key type for a MAP."
@@ -4110,11 +4115,6 @@
       "Failed to parse an empty string for data type <dataType>"
     ]
   },
-  "_LEGACY_ERROR_TEMP_2137" : {
-    "message" : [
-      "Root converter returned null"
-    ]
-  },
   "_LEGACY_ERROR_TEMP_2138" : {
     "message" : [
       "Cannot have circular references in bean class, but got the circular reference of class <clazz>"
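For context, here is a minimal sketch (not part of the diff) of how the renamed error surfaces. Assuming a FAILFAST read of a JSON array containing a root-level null, mirroring the regression test updated below, the root converter cannot produce a row, and the named error class now appears as the root cause in place of the legacy "Root converter returned null" message:

```scala
// Sketch (e.g. in spark-shell, where `spark` and its implicits are in scope);
// exception messages below are abbreviated from the error templates.
import spark.implicits._

spark.read
  .option("mode", "failfast")
  .schema("a STRING")
  .json(Seq("""[{"a": "str"}, null]""").toDS)
  .collect()
// org.apache.spark.SparkException: Malformed records are detected ...
// Caused by (root cause) org.apache.spark.SparkRuntimeException:
//   [INVALID_JSON_ROOT_FIELD] Cannot convert JSON root field to target Spark type.
```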
sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -1457,7 +1457,7 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase {

   def rootConverterReturnNullError(): SparkRuntimeException = {
     new SparkRuntimeException(
-      errorClass = "_LEGACY_ERROR_TEMP_2137",
+      errorClass = "INVALID_JSON_ROOT_FIELD",
       messageParameters = Map.empty)
   }

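A side benefit of the rename, sketched below under the assumption that the error reaches callers as the root cause of a SparkException (as the updated test asserts): downstream code can match on the stable error class through Spark's SparkThrowable API instead of string-matching the legacy message text.

```scala
// Sketch: matching on the new error class name. getErrorClass comes from
// Spark's SparkThrowable interface; isInvalidJsonRoot is a hypothetical helper.
import org.apache.commons.lang3.exception.ExceptionUtils
import org.apache.spark.{SparkException, SparkRuntimeException}

def isInvalidJsonRoot(e: SparkException): Boolean =
  ExceptionUtils.getRootCause(e) match {
    case r: SparkRuntimeException => r.getErrorClass == "INVALID_JSON_ROOT_FIELD"
    case _ => false
  }
```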
sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
@@ -25,11 +25,12 @@ import java.time.{Duration, Instant, LocalDate, LocalDateTime, Period, ZoneId}
 import java.util.Locale
 
 import com.fasterxml.jackson.core.JsonFactory
+import org.apache.commons.lang3.exception.ExceptionUtils
 import org.apache.hadoop.fs.{Path, PathFilter}
 import org.apache.hadoop.io.SequenceFile.CompressionType
 import org.apache.hadoop.io.compress.GzipCodec
 
-import org.apache.spark.{SparkConf, SparkException, SparkUpgradeException, TestUtils}
+import org.apache.spark.{SparkConf, SparkException, SparkRuntimeException, SparkUpgradeException, TestUtils}
 import org.apache.spark.rdd.RDD
 import org.apache.spark.sql.{functions => F, _}
 import org.apache.spark.sql.catalyst.json._
@@ -3192,10 +3193,17 @@ abstract class JsonSuite
   }
 
   test("SPARK-36379: proceed parsing with root nulls in permissive mode") {
-    assert(intercept[SparkException] {
+    val exception = intercept[SparkException] {
       spark.read.option("mode", "failfast")
         .schema("a string").json(Seq("""[{"a": "str"}, null]""").toDS).collect()
-    }.getMessage.contains("Malformed records are detected"))
+    }
+    assert(exception.getMessage.contains("Malformed records are detected"))
+
+    checkError(
+      exception = ExceptionUtils.getRootCause(exception).asInstanceOf[SparkRuntimeException],
+      errorClass = "INVALID_JSON_ROOT_FIELD",
+      parameters = Map.empty
+    )
 
     // Permissive modes should proceed parsing malformed records (null).
     // Here, since an array fails to parse in the middle, we will return one row.
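For readers unfamiliar with the test helper: checkError comes from Spark's test utilities and compares the structured error (error class and message parameters) rather than raw message text. Roughly, and only as a simplified sketch assuming the Map-based message-parameter API used in this change, the call above amounts to:

```scala
// Simplified sketch of what the checkError call asserts: the root cause
// carries the new error class and an empty parameter map.
val root = ExceptionUtils.getRootCause(exception).asInstanceOf[SparkRuntimeException]
assert(root.getErrorClass == "INVALID_JSON_ROOT_FIELD")
assert(root.getMessageParameters.isEmpty)
```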