From edbe0b7e7f29631f6b92255a7b561c916a6c39cc Mon Sep 17 00:00:00 2001
From: Maxim Gekk
Date: Fri, 7 Dec 2018 18:00:11 +0100
Subject: [PATCH 1/3] Introducing JSONOptionsInWrite

---
 .../expressions/jsonExpressions.scala         |  6 +-
 .../spark/sql/catalyst/json/JSONOptions.scala | 63 ++++++++++++++++++-
 .../catalyst/json/JacksonGeneratorSuite.scala |  2 +-
 .../apache/spark/sql/DataFrameReader.scala    |  4 +-
 .../scala/org/apache/spark/sql/Dataset.scala  |  4 +-
 .../datasources/json/JsonFileFormat.scala     |  5 +-
 .../datasources/json/JsonSuite.scala          |  7 +--
 7 files changed, 74 insertions(+), 17 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/jsonExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/jsonExpressions.scala
index e0cab537ce1c6..44a4d93c0ff8a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/jsonExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/jsonExpressions.scala
@@ -569,7 +569,7 @@ case class JsonToStructs(
   val nameOfCorruptRecord = SQLConf.get.getConf(SQLConf.COLUMN_NAME_OF_CORRUPT_RECORD)
 
   @transient lazy val parser = {
-    val parsedOptions = new JSONOptions(options, timeZoneId.get, nameOfCorruptRecord)
+    val parsedOptions = new JSONOptionsInRead(options, timeZoneId.get, nameOfCorruptRecord)
     val mode = parsedOptions.parseMode
     if (mode != PermissiveMode && mode != FailFastMode) {
       throw new IllegalArgumentException(s"from_json() doesn't support the ${mode.name} mode. " +
@@ -660,7 +660,7 @@ case class StructsToJson(
 
   @transient
   lazy val gen = new JacksonGenerator(
-    inputSchema, writer, new JSONOptions(options, timeZoneId.get))
+    inputSchema, writer, new JSONOptionsInWrite(options, timeZoneId.get))
 
   @transient
   lazy val inputSchema = child.dataType
@@ -764,7 +764,7 @@ case class SchemaOfJson(
   override def nullable: Boolean = false
 
   @transient
-  private lazy val jsonOptions = new JSONOptions(options, "UTC")
+  private lazy val jsonOptions = new JSONOptionsInRead(options, "UTC")
 
   @transient
   private lazy val jsonFactory = {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JSONOptions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JSONOptions.scala
index e10b8a327c01a..2f6384425857e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JSONOptions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JSONOptions.scala
@@ -20,6 +20,8 @@ package org.apache.spark.sql.catalyst.json
 import java.nio.charset.{Charset, StandardCharsets}
 import java.util.{Locale, TimeZone}
 
+import scala.reflect.ClassTag
+
 import com.fasterxml.jackson.core.{JsonFactory, JsonParser}
 import org.apache.commons.lang3.time.FastDateFormat
 
@@ -31,7 +33,7 @@ import org.apache.spark.sql.catalyst.util._
  *
 * Most of these map directly to Jackson's internal options, specified in [[JsonParser.Feature]].
  */
-private[sql] class JSONOptions(
+private[sql] abstract class JSONOptions(
     @transient val parameters: CaseInsensitiveMap[String],
     defaultTimeZoneId: String,
     defaultColumnNameOfCorruptRecord: String)
@@ -65,7 +67,7 @@ private[sql] class JSONOptions(
     parameters.get("allowNonNumericNumbers").map(_.toBoolean).getOrElse(true)
   val allowBackslashEscapingAnyCharacter =
     parameters.get("allowBackslashEscapingAnyCharacter").map(_.toBoolean).getOrElse(false)
-  private val allowUnquotedControlChars =
+  val allowUnquotedControlChars =
     parameters.get("allowUnquotedControlChars").map(_.toBoolean).getOrElse(false)
   val compressionCodec = parameters.get("compression").map(CompressionCodecs.getCodecClassName)
   val parseMode: ParseMode =
@@ -134,6 +136,21 @@ private[sql] class JSONOptions(
   }
 }
 
+private[sql] object JSONOptions {
+  def notApplicable[T: ClassTag](
+      parameters: CaseInsensitiveMap[String],
+      option: String,
+      where: String): T = {
+    if (parameters.get(option).isDefined) {
+      // scalastyle:off throwerror
+      throw new NotImplementedError(s"""The JSON option "${option}" is not applicable in $where.""")
+      // scalastyle:on throwerror
+    } else {
+      new Array[T](1)(0)
+    }
+  }
+}
+
 private[sql] class JSONOptionsInRead(
     @transient override val parameters: CaseInsensitiveMap[String],
     defaultTimeZoneId: String,
@@ -162,6 +179,13 @@ private[sql] class JSONOptionsInRead(
 
     enc
   }
+
+  override val compressionCodec: Option[String] = notApplicable[Option[String]]("compression")
+  override val pretty: Boolean = notApplicable[Boolean]("pretty")
+
+  def notApplicable[T: ClassTag](option: String): T = {
+    JSONOptions.notApplicable(parameters, option, "read")
+  }
 }
 
 private[sql] object JSONOptionsInRead {
@@ -176,3 +200,38 @@ private[sql] object JSONOptionsInRead {
     Charset.forName("UTF-32")
   )
 }
+
+private[sql] class JSONOptionsInWrite(
+    @transient override val parameters: CaseInsensitiveMap[String],
+    defaultTimeZoneId: String)
+  extends JSONOptions(parameters, defaultTimeZoneId, "") {
+
+  def this(
+      parameters: Map[String, String],
+      defaultTimeZoneId: String) = {
+    this(
+      CaseInsensitiveMap(parameters),
+      defaultTimeZoneId)
+  }
+
+  override val samplingRatio = notApplicable[Double]("samplingRatio")
+  override val primitivesAsString = notApplicable[Boolean]("primitivesAsString")
+  override val prefersDecimal = notApplicable[Boolean]("prefersDecimal")
+  override val allowComments = notApplicable[Boolean]("allowComments")
+  override val allowUnquotedFieldNames = notApplicable[Boolean]("allowUnquotedFieldNames")
+  override val allowSingleQuotes = notApplicable[Boolean]("allowSingleQuotes")
+  override val allowNumericLeadingZeros = notApplicable[Boolean]("allowNumericLeadingZeros")
+  override val allowNonNumericNumbers = notApplicable[Boolean]("allowNonNumericNumbers")
+  override val allowBackslashEscapingAnyCharacter = {
+    notApplicable[Boolean]("allowBackslashEscapingAnyCharacter")
+  }
+  override val allowUnquotedControlChars = notApplicable[Boolean]("allowUnquotedControlChars")
+  override val parseMode: ParseMode = notApplicable[ParseMode]("mode")
+  override val columnNameOfCorruptRecord = notApplicable[String]("columnNameOfCorruptRecord")
+  override val dropFieldIfAllNull = notApplicable[Boolean]("dropFieldIfAllNull")
+  override val multiLine = notApplicable[Boolean]("multiLine")
+
+  def notApplicable[T: ClassTag](option: String): T = {
+    JSONOptions.notApplicable(parameters, option, "write")
+  }
+}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/json/JacksonGeneratorSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/json/JacksonGeneratorSuite.scala
index 9b27490ed0e35..799a89ff0334c 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/json/JacksonGeneratorSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/json/JacksonGeneratorSuite.scala
@@ -27,7 +27,7 @@ import org.apache.spark.sql.types._
 class JacksonGeneratorSuite extends SparkFunSuite {
 
   val gmtId = DateTimeUtils.TimeZoneGMT.getID
-  val option = new JSONOptions(Map.empty, gmtId)
+  val option = new JSONOptionsInRead(Map.empty, gmtId)
 
   test("initial with StructType and write out a row") {
     val dataType = StructType(StructField("a", IntegerType) :: Nil)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/DataFrameReader.scala b/sql/core/src/main/scala/org/apache/spark/sql/DataFrameReader.scala
index 661fe98d8c901..c18a7e8ae0c3f 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/DataFrameReader.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/DataFrameReader.scala
@@ -30,7 +30,7 @@ import org.apache.spark.internal.Logging
 import org.apache.spark.rdd.RDD
 import org.apache.spark.sql.catalyst.csv.{CSVHeaderChecker, CSVOptions, UnivocityParser}
 import org.apache.spark.sql.catalyst.expressions.ExprUtils
-import org.apache.spark.sql.catalyst.json.{CreateJacksonParser, JacksonParser, JSONOptions}
+import org.apache.spark.sql.catalyst.json.{CreateJacksonParser, JacksonParser, JSONOptionsInRead}
 import org.apache.spark.sql.catalyst.util.FailureSafeParser
 import org.apache.spark.sql.execution.command.DDLUtils
 import org.apache.spark.sql.execution.datasources.DataSource
@@ -440,7 +440,7 @@ class DataFrameReader private[sql](sparkSession: SparkSession) extends Logging {
    * @since 2.2.0
    */
  def json(jsonDataset: Dataset[String]): DataFrame = {
-    val parsedOptions = new JSONOptions(
+    val parsedOptions = new JSONOptionsInRead(
       extraOptions.toMap,
       sparkSession.sessionState.conf.sessionLocalTimeZone,
       sparkSession.sessionState.conf.columnNameOfCorruptRecord)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala b/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
index a664c7338badb..598e50d39cce3 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
@@ -39,7 +39,7 @@ import org.apache.spark.sql.catalyst.catalog.HiveTableRelation
 import org.apache.spark.sql.catalyst.encoders._
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.expressions.codegen.GenerateSafeProjection
-import org.apache.spark.sql.catalyst.json.{JacksonGenerator, JSONOptions}
+import org.apache.spark.sql.catalyst.json.{JacksonGenerator, JSONOptionsInWrite}
 import org.apache.spark.sql.catalyst.optimizer.CombineUnions
 import org.apache.spark.sql.catalyst.parser.{ParseException, ParserUtils}
 import org.apache.spark.sql.catalyst.plans._
@@ -3118,7 +3118,7 @@ class Dataset[T] private[sql](
       val writer = new CharArrayWriter()
       // create the Generator without separator inserted between 2 records
       val gen = new JacksonGenerator(rowSchema, writer,
-        new JSONOptions(Map.empty[String, String], sessionLocalTimeZone))
+        new JSONOptionsInWrite(Map.empty[String, String], sessionLocalTimeZone))
 
       new Iterator[String] {
         override def hasNext: Boolean = iter.hasNext
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/json/JsonFileFormat.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/json/JsonFileFormat.scala
index 3042133ee43aa..b9b6c67a8f973 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/json/JsonFileFormat.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/json/JsonFileFormat.scala
@@ -67,10 +67,9 @@
       options: Map[String, String],
       dataSchema: StructType): OutputWriterFactory = {
     val conf = job.getConfiguration
-    val parsedOptions = new JSONOptions(
+    val parsedOptions = new JSONOptionsInWrite(
       options,
-      sparkSession.sessionState.conf.sessionLocalTimeZone,
-      sparkSession.sessionState.conf.columnNameOfCorruptRecord)
+      sparkSession.sessionState.conf.sessionLocalTimeZone)
     parsedOptions.compressionCodec.foreach { codec =>
       CompressionCodecs.setCodecConfiguration(conf, codec)
     }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
index dff37ca2d40f0..9415e035d7200 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
@@ -64,7 +64,7 @@ class JsonSuite extends QueryTest with SharedSQLContext with TestJsonData {
       generator.flush()
     }
 
-    val dummyOption = new JSONOptions(Map.empty[String, String], "GMT")
+    val dummyOption = new JSONOptionsInRead(Map.empty[String, String], "GMT")
     val dummySchema = StructType(Seq.empty)
     val parser = new JacksonParser(dummySchema, dummyOption, allowArrayAsStructs = true)
 
@@ -1372,7 +1372,7 @@ class JsonSuite extends QueryTest with SharedSQLContext with TestJsonData {
 
   test("SPARK-6245 JsonInferSchema.infer on empty RDD") {
     // This is really a test that it doesn't throw an exception
-    val options = new JSONOptions(Map.empty[String, String], "GMT")
+    val options = new JSONOptionsInRead(Map.empty[String, String], "GMT")
     val emptySchema = new JsonInferSchema(options).infer(
       empty.rdd,
       CreateJacksonParser.string)
@@ -1399,7 +1399,7 @@ class JsonSuite extends QueryTest with SharedSQLContext with TestJsonData {
   }
 
   test("SPARK-8093 Erase empty structs") {
-    val options = new JSONOptions(Map.empty[String, String], "GMT")
+    val options = new JSONOptionsInRead(Map.empty[String, String], "GMT")
     val emptySchema = new JsonInferSchema(options).infer(
       emptyRecords.rdd,
       CreateJacksonParser.string)
@@ -2324,7 +2324,6 @@ class JsonSuite extends QueryTest with SharedSQLContext with TestJsonData {
       val ds = spark.createDataset(Seq(("a", 1))).repartition(1)
       ds.write
         .option("encoding", encoding)
-        .option("multiline", false)
         .json(path.getCanonicalPath)
       val jsonFiles = path.listFiles().filter(_.getName.endsWith("json"))
       jsonFiles.foreach { jsonFile =>

From af1507093f42a206c2c22f6c40cf4c43290244b8 Mon Sep 17 00:00:00 2001
From: Maxim Gekk
Date: Fri, 7 Dec 2018 21:34:24 +0100
Subject: [PATCH 2/3] Test for options verification

---
 .../spark/sql/catalyst/json/JSONOptions.scala |  6 ++++--
 .../apache/spark/sql/internal/SQLConf.scala   | 10 ++++++++++
 .../json/JsonParsingOptionsSuite.scala        | 19 +++++++++++++++++++
 3 files changed, 33 insertions(+), 2 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JSONOptions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JSONOptions.scala
index 2f6384425857e..230177a46bfd7 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JSONOptions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JSONOptions.scala
@@ -27,6 +27,7 @@ import org.apache.commons.lang3.time.FastDateFormat
 
 import org.apache.spark.internal.Logging
 import org.apache.spark.sql.catalyst.util._
+import org.apache.spark.sql.internal.SQLConf
 
 /**
  * Options for parsing JSON data into Spark SQL rows.
@@ -141,9 +142,10 @@
       parameters: CaseInsensitiveMap[String],
       option: String,
       where: String): T = {
-    if (parameters.get(option).isDefined) {
+    if (parameters.get(option).isDefined && SQLConf.get.verifyDataSourceOptions) {
       // scalastyle:off throwerror
-      throw new NotImplementedError(s"""The JSON option "${option}" is not applicable in $where.""")
+      throw new IllegalArgumentException(
+        s"""The JSON option "${option}" is not applicable in $where.""")
       // scalastyle:on throwerror
     } else {
       new Array[T](1)(0)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
index 6857b8de79758..a1dbef2b918ed 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
@@ -1635,6 +1635,14 @@
       "java.time.* packages are used for the same purpose.")
     .booleanConf
     .createWithDefault(false)
+
+  val VERIFY_DATASOURCE_OPTIONS = buildConf("spark.sql.verifyDataSourceOptions")
+    .doc("When this configuration property is set to true, options passed to a datasource " +
+      "are verified as applicable in read or in write. For example, if an option can be " +
+      "applied only in read but it is set on a write path, an exception is raised. " +
+      "To disable the verification, set it to false.")
+    .booleanConf
+    .createWithDefault(true)
 }
 
 /**
@@ -1820,6 +1828,8 @@ class SQLConf extends Serializable with Logging {
 
   def fastHashAggregateRowMaxCapacityBit: Int = getConf(FAST_HASH_AGGREGATE_MAX_ROWS_CAPACITY_BIT)
 
+  def verifyDataSourceOptions: Boolean = getConf(VERIFY_DATASOURCE_OPTIONS)
+
   /**
    * Returns the [[Resolver]] for the current configuration, which can be used to determine if two
    * identifiers are equal.
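
Reviewer note: with spark.sql.verifyDataSourceOptions left at its default of true, a
read-only option used on the write path now fails fast instead of being silently
ignored. A minimal, hypothetical spark-shell sketch of the user-visible effect at this
point in the series (it assumes `spark` and `import spark.implicits._`; the output
path is illustrative, not part of the patch):

    // "primitivesAsString" is applicable only in read, so using it on write throws:
    Seq("""{"a": 1}""").toDS().write
      .option("primitivesAsString", true)
      .json("/tmp/json_out")
    // java.lang.IllegalArgumentException:
    //   The JSON option "primitivesAsString" is not applicable in write.

    // The pre-patch lenient behavior stays one setting away:
    spark.conf.set("spark.sql.verifyDataSourceOptions", "false")
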
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonParsingOptionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonParsingOptionsSuite.scala
index 316c5183fddf1..3671d285866d5 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonParsingOptionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonParsingOptionsSuite.scala
@@ -19,6 +19,7 @@ package org.apache.spark.sql.execution.datasources.json
 
 import org.apache.spark.sql.QueryTest
 import org.apache.spark.sql.catalyst.json.JSONOptions
+import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.test.SharedSQLContext
 
 /**
@@ -135,4 +136,22 @@ class JsonParsingOptionsSuite extends QueryTest with SharedSQLContext {
     assert(df.first().getString(0) == "Cazen Lee")
     assert(df.first().getString(1) == "$10")
   }
+
+  test("verify options") {
+    withTempPath { dir =>
+      def invalidOptionUsage: Unit = {
+        val ds = Seq("""{"a": "b"}""").toDS()
+        ds.write.option("dropFieldIfAllNull", true).json(dir.getCanonicalPath)
+      }
+      val exception = intercept[IllegalArgumentException] {
+        invalidOptionUsage
+      }
+      assert(exception.getMessage.contains(
+        """The JSON option "dropFieldIfAllNull" is not applicable in write."""))
+
+      withSQLConf(SQLConf.VERIFY_DATASOURCE_OPTIONS.key -> "false") {
+        invalidOptionUsage
+      }
+    }
+  }
 }

From 9ee62acc50bd5fb72a84a620e0fe50e27f7df515 Mon Sep 17 00:00:00 2001
From: Maxim Gekk
Date: Sat, 8 Dec 2018 22:31:37 +0100
Subject: [PATCH 3/3] Refactoring

---
 .../spark/sql/catalyst/json/JSONOptions.scala | 65 ++++++++-----------
 .../json/JsonParsingOptionsSuite.scala        |  2 +-
 2 files changed, 27 insertions(+), 40 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JSONOptions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JSONOptions.scala
index 230177a46bfd7..0fcc4d1ed606b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JSONOptions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JSONOptions.scala
@@ -20,8 +20,6 @@ package org.apache.spark.sql.catalyst.json
 import java.nio.charset.{Charset, StandardCharsets}
 import java.util.{Locale, TimeZone}
 
-import scala.reflect.ClassTag
-
 import com.fasterxml.jackson.core.{JsonFactory, JsonParser}
 import org.apache.commons.lang3.time.FastDateFormat
 
@@ -68,7 +66,7 @@ private[sql] abstract class JSONOptions(
     parameters.get("allowNonNumericNumbers").map(_.toBoolean).getOrElse(true)
   val allowBackslashEscapingAnyCharacter =
     parameters.get("allowBackslashEscapingAnyCharacter").map(_.toBoolean).getOrElse(false)
-  val allowUnquotedControlChars =
+  private val allowUnquotedControlChars =
     parameters.get("allowUnquotedControlChars").map(_.toBoolean).getOrElse(false)
   val compressionCodec = parameters.get("compression").map(CompressionCodecs.getCodecClassName)
   val parseMode: ParseMode =
@@ -135,20 +133,15 @@ private[sql] abstract class JSONOptions(
       allowBackslashEscapingAnyCharacter)
     factory.configure(JsonParser.Feature.ALLOW_UNQUOTED_CONTROL_CHARS, allowUnquotedControlChars)
   }
-}
 
-private[sql] object JSONOptions {
-  def notApplicable[T: ClassTag](
-      parameters: CaseInsensitiveMap[String],
-      option: String,
-      where: String): T = {
-    if (parameters.get(option).isDefined && SQLConf.get.verifyDataSourceOptions) {
+  def notApplicableOptions: Set[String]
+  def checkOptions(where: String): Unit = {
+    val wrongOptions = notApplicableOptions.filter(parameters.contains(_))
+    if (wrongOptions.nonEmpty && SQLConf.get.verifyDataSourceOptions) {
       // scalastyle:off throwerror
       throw new IllegalArgumentException(
-        s"""The JSON option "${option}" is not applicable in $where.""")
+        s"""The JSON options are not applicable $where: ${wrongOptions.mkString(", ")}.""")
       // scalastyle:on throwerror
-    } else {
-      new Array[T](1)(0)
     }
   }
 }
@@ -182,12 +175,10 @@ private[sql] class JSONOptionsInRead(
     enc
   }
 
-  override val compressionCodec: Option[String] = notApplicable[Option[String]]("compression")
-  override val pretty: Boolean = notApplicable[Boolean]("pretty")
-
-  def notApplicable[T: ClassTag](option: String): T = {
-    JSONOptions.notApplicable(parameters, option, "read")
-  }
+  override def notApplicableOptions: Set[String] = Set(
+    "compression",
+    "pretty")
+  checkOptions("in read")
 }
 
 private[sql] object JSONOptionsInRead {
@@ -216,24 +207,20 @@ private[sql] class JSONOptionsInWrite(
      defaultTimeZoneId)
   }
 
-  override val samplingRatio = notApplicable[Double]("samplingRatio")
-  override val primitivesAsString = notApplicable[Boolean]("primitivesAsString")
-  override val prefersDecimal = notApplicable[Boolean]("prefersDecimal")
-  override val allowComments = notApplicable[Boolean]("allowComments")
-  override val allowUnquotedFieldNames = notApplicable[Boolean]("allowUnquotedFieldNames")
-  override val allowSingleQuotes = notApplicable[Boolean]("allowSingleQuotes")
-  override val allowNumericLeadingZeros = notApplicable[Boolean]("allowNumericLeadingZeros")
-  override val allowNonNumericNumbers = notApplicable[Boolean]("allowNonNumericNumbers")
-  override val allowBackslashEscapingAnyCharacter = {
-    notApplicable[Boolean]("allowBackslashEscapingAnyCharacter")
-  }
-  override val allowUnquotedControlChars = notApplicable[Boolean]("allowUnquotedControlChars")
-  override val parseMode: ParseMode = notApplicable[ParseMode]("mode")
-  override val columnNameOfCorruptRecord = notApplicable[String]("columnNameOfCorruptRecord")
-  override val dropFieldIfAllNull = notApplicable[Boolean]("dropFieldIfAllNull")
-  override val multiLine = notApplicable[Boolean]("multiLine")
-
-  def notApplicable[T: ClassTag](option: String): T = {
-    JSONOptions.notApplicable(parameters, option, "write")
-  }
+  override def notApplicableOptions: Set[String] = Set(
+    "samplingRatio",
+    "primitivesAsString",
+    "prefersDecimal",
+    "allowComments",
+    "allowUnquotedFieldNames",
+    "allowSingleQuotes",
+    "allowNumericLeadingZeros",
+    "allowNonNumericNumbers",
+    "allowBackslashEscapingAnyCharacter",
+    "allowUnquotedControlChars",
+    "mode",
+    "columnNameOfCorruptRecord",
+    "dropFieldIfAllNull",
+    "multiLine")
+  checkOptions("in write")
 }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonParsingOptionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonParsingOptionsSuite.scala
index 3671d285866d5..a63b41914f552 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonParsingOptionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonParsingOptionsSuite.scala
@@ -147,7 +147,7 @@ class JsonParsingOptionsSuite extends QueryTest with SharedSQLContext {
         invalidOptionUsage
       }
       assert(exception.getMessage.contains(
-        """The JSON option "dropFieldIfAllNull" is not applicable in write."""))
+        "The JSON options are not applicable in write: dropFieldIfAllNull"))
 
       withSQLConf(SQLConf.VERIFY_DATASOURCE_OPTIONS.key -> "false") {
         invalidOptionUsage
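
Reviewer note: after the PATCH 3 refactoring, both option classes funnel through the
single checkOptions() hook, so read and write are verified symmetrically and every
offending option is reported in one exception rather than one at a time. A rough
sketch of the final behavior, under the same spark-shell assumptions as the note
above (illustrative only):

    // The write path rejects read-only options:
    Seq("""{"a": 1}""").toDS().write.option("multiLine", true).json("/tmp/json_out")
    // java.lang.IllegalArgumentException:
    //   The JSON options are not applicable in write: multiLine.

    // The read path rejects write-only options:
    spark.read.option("pretty", true).json("/tmp/json_out")
    // java.lang.IllegalArgumentException:
    //   The JSON options are not applicable in read: pretty.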