From 50a133ce123d4b909a852991806878c27d05d48d Mon Sep 17 00:00:00 2001 From: Harsh Motwani Date: Thu, 27 Nov 2025 00:02:27 +0000 Subject: [PATCH] minor suite fixes --- .../src/test/scala/org/apache/spark/sql/VariantSuite.scala | 4 +++- .../datasources/parquet/ParquetVariantShreddingSuite.scala | 3 ++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/sql/core/src/test/scala/org/apache/spark/sql/VariantSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/VariantSuite.scala index 16be9558409c..6510a165e258 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/VariantSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/VariantSuite.scala @@ -820,7 +820,9 @@ class VariantSuite extends QueryTest with SharedSparkSession with ExpressionEval // The initial size of the buffer backing a cached dataframe column is 128KB. // See `ColumnBuilder`. val numKeys = 128 * 1024 - val keyIterator = (0 until numKeys).iterator + // We start in the long range because the shredded writer writes int64 by default, which wouldn't + // match narrower binaries.
+ val keyIterator = (Int.MaxValue + 1L until Int.MaxValue + 1L + numKeys).iterator val entries = Array.fill(numKeys)(s"""\"${keyIterator.next()}\": \"test\"""") val jsonStr = s"{${entries.mkString(", ")}}" val query = s"""select named_struct( diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetVariantShreddingSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetVariantShreddingSuite.scala index 1f06ddb29bd4..1cc6d3afbee5 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetVariantShreddingSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetVariantShreddingSuite.scala @@ -379,7 +379,8 @@ class ParquetVariantShreddingSuite extends QueryTest with ParquetTest with Share "struct>>" withSQLConf(SQLConf.VARIANT_WRITE_SHREDDING_ENABLED.key -> true.toString, SQLConf.VARIANT_ALLOW_READING_SHREDDED.key -> true.toString, - SQLConf.VARIANT_FORCE_SHREDDING_SCHEMA_FOR_TEST.key -> schema) { + SQLConf.VARIANT_FORCE_SHREDDING_SCHEMA_FOR_TEST.key -> schema, + SQLConf.PARQUET_IGNORE_VARIANT_ANNOTATION.key -> true.toString) { df.write.mode("overwrite").parquet(dir.getAbsolutePath) // Verify that we can read the full variant.