From 17bc9c0b259be87782e313f18b2b88de134811af Mon Sep 17 00:00:00 2001
From: petermaxlee
Date: Wed, 10 Aug 2016 18:00:05 -0700
Subject: [PATCH 1/2] [SPARK-17007][SQL] Move test data files into a test-data folder

---
 .../apache/spark/sql/JavaDataFrameSuite.java | 12 +++----
 .../src/test/resources/old-repeated.parquet | Bin 432 -> 0 bytes
 .../test/resources/{ => test-data}/bool.csv | 0
 .../{ => test-data}/cars-alternative.csv | 0
 .../cars-blank-column-name.csv | 0
 .../{ => test-data}/cars-malformed.csv | 0
 .../resources/{ => test-data}/cars-null.csv | 0
 .../cars-unbalanced-quotes.csv | 0
 .../test/resources/{ => test-data}/cars.csv | 0
 .../test/resources/{ => test-data}/cars.tsv | 0
 .../{ => test-data}/cars_iso-8859-1.csv | 0
 .../resources/{ => test-data}/comments.csv | 0
 .../test/resources/{ => test-data}/dates.csv | 0
 .../{ => test-data}/dec-in-fixed-len.parquet | Bin
 .../{ => test-data}/dec-in-i32.parquet | Bin
 .../{ => test-data}/dec-in-i64.parquet | Bin
 .../resources/{ => test-data}/decimal.csv | 0
 .../{ => test-data}/disable_comments.csv | 0
 .../test/resources/{ => test-data}/empty.csv | 0
 .../nested-array-struct.parquet | Bin
 .../resources/{ => test-data}/numbers.csv | 0
 .../{ => test-data}/old-repeated-int.parquet | Bin
 .../old-repeated-message.parquet | Bin
 .../parquet-thrift-compat.snappy.parquet | Bin
 .../proto-repeated-string.parquet | Bin
 .../proto-repeated-struct.parquet | Bin
 .../proto-struct-with-array-many.parquet | Bin
 .../proto-struct-with-array.parquet | Bin
 .../{ => test-data}/simple_sparse.csv | 0
 .../text-partitioned/year=2014/data.txt | 0
 .../text-partitioned/year=2015/data.txt | 0
 .../resources/{ => test-data}/text-suite.txt | 0
 .../resources/{ => test-data}/text-suite2.txt | 0
 .../{ => test-data}/unescaped-quotes.csv | 0
 .../execution/datasources/csv/CSVSuite.scala | 34 +++++++++---------
 .../datasources/parquet/ParquetIOSuite.scala | 6 ++--
 .../ParquetProtobufCompatibilitySuite.scala | 14 ++++----
 .../ParquetThriftCompatibilitySuite.scala | 4 +--
 .../datasources/text/TextSuite.scala | 6 ++--
 39 files changed, 38 insertions(+), 38 deletions(-)
 delete mode 100644 sql/core/src/test/resources/old-repeated.parquet
 rename sql/core/src/test/resources/{ => test-data}/bool.csv (100%)
 rename sql/core/src/test/resources/{ => test-data}/cars-alternative.csv (100%)
 rename sql/core/src/test/resources/{ => test-data}/cars-blank-column-name.csv (100%)
 rename sql/core/src/test/resources/{ => test-data}/cars-malformed.csv (100%)
 rename sql/core/src/test/resources/{ => test-data}/cars-null.csv (100%)
 rename sql/core/src/test/resources/{ => test-data}/cars-unbalanced-quotes.csv (100%)
 rename sql/core/src/test/resources/{ => test-data}/cars.csv (100%)
 rename sql/core/src/test/resources/{ => test-data}/cars.tsv (100%)
 rename sql/core/src/test/resources/{ => test-data}/cars_iso-8859-1.csv (100%)
 rename sql/core/src/test/resources/{ => test-data}/comments.csv (100%)
 rename sql/core/src/test/resources/{ => test-data}/dates.csv (100%)
 rename sql/core/src/test/resources/{ => test-data}/dec-in-fixed-len.parquet (100%)
 rename sql/core/src/test/resources/{ => test-data}/dec-in-i32.parquet (100%)
 rename sql/core/src/test/resources/{ => test-data}/dec-in-i64.parquet (100%)
 rename sql/core/src/test/resources/{ => test-data}/decimal.csv (100%)
 rename sql/core/src/test/resources/{ => test-data}/disable_comments.csv (100%)
 rename sql/core/src/test/resources/{ => test-data}/empty.csv (100%)
 rename sql/core/src/test/resources/{ => test-data}/nested-array-struct.parquet (100%)
 rename sql/core/src/test/resources/{ => test-data}/numbers.csv (100%)
 rename sql/core/src/test/resources/{ => test-data}/old-repeated-int.parquet (100%)
 rename sql/core/src/test/resources/{ => test-data}/old-repeated-message.parquet (100%)
 rename sql/core/src/test/resources/{ => test-data}/parquet-thrift-compat.snappy.parquet (100%)
 rename sql/core/src/test/resources/{ => test-data}/proto-repeated-string.parquet (100%)
 rename sql/core/src/test/resources/{ => test-data}/proto-repeated-struct.parquet (100%)
 rename sql/core/src/test/resources/{ => test-data}/proto-struct-with-array-many.parquet (100%)
 rename sql/core/src/test/resources/{ => test-data}/proto-struct-with-array.parquet (100%)
 rename sql/core/src/test/resources/{ => test-data}/simple_sparse.csv (100%)
 rename sql/core/src/test/resources/{ => test-data}/text-partitioned/year=2014/data.txt (100%)
 rename sql/core/src/test/resources/{ => test-data}/text-partitioned/year=2015/data.txt (100%)
 rename sql/core/src/test/resources/{ => test-data}/text-suite.txt (100%)
 rename sql/core/src/test/resources/{ => test-data}/text-suite2.txt (100%)
 rename sql/core/src/test/resources/{ => test-data}/unescaped-quotes.csv (100%)

diff --git a/sql/core/src/test/java/test/org/apache/spark/sql/JavaDataFrameSuite.java b/sql/core/src/test/java/test/org/apache/spark/sql/JavaDataFrameSuite.java
index 318b53cdbbaa0..c44fc3d393862 100644
--- a/sql/core/src/test/java/test/org/apache/spark/sql/JavaDataFrameSuite.java
+++ b/sql/core/src/test/java/test/org/apache/spark/sql/JavaDataFrameSuite.java
@@ -327,23 +327,23 @@ private String getResource(String resource) {

   @Test
   public void testGenericLoad() {
-    Dataset<Row> df1 = spark.read().format("text").load(getResource("text-suite.txt"));
+    Dataset<Row> df1 = spark.read().format("text").load(getResource("test-data/text-suite.txt"));
     Assert.assertEquals(4L, df1.count());

     Dataset<Row> df2 = spark.read().format("text").load(
-      getResource("text-suite.txt"),
-      getResource("text-suite2.txt"));
+      getResource("test-data/text-suite.txt"),
+      getResource("test-data/text-suite2.txt"));
     Assert.assertEquals(5L, df2.count());
   }

   @Test
   public void testTextLoad() {
-    Dataset<String> ds1 = spark.read().textFile(getResource("text-suite.txt"));
+    Dataset<String> ds1 = spark.read().textFile(getResource("test-data/text-suite.txt"));
     Assert.assertEquals(4L, ds1.count());

     Dataset<String> ds2 = spark.read().textFile(
-      getResource("text-suite.txt"),
-      getResource("text-suite2.txt"));
+      getResource("test-data/text-suite.txt"),
+      getResource("test-data/text-suite2.txt"));
     Assert.assertEquals(5L, ds2.count());
   }

diff --git a/sql/core/src/test/resources/old-repeated.parquet b/sql/core/src/test/resources/old-repeated.parquet
deleted file mode 100644
index 213f1a90291b30a8a3161b51c38f008f3ae9f6e5..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 432
zcmZWm!D@p*5ZxNF!5+)X3PMGioUA16Ei?y9g$B|h;-#ms>Ld--Xm{5`DgF13A<&42
znSH!BJNtMWhsm50I-@h68VC$(I7}ZALYRJm-NGUo*2p;a%Z@xEJgH{;FE=Sj6^mNc
zS-TAqXn-pyRtNP8Qt};84d*60yAuBru{7JUo$1)Y6%&KlJ(Uv6u!JS1zOP)cw
aM$5ewB9699EEB0jJ*18aDDn7N1N4K`fzXlnuJ~V?c^nwk}Yeo3wXox4+#3Y!pMU2
V+-`?%2{TWZ?kYh(G4~k%>JK8=aDe~-

diff --git a/sql/core/src/test/resources/bool.csv b/sql/core/src/test/resources/test-data/bool.csv
similarity index 100%
rename from sql/core/src/test/resources/bool.csv
rename to sql/core/src/test/resources/test-data/bool.csv
diff --git a/sql/core/src/test/resources/cars-alternative.csv b/sql/core/src/test/resources/test-data/cars-alternative.csv
similarity index 100%
rename from sql/core/src/test/resources/cars-alternative.csv
rename to sql/core/src/test/resources/test-data/cars-alternative.csv
diff --git a/sql/core/src/test/resources/cars-blank-column-name.csv b/sql/core/src/test/resources/test-data/cars-blank-column-name.csv
similarity index 100%
rename from sql/core/src/test/resources/cars-blank-column-name.csv
rename to sql/core/src/test/resources/test-data/cars-blank-column-name.csv
diff --git a/sql/core/src/test/resources/cars-malformed.csv b/sql/core/src/test/resources/test-data/cars-malformed.csv
similarity index 100%
rename from sql/core/src/test/resources/cars-malformed.csv
rename to sql/core/src/test/resources/test-data/cars-malformed.csv
diff --git a/sql/core/src/test/resources/cars-null.csv b/sql/core/src/test/resources/test-data/cars-null.csv
similarity index 100%
rename from sql/core/src/test/resources/cars-null.csv
rename to sql/core/src/test/resources/test-data/cars-null.csv
diff --git a/sql/core/src/test/resources/cars-unbalanced-quotes.csv b/sql/core/src/test/resources/test-data/cars-unbalanced-quotes.csv
similarity index 100%
rename from sql/core/src/test/resources/cars-unbalanced-quotes.csv
rename to sql/core/src/test/resources/test-data/cars-unbalanced-quotes.csv
diff --git a/sql/core/src/test/resources/cars.csv b/sql/core/src/test/resources/test-data/cars.csv
similarity index 100%
rename from sql/core/src/test/resources/cars.csv
rename to sql/core/src/test/resources/test-data/cars.csv
diff --git a/sql/core/src/test/resources/cars.tsv b/sql/core/src/test/resources/test-data/cars.tsv
similarity index 100%
rename from sql/core/src/test/resources/cars.tsv
rename to sql/core/src/test/resources/test-data/cars.tsv
diff --git a/sql/core/src/test/resources/cars_iso-8859-1.csv b/sql/core/src/test/resources/test-data/cars_iso-8859-1.csv
similarity index 100%
rename from sql/core/src/test/resources/cars_iso-8859-1.csv
rename to sql/core/src/test/resources/test-data/cars_iso-8859-1.csv
diff --git a/sql/core/src/test/resources/comments.csv b/sql/core/src/test/resources/test-data/comments.csv
similarity index 100%
rename from sql/core/src/test/resources/comments.csv
rename to sql/core/src/test/resources/test-data/comments.csv
diff --git a/sql/core/src/test/resources/dates.csv b/sql/core/src/test/resources/test-data/dates.csv
similarity index 100%
rename from sql/core/src/test/resources/dates.csv
rename to sql/core/src/test/resources/test-data/dates.csv
diff --git a/sql/core/src/test/resources/dec-in-fixed-len.parquet b/sql/core/src/test/resources/test-data/dec-in-fixed-len.parquet
similarity index 100%
rename from sql/core/src/test/resources/dec-in-fixed-len.parquet
rename to sql/core/src/test/resources/test-data/dec-in-fixed-len.parquet
diff --git a/sql/core/src/test/resources/dec-in-i32.parquet b/sql/core/src/test/resources/test-data/dec-in-i32.parquet
similarity index 100%
rename from sql/core/src/test/resources/dec-in-i32.parquet
rename to sql/core/src/test/resources/test-data/dec-in-i32.parquet
diff --git a/sql/core/src/test/resources/dec-in-i64.parquet b/sql/core/src/test/resources/test-data/dec-in-i64.parquet
similarity index 100%
rename from sql/core/src/test/resources/dec-in-i64.parquet
rename to sql/core/src/test/resources/test-data/dec-in-i64.parquet
diff --git a/sql/core/src/test/resources/decimal.csv b/sql/core/src/test/resources/test-data/decimal.csv
similarity index 100%
rename from sql/core/src/test/resources/decimal.csv
rename to sql/core/src/test/resources/test-data/decimal.csv
diff --git a/sql/core/src/test/resources/disable_comments.csv b/sql/core/src/test/resources/test-data/disable_comments.csv
similarity index 100%
rename from sql/core/src/test/resources/disable_comments.csv
rename to sql/core/src/test/resources/test-data/disable_comments.csv
diff --git a/sql/core/src/test/resources/empty.csv b/sql/core/src/test/resources/test-data/empty.csv
similarity index 100%
rename from sql/core/src/test/resources/empty.csv
rename to sql/core/src/test/resources/test-data/empty.csv
diff --git a/sql/core/src/test/resources/nested-array-struct.parquet b/sql/core/src/test/resources/test-data/nested-array-struct.parquet
similarity index 100%
rename from sql/core/src/test/resources/nested-array-struct.parquet
rename to sql/core/src/test/resources/test-data/nested-array-struct.parquet
diff --git a/sql/core/src/test/resources/numbers.csv b/sql/core/src/test/resources/test-data/numbers.csv
similarity index 100%
rename from sql/core/src/test/resources/numbers.csv
rename to sql/core/src/test/resources/test-data/numbers.csv
diff --git a/sql/core/src/test/resources/old-repeated-int.parquet b/sql/core/src/test/resources/test-data/old-repeated-int.parquet
similarity index 100%
rename from sql/core/src/test/resources/old-repeated-int.parquet
rename to sql/core/src/test/resources/test-data/old-repeated-int.parquet
diff --git a/sql/core/src/test/resources/old-repeated-message.parquet b/sql/core/src/test/resources/test-data/old-repeated-message.parquet
similarity index 100%
rename from sql/core/src/test/resources/old-repeated-message.parquet
rename to sql/core/src/test/resources/test-data/old-repeated-message.parquet
diff --git a/sql/core/src/test/resources/parquet-thrift-compat.snappy.parquet b/sql/core/src/test/resources/test-data/parquet-thrift-compat.snappy.parquet
similarity index 100%
rename from sql/core/src/test/resources/parquet-thrift-compat.snappy.parquet
rename to sql/core/src/test/resources/test-data/parquet-thrift-compat.snappy.parquet
diff --git a/sql/core/src/test/resources/proto-repeated-string.parquet b/sql/core/src/test/resources/test-data/proto-repeated-string.parquet
similarity index 100%
rename from sql/core/src/test/resources/proto-repeated-string.parquet
rename to sql/core/src/test/resources/test-data/proto-repeated-string.parquet
diff --git a/sql/core/src/test/resources/proto-repeated-struct.parquet b/sql/core/src/test/resources/test-data/proto-repeated-struct.parquet
similarity index 100%
rename from sql/core/src/test/resources/proto-repeated-struct.parquet
rename to sql/core/src/test/resources/test-data/proto-repeated-struct.parquet
diff --git a/sql/core/src/test/resources/proto-struct-with-array-many.parquet b/sql/core/src/test/resources/test-data/proto-struct-with-array-many.parquet
similarity index 100%
rename from sql/core/src/test/resources/proto-struct-with-array-many.parquet
rename to sql/core/src/test/resources/test-data/proto-struct-with-array-many.parquet
diff --git a/sql/core/src/test/resources/proto-struct-with-array.parquet b/sql/core/src/test/resources/test-data/proto-struct-with-array.parquet
similarity index 100%
rename from sql/core/src/test/resources/proto-struct-with-array.parquet
rename to sql/core/src/test/resources/test-data/proto-struct-with-array.parquet
diff --git a/sql/core/src/test/resources/simple_sparse.csv b/sql/core/src/test/resources/test-data/simple_sparse.csv
similarity index 100%
rename from sql/core/src/test/resources/simple_sparse.csv
rename to sql/core/src/test/resources/test-data/simple_sparse.csv
diff --git a/sql/core/src/test/resources/text-partitioned/year=2014/data.txt b/sql/core/src/test/resources/test-data/text-partitioned/year=2014/data.txt
similarity index 100%
rename from sql/core/src/test/resources/text-partitioned/year=2014/data.txt
rename to sql/core/src/test/resources/test-data/text-partitioned/year=2014/data.txt
diff --git a/sql/core/src/test/resources/text-partitioned/year=2015/data.txt b/sql/core/src/test/resources/test-data/text-partitioned/year=2015/data.txt
similarity index 100%
rename from sql/core/src/test/resources/text-partitioned/year=2015/data.txt
rename to sql/core/src/test/resources/test-data/text-partitioned/year=2015/data.txt
diff --git a/sql/core/src/test/resources/text-suite.txt b/sql/core/src/test/resources/test-data/text-suite.txt
similarity index 100%
rename from sql/core/src/test/resources/text-suite.txt
rename to sql/core/src/test/resources/test-data/text-suite.txt
diff --git a/sql/core/src/test/resources/text-suite2.txt b/sql/core/src/test/resources/test-data/text-suite2.txt
similarity index 100%
rename from sql/core/src/test/resources/text-suite2.txt
rename to sql/core/src/test/resources/test-data/text-suite2.txt
diff --git a/sql/core/src/test/resources/unescaped-quotes.csv b/sql/core/src/test/resources/test-data/unescaped-quotes.csv
similarity index 100%
rename from sql/core/src/test/resources/unescaped-quotes.csv
rename to sql/core/src/test/resources/test-data/unescaped-quotes.csv
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala
index 311f1fa8d2aff..8cd76ddf20f04 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/csv/CSVSuite.scala
@@ -33,23 +33,23 @@ import org.apache.spark.sql.types._
 class CSVSuite extends QueryTest with SharedSQLContext with SQLTestUtils {
   import testImplicits._

-  private val carsFile = "cars.csv"
-  private val carsMalformedFile = "cars-malformed.csv"
-  private val carsFile8859 = "cars_iso-8859-1.csv"
-  private val carsTsvFile = "cars.tsv"
-  private val carsAltFile = "cars-alternative.csv"
-  private val carsUnbalancedQuotesFile = "cars-unbalanced-quotes.csv"
-  private val carsNullFile = "cars-null.csv"
-  private val carsBlankColName = "cars-blank-column-name.csv"
-  private val emptyFile = "empty.csv"
-  private val commentsFile = "comments.csv"
-  private val disableCommentsFile = "disable_comments.csv"
-  private val boolFile = "bool.csv"
-  private val decimalFile = "decimal.csv"
-  private val simpleSparseFile = "simple_sparse.csv"
-  private val numbersFile = "numbers.csv"
-  private val datesFile = "dates.csv"
-  private val unescapedQuotesFile = "unescaped-quotes.csv"
+  private val carsFile = "test-data/cars.csv"
+  private val carsMalformedFile = "test-data/cars-malformed.csv"
+  private val carsFile8859 = "test-data/cars_iso-8859-1.csv"
+  private val carsTsvFile = "test-data/cars.tsv"
+  private val carsAltFile = "test-data/cars-alternative.csv"
+  private val carsUnbalancedQuotesFile = "test-data/cars-unbalanced-quotes.csv"
+  private val carsNullFile = "test-data/cars-null.csv"
+  private val carsBlankColName = "test-data/cars-blank-column-name.csv"
+  private val emptyFile = "test-data/empty.csv"
+  private val commentsFile = "test-data/comments.csv"
+  private val disableCommentsFile = "test-data/disable_comments.csv"
+  private val boolFile = "test-data/bool.csv"
+  private val decimalFile = "test-data/decimal.csv"
+  private val simpleSparseFile = "test-data/simple_sparse.csv"
+  private val numbersFile = "test-data/numbers.csv"
+  private val datesFile = "test-data/dates.csv"
+  private val unescapedQuotesFile = "test-data/unescaped-quotes.csv"

   private def testFile(fileName: String): String = {
     Thread.currentThread().getContextClassLoader.getResource(fileName).toString
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetIOSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetIOSuite.scala
index 0f74094699abd..4aa046bd91e02 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetIOSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetIOSuite.scala
@@ -568,7 +568,7 @@ class ParquetIOSuite extends QueryTest with ParquetTest with SharedSQLContext {
       withSQLConf(SQLConf.PARQUET_VECTORIZED_READER_ENABLED.key -> vectorized) {
         checkAnswer(
           // Decimal column in this file is encoded using plain dictionary
-          readResourceParquetFile("dec-in-i32.parquet"),
+          readResourceParquetFile("test-data/dec-in-i32.parquet"),
           spark.range(1 << 4).select('id % 10 cast DecimalType(5, 2) as 'i32_dec))
       }
     }
@@ -579,7 +579,7 @@ class ParquetIOSuite extends QueryTest with ParquetTest with SharedSQLContext {
       withSQLConf(SQLConf.PARQUET_VECTORIZED_READER_ENABLED.key -> vectorized) {
         checkAnswer(
           // Decimal column in this file is encoded using plain dictionary
-          readResourceParquetFile("dec-in-i64.parquet"),
+          readResourceParquetFile("test-data/dec-in-i64.parquet"),
           spark.range(1 << 4).select('id % 10 cast DecimalType(10, 2) as 'i64_dec))
       }
     }
@@ -590,7 +590,7 @@ class ParquetIOSuite extends QueryTest with ParquetTest with SharedSQLContext {
       withSQLConf(SQLConf.PARQUET_VECTORIZED_READER_ENABLED.key -> vectorized) {
         checkAnswer(
           // Decimal column in this file is encoded using plain dictionary
-          readResourceParquetFile("dec-in-fixed-len.parquet"),
+          readResourceParquetFile("test-data/dec-in-fixed-len.parquet"),
           spark.range(1 << 4).select('id % 10 cast DecimalType(10, 2) as 'fixed_len_dec))
       }
     }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetProtobufCompatibilitySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetProtobufCompatibilitySuite.scala
index 98333e58cada8..fa88019298a69 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetProtobufCompatibilitySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetProtobufCompatibilitySuite.scala
@@ -22,12 +22,12 @@ import org.apache.spark.sql.test.SharedSQLContext

 class ParquetProtobufCompatibilitySuite extends ParquetCompatibilityTest with SharedSQLContext {
   test("unannotated array of primitive type") {
-    checkAnswer(readResourceParquetFile("old-repeated-int.parquet"), Row(Seq(1, 2, 3)))
+    checkAnswer(readResourceParquetFile("test-data/old-repeated-int.parquet"), Row(Seq(1, 2, 3)))
   }

   test("unannotated array of struct") {
     checkAnswer(
-      readResourceParquetFile("old-repeated-message.parquet"),
+      readResourceParquetFile("test-data/old-repeated-message.parquet"),
       Row(
         Seq(
           Row("First inner", null, null),
@@ -35,14 +35,14 @@ class ParquetProtobufCompatibilitySuite extends ParquetCompatibilityTest with Sh
           Row(null, null, "Third inner"))))

     checkAnswer(
-      readResourceParquetFile("proto-repeated-struct.parquet"),
+      readResourceParquetFile("test-data/proto-repeated-struct.parquet"),
       Row(
         Seq(
           Row("0 - 1", "0 - 2", "0 - 3"),
           Row("1 - 1", "1 - 2", "1 - 3"))))

     checkAnswer(
-      readResourceParquetFile("proto-struct-with-array-many.parquet"),
+      readResourceParquetFile("test-data/proto-struct-with-array-many.parquet"),
       Seq(
         Row(
           Seq(
@@ -60,13 +60,13 @@ class ParquetProtobufCompatibilitySuite extends ParquetCompatibilityTest with Sh

   test("struct with unannotated array") {
     checkAnswer(
-      readResourceParquetFile("proto-struct-with-array.parquet"),
+      readResourceParquetFile("test-data/proto-struct-with-array.parquet"),
       Row(10, 9, Seq.empty, null, Row(9), Seq(Row(9), Row(10))))
   }

   test("unannotated array of struct with unannotated array") {
     checkAnswer(
-      readResourceParquetFile("nested-array-struct.parquet"),
+      readResourceParquetFile("test-data/nested-array-struct.parquet"),
       Seq(
         Row(2, Seq(Row(1, Seq(Row(3))))),
         Row(5, Seq(Row(4, Seq(Row(6))))),
@@ -75,7 +75,7 @@ class ParquetProtobufCompatibilitySuite extends ParquetCompatibilityTest with Sh

   test("unannotated array of string") {
     checkAnswer(
-      readResourceParquetFile("proto-repeated-string.parquet"),
+      readResourceParquetFile("test-data/proto-repeated-string.parquet"),
       Seq(
         Row(Seq("hello", "world")),
         Row(Seq("good", "bye")),
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetThriftCompatibilitySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetThriftCompatibilitySuite.scala
index ff5706999a6dd..4157a5b46dc42 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetThriftCompatibilitySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetThriftCompatibilitySuite.scala
@@ -23,8 +23,8 @@ import org.apache.spark.sql.test.SharedSQLContext
 class ParquetThriftCompatibilitySuite extends ParquetCompatibilityTest with SharedSQLContext {
   import ParquetCompatibilityTest._

-  private val parquetFilePath =
-    Thread.currentThread().getContextClassLoader.getResource("parquet-thrift-compat.snappy.parquet")
+  private val parquetFilePath = Thread.currentThread().getContextClassLoader.getResource(
+    "test-data/parquet-thrift-compat.snappy.parquet")

   test("Read Parquet file generated by parquet-thrift") {
     logInfo(
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/text/TextSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/text/TextSuite.scala
index 71d3da915840a..d11c2acb815d4 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/text/TextSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/text/TextSuite.scala
@@ -66,7 +66,7 @@ class TextSuite extends QueryTest with SharedSQLContext {

   test("reading partitioned data using read.textFile()") {
     val partitionedData = Thread.currentThread().getContextClassLoader
-      .getResource("text-partitioned").toString
+      .getResource("test-data/text-partitioned").toString
     val ds = spark.read.textFile(partitionedData)
     val data = ds.collect()

@@ -76,7 +76,7 @@ class TextSuite extends QueryTest with SharedSQLContext {

   test("support for partitioned reading using read.text()") {
     val partitionedData = Thread.currentThread().getContextClassLoader
-      .getResource("text-partitioned").toString
+      .getResource("test-data/text-partitioned").toString
     val df = spark.read.text(partitionedData)
     val data = df.filter("year = '2015'").select("value").collect()

@@ -155,7 +155,7 @@ class TextSuite extends QueryTest with SharedSQLContext {
   }

   private def testFile: String = {
-    Thread.currentThread().getContextClassLoader.getResource("text-suite.txt").toString
+    Thread.currentThread().getContextClassLoader.getResource("test-data/text-suite.txt").toString
   }

   /** Verifies data and schema. */

From 3bc7c03cb7ea226e2ace29e771b9b64eee91d13d Mon Sep 17 00:00:00 2001
From: petermaxlee
Date: Wed, 10 Aug 2016 19:37:43 -0700
Subject: [PATCH 2/2] fix test

---
 .../org/apache/spark/sql/execution/command/DDLSuite.scala | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index e14e84e0a7642..ce1f7c5082ca1 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -677,7 +677,8 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
   }

   test("create temporary view using") {
-    val csvFile = Thread.currentThread().getContextClassLoader.getResource("cars.csv").toString()
+    val csvFile =
+      Thread.currentThread().getContextClassLoader.getResource("test-data/cars.csv").toString
     withView("testview") {
       sql(s"CREATE OR REPLACE TEMPORARY VIEW testview (c1: String, c2: String) USING " +
         "org.apache.spark.sql.execution.datasources.csv.CSVFileFormat " +
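
Note (illustrative sketch, not part of the patch series above): every suite touched here resolves its fixtures through the test classpath rather than through filesystem paths, which is why only the resource names needed a "test-data/" prefix. The Scala sketch below shows that lookup pattern under the assumption that a "test-data/cars.csv" resource is on the classpath; the object name and the null check are illustrative additions, and only the getResource call mirrors the code in the patch.

object ResourceLookupSketch {
  // Resolve a bundled test resource (anything under src/test/resources/) to a
  // URL string via the context class loader, as the suites in this patch do.
  def testFile(fileName: String): String = {
    val url = Thread.currentThread().getContextClassLoader.getResource(fileName)
    // getResource returns null when the name is not on the classpath, so fail
    // with a clear message instead of a NullPointerException on toString.
    require(url != null, s"Test resource not found on classpath: $fileName")
    url.toString
  }

  def main(args: Array[String]): Unit = {
    // After this patch, callers pass names prefixed with "test-data/".
    println(testFile("test-data/cars.csv"))
  }
}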