
Commit f1d86ff

add docs to test suite
brkyvz committed Aug 26, 2019
1 parent d2f6236 commit f1d86ff
Showing 6 changed files with 41 additions and 10 deletions.
@@ -34,7 +34,7 @@ import org.apache.spark.sql.types.StructType
 import org.apache.spark.sql.util.CaseInsensitiveStringMap

 class DataSourceV2DataFrameSessionCatalogSuite
-  extends InsertIntoTests(supportsDynamicOverwrite = true, includeSQLTests = false)
+  extends InsertIntoTests(supportsDynamicOverwrite = true, includeSQLOnlyTests = false)
   with SessionCatalogTest[InMemoryTable, InMemoryTableSessionCatalog] {

   import testImplicits._

@@ -20,7 +20,7 @@ package org.apache.spark.sql.sources.v2
 import org.apache.spark.sql.{DataFrame, Row, SaveMode}

 class DataSourceV2DataFrameSuite
-  extends InsertIntoTests(supportsDynamicOverwrite = true, includeSQLTests = false) {
+  extends InsertIntoTests(supportsDynamicOverwrite = true, includeSQLOnlyTests = false) {
   import testImplicits._

   before {

@@ -21,7 +21,7 @@ import org.apache.spark.sql.{DataFrame, SaveMode}
 import org.apache.spark.sql.internal.SQLConf.{PARTITION_OVERWRITE_MODE, PartitionOverwriteMode}

 class DataSourceV2SQLSessionCatalogSuite
-  extends InsertIntoTests(supportsDynamicOverwrite = true, includeSQLTests = true)
+  extends InsertIntoTests(supportsDynamicOverwrite = true, includeSQLOnlyTests = true)
   with SessionCatalogTest[InMemoryTable, InMemoryTableSessionCatalog] {

   import testImplicits._

@@ -32,7 +32,7 @@ import org.apache.spark.sql.types.{ArrayType, BooleanType, DoubleType, IntegerTy
 import org.apache.spark.sql.util.CaseInsensitiveStringMap

 class DataSourceV2SQLSuite
-  extends InsertIntoTests(supportsDynamicOverwrite = true, includeSQLTests = true) {
+  extends InsertIntoTests(supportsDynamicOverwrite = true, includeSQLOnlyTests = true) {

   import org.apache.spark.sql.catalog.v2.CatalogV2Implicits._

@@ -23,12 +23,31 @@ import org.apache.spark.sql._
 import org.apache.spark.sql.internal.SQLConf.{PARTITION_OVERWRITE_MODE, PartitionOverwriteMode}
 import org.apache.spark.sql.test.SharedSparkSession

+/**
+ * A collection of "INSERT INTO" tests that can be run through the SQL or DataFrameWriter APIs.
+ * Extending test suites can implement the `doInsert` method to run the insert through either
+ * API.
+ *
+ * @param supportsDynamicOverwrite Whether the Table implementations used in the test suite support
+ *                                 dynamic partition overwrites. If they do, we will check for the
+ *                                 success of the operations. If not, then we will check that we
+ *                                 failed with the right error message.
+ * @param includeSQLOnlyTests Certain INSERT INTO behavior can be achieved purely through SQL, e.g.
+ *                            static or dynamic partition overwrites. This flag should be set to
+ *                            true if we would like to test these cases.
+ */
 abstract class InsertIntoTests(
     override protected val supportsDynamicOverwrite: Boolean,
-    override protected val includeSQLTests: Boolean) extends InsertIntoSQLTests {
+    override protected val includeSQLOnlyTests: Boolean) extends InsertIntoSQLOnlyTests {

   import testImplicits._

+  /**
+   * Insert data into a table using the insertInto statement. Implementations can be in SQL
+   * ("INSERT") or using the DataFrameWriter (`df.write.insertInto`).
+   */
+  protected def doInsert(tableName: String, insert: DataFrame, mode: SaveMode = null): Unit
+
   test("insertInto: append") {
     val t1 = s"${catalogAndNamespace}tbl"
     sql(s"CREATE TABLE $t1 (id bigint, data string) USING $v2Format")
@@ -156,16 +175,28 @@ abstract class InsertIntoTests(
   }
 }

-private[v2] trait InsertIntoSQLTests extends QueryTest with SharedSparkSession with BeforeAndAfter {
+private[v2] trait InsertIntoSQLOnlyTests
+  extends QueryTest
+  with SharedSparkSession
+  with BeforeAndAfter {

   import testImplicits._

-  protected def doInsert(tableName: String, insert: DataFrame, mode: SaveMode = null): Unit
+  /** Check that the results in `tableName` match the `expected` DataFrame. */
   protected def verifyTable(tableName: String, expected: DataFrame): Unit

   protected val v2Format: String
   protected val catalogAndNamespace: String

+  /**
+   * Whether dynamic partition overwrites are supported by the `Table` definitions used in the
+   * test suites. Tables that leverage the V1 Write interface do not support dynamic partition
+   * overwrites.
+   */
   protected val supportsDynamicOverwrite: Boolean
-  protected val includeSQLTests: Boolean
+
+  /** Whether to include the SQL specific tests in this trait within the extending test suite. */
+  protected val includeSQLOnlyTests: Boolean

   private def withTableAndData(tableName: String)(testFn: String => Unit): Unit = {
     withTable(tableName) {
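
Two of the members documented above are easy to picture concretely. The following is a sketch only, not from this commit: `verifyTable` is assumed to reduce to a `checkAnswer` over a full scan, and the exception type and message fragment in the failure branch are assumptions.

// Sketch: verifyTable as a full-scan comparison against the expected DataFrame.
override protected def verifyTable(tableName: String, expected: DataFrame): Unit = {
  checkAnswer(spark.table(tableName), expected)
}

// Sketch: a helper in the abstract class (where doInsert is in scope) branching
// on supportsDynamicOverwrite, per the Scaladoc above.
def checkDynamicOverwrite(t1: String, df: DataFrame, expected: DataFrame): Unit = {
  withSQLConf(PARTITION_OVERWRITE_MODE.key -> PartitionOverwriteMode.DYNAMIC.toString) {
    if (supportsDynamicOverwrite) {
      doInsert(t1, df, SaveMode.Overwrite)
      verifyTable(t1, expected)
    } else {
      val e = intercept[AnalysisException] {
        doInsert(t1, df, SaveMode.Overwrite)
      }
      assert(e.getMessage.contains("dynamic overwrite"))  // exact wording assumed
    }
  }
}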
@@ -194,7 +225,7 @@ private[v2] trait InsertIntoSQLTests extends QueryTest with SharedSparkSession with BeforeAndAfter {
     }
   }

-  if (includeSQLTests) {
+  if (includeSQLOnlyTests) {
     test("InsertInto: when the table doesn't exist") {
       val t1 = s"${catalogAndNamespace}tbl"
       val t2 = s"${catalogAndNamespace}tbl2"
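
The gate above wraps every SQL-only test. A hypothetical member of that block (the real test bodies are collapsed in this view, so this is illustrative only) might read:

test("InsertInto: overwrite a static partition (illustrative sketch)") {
  val t1 = s"${catalogAndNamespace}tbl"
  withTable(t1) {
    sql(s"CREATE TABLE $t1 (id bigint, data string) USING $v2Format PARTITIONED BY (id)")
    sql(s"INSERT INTO $t1 PARTITION (id = 1) SELECT 'a'")
    // Static partition overwrite replaces only the named partition.
    sql(s"INSERT OVERWRITE TABLE $t1 PARTITION (id = 1) SELECT 'b'")
    verifyTable(t1, Seq((1L, "b")).toDF("id", "data"))
  }
}
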
@@ -69,7 +69,7 @@ class V1WriteFallbackSuite extends QueryTest with SharedSparkSession with Before
 }

 class V1WriteFallbackSessionCatalogSuite
-  extends InsertIntoTests(supportsDynamicOverwrite = false, includeSQLTests = true)
+  extends InsertIntoTests(supportsDynamicOverwrite = false, includeSQLOnlyTests = true)
   with SessionCatalogTest[InMemoryTableWithV1Fallback, V1FallbackTableCatalog] {

   override protected val v2Format = classOf[InMemoryV1Provider].getName
