[SPARK-6765] Fix test code style for SQL
So we can turn style checker on for test code.

Author: Reynold Xin <rxin@databricks.com>

Closes apache#5412 from rxin/test-style-sql and squashes the following commits:

9098a31 [Reynold Xin] One more compilation error ...
8c7250a [Reynold Xin] Fix compilation.
82d0944 [Reynold Xin] Indentation.
0b03fbb [Reynold Xin] code review.
f2f4348 [Reynold Xin] oops.
ef4ec48 [Reynold Xin] Hive module.
7e0db5e [Reynold Xin] sql module
04ec7ac [Reynold Xin] catalyst module
rxin committed Apr 9, 2015
1 parent 891ada5 commit 1b2aab8
Showing 45 changed files with 395 additions and 234 deletions.
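Most of the 45 files repeat a small set of mechanical fixes: explicit result types on public members, long lines wrapped at the 100-character limit, braces around multi-line `if` bodies, and spaces around operators. A minimal before/after sketch of those patterns (the names are illustrative only, not taken from the patch):

// Illustrative sketch; none of these names appear in the patch.
object Before {
  // Inferred result type, brace-less multi-line `if` body, no spaces around `+`.
  def check(actual: Boolean, expected: Boolean) =
    if (actual != expected)
      sys.error("Expected "+expected+" got "+actual)
}

object After {
  // Explicit `Unit` result type, braces around the multi-line body,
  // spaces around operators; lines over 100 characters get wrapped.
  def check(actual: Boolean, expected: Boolean): Unit = {
    if (actual != expected) {
      sys.error("Expected " + expected + " got " + actual)
    }
  }
}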
@@ -30,7 +30,7 @@ class DistributionSuite extends FunSuite {
       inputPartitioning: Partitioning,
       requiredDistribution: Distribution,
       satisfied: Boolean) {
-    if (inputPartitioning.satisfies(requiredDistribution) != satisfied)
+    if (inputPartitioning.satisfies(requiredDistribution) != satisfied) {
       fail(
         s"""
         |== Input Partitioning ==
@@ -40,6 +40,7 @@ class DistributionSuite extends FunSuite {
         |== Does input partitioning satisfy required distribution? ==
         |Expected $satisfied got ${inputPartitioning.satisfies(requiredDistribution)}
         """.stripMargin)
+    }
   }

   test("HashPartitioning is the output partitioning") {
@@ -27,6 +27,8 @@ import org.apache.spark.sql.types._
 import org.apache.spark.sql.catalyst.dsl.expressions._
 import org.apache.spark.sql.catalyst.dsl.plans._

+import scala.collection.immutable
+
 class AnalysisSuite extends FunSuite with BeforeAndAfter {
   val caseSensitiveCatalog = new SimpleCatalog(true)
   val caseInsensitiveCatalog = new SimpleCatalog(false)
@@ -41,10 +43,10 @@ class AnalysisSuite extends FunSuite with BeforeAndAfter {
   }


-  def caseSensitiveAnalyze(plan: LogicalPlan) =
+  def caseSensitiveAnalyze(plan: LogicalPlan): Unit =
     caseSensitiveAnalyzer.checkAnalysis(caseSensitiveAnalyzer(plan))

-  def caseInsensitiveAnalyze(plan: LogicalPlan) =
+  def caseInsensitiveAnalyze(plan: LogicalPlan): Unit =
     caseInsensitiveAnalyzer.checkAnalysis(caseInsensitiveAnalyzer(plan))

   val testRelation = LocalRelation(AttributeReference("a", IntegerType, nullable = true)())
@@ -147,7 +149,7 @@ class AnalysisSuite extends FunSuite with BeforeAndAfter {
       name: String,
       plan: LogicalPlan,
       errorMessages: Seq[String],
-      caseSensitive: Boolean = true) = {
+      caseSensitive: Boolean = true): Unit = {
     test(name) {
       val error = intercept[AnalysisException] {
         if(caseSensitive) {
@@ -202,7 +204,7 @@ class AnalysisSuite extends FunSuite with BeforeAndAfter {

   case class UnresolvedTestPlan() extends LeafNode {
     override lazy val resolved = false
-    override def output = Nil
+    override def output: Seq[Attribute] = Nil
   }

   errorTest(
@@ -96,7 +96,9 @@ class HiveTypeCoercionSuite extends PlanTest {
     widenTest(StringType, TimestampType, None)

     // ComplexType
-    widenTest(NullType, MapType(IntegerType, StringType, false), Some(MapType(IntegerType, StringType, false)))
+    widenTest(NullType,
+      MapType(IntegerType, StringType, false),
+      Some(MapType(IntegerType, StringType, false)))
     widenTest(NullType, StructType(Seq()), Some(StructType(Seq())))
     widenTest(StringType, MapType(IntegerType, StringType, true), None)
     widenTest(ArrayType(IntegerType), StructType(Seq()), None)
@@ -113,7 +115,9 @@ class HiveTypeCoercionSuite extends PlanTest {
     // Remove superflous boolean -> boolean casts.
     ruleTest(Cast(Literal(true), BooleanType), Literal(true))
     // Stringify boolean when casting to string.
-    ruleTest(Cast(Literal(false), StringType), If(Literal(false), Literal("true"), Literal("false")))
+    ruleTest(
+      Cast(Literal(false), StringType),
+      If(Literal(false), Literal("true"), Literal("false")))
   }

   test("coalesce casts") {

[One file's large diff is not rendered by default.]

@@ -176,40 +176,39 @@ class ConstantFoldingSuite extends PlanTest {
   }

   test("Constant folding test: expressions have null literals") {
-    val originalQuery =
-      testRelation
-        .select(
-          IsNull(Literal(null)) as 'c1,
-          IsNotNull(Literal(null)) as 'c2,
+    val originalQuery = testRelation.select(
+      IsNull(Literal(null)) as 'c1,
+      IsNotNull(Literal(null)) as 'c2,

-          GetItem(Literal.create(null, ArrayType(IntegerType)), 1) as 'c3,
-          GetItem(Literal.create(Seq(1), ArrayType(IntegerType)), Literal.create(null, IntegerType)) as 'c4,
-          UnresolvedGetField(
-            Literal.create(null, StructType(Seq(StructField("a", IntegerType, true)))),
-            "a") as 'c5,
+      GetItem(Literal.create(null, ArrayType(IntegerType)), 1) as 'c3,
+      GetItem(
+        Literal.create(Seq(1), ArrayType(IntegerType)), Literal.create(null, IntegerType)) as 'c4,
+      UnresolvedGetField(
+        Literal.create(null, StructType(Seq(StructField("a", IntegerType, true)))),
+        "a") as 'c5,

-          UnaryMinus(Literal.create(null, IntegerType)) as 'c6,
-          Cast(Literal(null), IntegerType) as 'c7,
-          Not(Literal.create(null, BooleanType)) as 'c8,
+      UnaryMinus(Literal.create(null, IntegerType)) as 'c6,
+      Cast(Literal(null), IntegerType) as 'c7,
+      Not(Literal.create(null, BooleanType)) as 'c8,

-          Add(Literal.create(null, IntegerType), 1) as 'c9,
-          Add(1, Literal.create(null, IntegerType)) as 'c10,
+      Add(Literal.create(null, IntegerType), 1) as 'c9,
+      Add(1, Literal.create(null, IntegerType)) as 'c10,

-          EqualTo(Literal.create(null, IntegerType), 1) as 'c11,
-          EqualTo(1, Literal.create(null, IntegerType)) as 'c12,
+      EqualTo(Literal.create(null, IntegerType), 1) as 'c11,
+      EqualTo(1, Literal.create(null, IntegerType)) as 'c12,

-          Like(Literal.create(null, StringType), "abc") as 'c13,
-          Like("abc", Literal.create(null, StringType)) as 'c14,
+      Like(Literal.create(null, StringType), "abc") as 'c13,
+      Like("abc", Literal.create(null, StringType)) as 'c14,

-          Upper(Literal.create(null, StringType)) as 'c15,
+      Upper(Literal.create(null, StringType)) as 'c15,

-          Substring(Literal.create(null, StringType), 0, 1) as 'c16,
-          Substring("abc", Literal.create(null, IntegerType), 1) as 'c17,
-          Substring("abc", 0, Literal.create(null, IntegerType)) as 'c18,
+      Substring(Literal.create(null, StringType), 0, 1) as 'c16,
+      Substring("abc", Literal.create(null, IntegerType), 1) as 'c17,
+      Substring("abc", 0, Literal.create(null, IntegerType)) as 'c18,

-          Contains(Literal.create(null, StringType), "abc") as 'c19,
-          Contains("abc", Literal.create(null, StringType)) as 'c20
-        )
+      Contains(Literal.create(null, StringType), "abc") as 'c19,
+      Contains("abc", Literal.create(null, StringType)) as 'c20
+    )

     val optimized = Optimize(originalQuery.analyze)

@@ -432,7 +432,8 @@ class FilterPushdownSuite extends PlanTest {

     val originalQuery = {
       z.join(x.join(y))
-        .where(("x.b".attr === "y.b".attr) && ("x.a".attr === 1) && ("z.a".attr >= 3) && ("z.a".attr === "x.b".attr))
+        .where(("x.b".attr === "y.b".attr) && ("x.a".attr === 1) &&
+          ("z.a".attr >= 3) && ("z.a".attr === "x.b".attr))
     }

     val optimized = Optimize(originalQuery.analyze)
@@ -52,7 +52,7 @@ class OptimizeInSuite extends PlanTest {
     val optimized = Optimize(originalQuery.analyze)
     val correctAnswer =
       testRelation
-        .where(InSet(UnresolvedAttribute("a"), HashSet[Any]()+1+2))
+        .where(InSet(UnresolvedAttribute("a"), HashSet[Any]() + 1 + 2))
         .analyze

     comparePlans(optimized, correctAnswer)
@@ -45,12 +45,13 @@ class PlanTest extends FunSuite {
   protected def comparePlans(plan1: LogicalPlan, plan2: LogicalPlan) {
     val normalized1 = normalizeExprIds(plan1)
     val normalized2 = normalizeExprIds(plan2)
-    if (normalized1 != normalized2)
+    if (normalized1 != normalized2) {
       fail(
         s"""
           |== FAIL: Plans do not match ===
           |${sideBySide(normalized1.treeString, normalized2.treeString).mkString("\n")}
-      """.stripMargin)
+        """.stripMargin)
+    }
   }

   /** Fails the test if the two expressions do not match */
@@ -32,7 +32,7 @@ class SameResultSuite extends FunSuite {
   val testRelation = LocalRelation('a.int, 'b.int, 'c.int)
   val testRelation2 = LocalRelation('a.int, 'b.int, 'c.int)

-  def assertSameResult(a: LogicalPlan, b: LogicalPlan, result: Boolean = true) = {
+  def assertSameResult(a: LogicalPlan, b: LogicalPlan, result: Boolean = true): Unit = {
     val aAnalyzed = a.analyze
     val bAnalyzed = b.analyze

@@ -25,12 +25,12 @@ import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.types.{StringType, NullType}

 case class Dummy(optKey: Option[Expression]) extends Expression {
-  def children = optKey.toSeq
-  def nullable = true
-  def dataType = NullType
+  def children: Seq[Expression] = optKey.toSeq
+  def nullable: Boolean = true
+  def dataType: NullType = NullType
   override lazy val resolved = true
   type EvaluatedType = Any
-  def eval(input: Row) = null.asInstanceOf[Any]
+  def eval(input: Row): Any = null.asInstanceOf[Any]
 }

 class TreeNodeSuite extends FunSuite {
@@ -92,7 +92,8 @@ class CachedTableSuite extends QueryTest {

   test("too big for memory") {
     val data = "*" * 10000
-    sparkContext.parallelize(1 to 200000, 1).map(_ => BigData(data)).toDF().registerTempTable("bigData")
+    sparkContext.parallelize(1 to 200000, 1).map(_ => BigData(data)).toDF()
+      .registerTempTable("bigData")
     table("bigData").persist(StorageLevel.MEMORY_AND_DISK)
     assert(table("bigData").count() === 200000L)
     table("bigData").unpersist(blocking = true)
@@ -329,8 +329,9 @@ class DataFrameSuite extends QueryTest {
     checkAnswer(
       decimalData.agg(avg('a cast DecimalType(10, 2))),
       Row(new java.math.BigDecimal(2.0)))
+    // non-partial
     checkAnswer(
-      decimalData.agg(avg('a cast DecimalType(10, 2)), sumDistinct('a cast DecimalType(10, 2))), // non-partial
+      decimalData.agg(avg('a cast DecimalType(10, 2)), sumDistinct('a cast DecimalType(10, 2))),
       Row(new java.math.BigDecimal(2.0), new java.math.BigDecimal(6)) :: Nil)
   }

@@ -67,7 +67,7 @@ class QueryTest extends PlanTest {
     checkAnswer(df, Seq(expectedAnswer))
   }

-  def sqlTest(sqlString: String, expectedAnswer: Seq[Row])(implicit sqlContext: SQLContext): Unit = {
+  def sqlTest(sqlString: String, expectedAnswer: Seq[Row])(implicit sqlContext: SQLContext) {
     test(sqlString) {
       checkAnswer(sqlContext.sql(sqlString), expectedAnswer)
     }
30 changes: 22 additions & 8 deletions sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -268,7 +268,10 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
       Row(java.sql.Timestamp.valueOf("1969-12-31 16:00:00.002")))

     checkAnswer(sql(
-      "SELECT time FROM timestamps WHERE time IN ('1969-12-31 16:00:00.001','1969-12-31 16:00:00.002')"),
+      """
+        |SELECT time FROM timestamps
+        |WHERE time IN ('1969-12-31 16:00:00.001','1969-12-31 16:00:00.002')
+      """.stripMargin),
       Seq(Row(java.sql.Timestamp.valueOf("1969-12-31 16:00:00.001")),
         Row(java.sql.Timestamp.valueOf("1969-12-31 16:00:00.002"))))

@@ -334,7 +337,7 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
       Row("1"))
   }

-  def sortTest() = {
+  def sortTest(): Unit = {
     checkAnswer(
       sql("SELECT * FROM testData2 ORDER BY a ASC, b ASC"),
       Seq(Row(1,1), Row(1,2), Row(2,1), Row(2,2), Row(3,1), Row(3,2)))
@@ -413,7 +416,10 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {

   test("from follow multiple brackets") {
     checkAnswer(sql(
-      "select key from ((select * from testData limit 1) union all (select * from testData limit 1)) x limit 1"),
+      """
+        |select key from ((select * from testData limit 1)
+        |  union all (select * from testData limit 1)) x limit 1
+      """.stripMargin),
       Row(1)
     )

@@ -423,7 +429,11 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
     )

     checkAnswer(sql(
-      "select key from (select * from testData limit 1 union all select * from testData limit 1) x limit 1"),
+      """
+        |select key from
+        |  (select * from testData limit 1 union all select * from testData limit 1) x
+        |  limit 1
+      """.stripMargin),
       Row(1)
     )
   }
@@ -470,7 +480,10 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
       Seq(Row(1, 0), Row(2, 1)))

     checkAnswer(
-      sql("SELECT COUNT(a), COUNT(b), COUNT(1), COUNT(DISTINCT a), COUNT(DISTINCT b) FROM testData3"),
+      sql(
+        """
+          |SELECT COUNT(a), COUNT(b), COUNT(1), COUNT(DISTINCT a), COUNT(DISTINCT b) FROM testData3
+        """.stripMargin),
       Row(2, 1, 2, 2, 1))
   }

@@ -1083,7 +1096,8 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
   }

   test("SPARK-3483 Special chars in column names") {
-    val data = sparkContext.parallelize(Seq("""{"key?number1": "value1", "key.number2": "value2"}"""))
+    val data = sparkContext.parallelize(
+      Seq("""{"key?number1": "value1", "key.number2": "value2"}"""))
     jsonRDD(data).registerTempTable("records")
     sql("SELECT `key?number1` FROM records")
   }
@@ -1168,8 +1182,8 @@ class SQLQuerySuite extends QueryTest with BeforeAndAfterAll {
   }

   test("SPARK-6145: ORDER BY test for nested fields") {
-    jsonRDD(sparkContext.makeRDD(
-      """{"a": {"b": 1, "a": {"a": 1}}, "c": [{"d": 1}]}""" :: Nil)).registerTempTable("nestedOrder")
+    jsonRDD(sparkContext.makeRDD("""{"a": {"b": 1, "a": {"a": 1}}, "c": [{"d": 1}]}""" :: Nil))
+      .registerTempTable("nestedOrder")

     checkAnswer(sql("SELECT 1 FROM nestedOrder ORDER BY a.b"), Row(1))
     checkAnswer(sql("SELECT a.b FROM nestedOrder ORDER BY a.b"), Row(1))
@@ -80,7 +80,7 @@ class ScalaReflectionRelationSuite extends FunSuite {

   test("query case class RDD") {
     val data = ReflectData("a", 1, 1L, 1.toFloat, 1.toDouble, 1.toShort, 1.toByte, true,
-                           new java.math.BigDecimal(1), new Date(12345), new Timestamp(12345), Seq(1,2,3))
+      new java.math.BigDecimal(1), new Date(12345), new Timestamp(12345), Seq(1,2,3))
     val rdd = sparkContext.parallelize(data :: Nil)
     rdd.toDF().registerTempTable("reflectData")

@@ -103,7 +103,8 @@ class ScalaReflectionRelationSuite extends FunSuite {
     val rdd = sparkContext.parallelize(data :: Nil)
     rdd.toDF().registerTempTable("reflectOptionalData")

-    assert(sql("SELECT * FROM reflectOptionalData").collect().head === Row.fromSeq(Seq.fill(7)(null)))
+    assert(sql("SELECT * FROM reflectOptionalData").collect().head ===
+      Row.fromSeq(Seq.fill(7)(null)))
   }

   // Equality is broken for Arrays, so we test that separately.
@@ -63,7 +63,7 @@ private[sql] class MyDenseVectorUDT extends UserDefinedType[MyDenseVector] {
     }
   }

-  override def userClass = classOf[MyDenseVector]
+  override def userClass: Class[MyDenseVector] = classOf[MyDenseVector]

   private[spark] override def asNullable: MyDenseVectorUDT = this
 }
@@ -27,7 +27,7 @@ import org.apache.spark.sql.catalyst.expressions.GenericMutableRow
 import org.apache.spark.sql.types.{Decimal, DataType, NativeType}

 object ColumnarTestUtils {
-  def makeNullRow(length: Int) = {
+  def makeNullRow(length: Int): GenericMutableRow = {
     val row = new GenericMutableRow(length)
     (0 until length).foreach(row.setNullAt)
     row
@@ -93,7 +93,7 @@ object ColumnarTestUtils {

   def makeUniqueValuesAndSingleValueRows[T <: NativeType](
       columnType: NativeColumnType[T],
-      count: Int) = {
+      count: Int): (Seq[T#JvmType], Seq[GenericMutableRow]) = {

     val values = makeUniqueRandomValues(columnType, count)
     val rows = values.map { value =>
@@ -31,7 +31,8 @@ class TestNullableColumnAccessor[T <: DataType, JvmType](
   with NullableColumnAccessor

 object TestNullableColumnAccessor {
-  def apply[T <: DataType, JvmType](buffer: ByteBuffer, columnType: ColumnType[T, JvmType]) = {
+  def apply[T <: DataType, JvmType](buffer: ByteBuffer, columnType: ColumnType[T, JvmType])
+    : TestNullableColumnAccessor[T, JvmType] = {
     // Skips the column type ID
     buffer.getInt()
     new TestNullableColumnAccessor(buffer, columnType)
@@ -27,7 +27,8 @@ class TestNullableColumnBuilder[T <: DataType, JvmType](columnType: ColumnType[T
   with NullableColumnBuilder

 object TestNullableColumnBuilder {
-  def apply[T <: DataType, JvmType](columnType: ColumnType[T, JvmType], initialSize: Int = 0) = {
+  def apply[T <: DataType, JvmType](columnType: ColumnType[T, JvmType], initialSize: Int = 0)
+    : TestNullableColumnBuilder[T, JvmType] = {
     val builder = new TestNullableColumnBuilder(columnType)
     builder.initialize(initialSize)
     builder
@@ -35,7 +35,7 @@ object TestCompressibleColumnBuilder {
   def apply[T <: NativeType](
       columnStats: ColumnStats,
       columnType: NativeColumnType[T],
-      scheme: CompressionScheme) = {
+      scheme: CompressionScheme): TestCompressibleColumnBuilder[T] = {

     val builder = new TestCompressibleColumnBuilder(columnStats, columnType, Seq(scheme))
     builder.initialize(0, "", useCompression = true)
@@ -30,4 +30,4 @@ class DebuggingSuite extends FunSuite {
   test("DataFrame.typeCheck()") {
     testData.typeCheck()
   }
-}
\ No newline at end of file
+}
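With the test sources clean, the checker can now cover them too. A sketch of what that wiring might look like with the scalastyle-sbt-plugin — the setting and task names below follow that plugin's documented conventions and are an assumption here, not part of this commit:

// build.sbt sketch (assumed scalastyle-sbt-plugin setup; not from this patch).
// Reuse the main Scalastyle config for test sources and fail the build on violations.
scalastyleConfig in Test := baseDirectory.value / "scalastyle-config.xml"
scalastyleFailOnError := true

// Then check both main and test sources:
//   sbt scalastyle test:scalastyle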