From 018c6818762402445d408f48c30f5a2d0a97bf5e Mon Sep 17 00:00:00 2001
From: tedyu
Date: Thu, 17 Mar 2016 09:25:52 -0700
Subject: [PATCH 1/9] [SPARK-12719][HOTFIX] Fix compilation against Scala 2.10

---
 .../org/apache/spark/sql/hive/SQLBuilder.scala | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/SQLBuilder.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/SQLBuilder.scala
index 05dfad239aa69..ba14ff2d23016 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/SQLBuilder.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/SQLBuilder.scala
@@ -481,14 +481,14 @@ class SQLBuilder(logicalPlan: LogicalPlan, sqlContext: SQLContext) extends Loggi
   }
 
   private def addSubqueryIfNeeded(plan: LogicalPlan): LogicalPlan = plan match {
-    case _: SubqueryAlias |
-         _: Filter |
-         _: Join |
-         _: LocalLimit |
-         _: GlobalLimit |
-         _: SQLTable |
-         _: Generate |
-         OneRowRelation => plan
+    case _: SubqueryAlias => plan
+    case _: Filter => plan
+    case _: Join => plan
+    case _: LocalLimit => plan
+    case _: GlobalLimit => plan
+    case _: SQLTable => plan
+    case _: Generate => plan
+    case _: OneRowRelation => plan
     case _ => addSubquery(plan)
   }
 }

From e565c62aa825745a663ffeb659db33b90d1ddbd7 Mon Sep 17 00:00:00 2001
From: tedyu
Date: Thu, 17 Mar 2016 14:11:23 -0700
Subject: [PATCH 2/9] Change OneRowRelation to class

---
 .../spark/sql/catalyst/plans/logical/basicOperators.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala
index 09ea3fea6a694..2f681e665d7bd 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala
@@ -673,7 +673,7 @@ case class Repartition(numPartitions: Int, shuffle: Boolean, child: LogicalPlan)
 /**
  * A relation with one row. This is used in "SELECT ..." without a from clause.
  */
-case object OneRowRelation extends LeafNode {
+case class OneRowRelation() extends LeafNode {
   override def maxRows: Option[Long] = Some(1)
 
   override def output: Seq[Attribute] = Nil

From dad73b7ad3edfa311bc48cbe05089d9f9863adac Mon Sep 17 00:00:00 2001
From: tedyu
Date: Thu, 17 Mar 2016 14:23:56 -0700
Subject: [PATCH 3/9] Use OneRowRelation ctor properly

---
 .../scala/org/apache/spark/sql/catalyst/parser/CatalystQl.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/CatalystQl.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/CatalystQl.scala
index 7d5a46873c217..415e64dad72f0 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/CatalystQl.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/CatalystQl.scala
@@ -201,7 +201,7 @@ https://cwiki.apache.org/confluence/display/Hive/Enhanced+Aggregation%2C+Cube%2C
 
         val relations = fromClause match {
           case Some(f) => nodeToRelation(f)
-          case None => OneRowRelation
+          case None => OneRowRelation()
         }
 
         val withLateralView = lateralViewClause.map { lv =>

From eb1aebfa5afa67088d322bec2d1a4c6283e192e8 Mon Sep 17 00:00:00 2001
From: tedyu
Date: Thu, 17 Mar 2016 14:42:20 -0700
Subject: [PATCH 4/9] Use OneRowRelation ctor in tests

---
 .../sql/catalyst/expressions/ExpressionEvalHelper.scala   | 2 +-
 .../sql/catalyst/expressions/MathFunctionsSuite.scala     | 2 +-
 .../spark/sql/catalyst/optimizer/ColumnPruningSuite.scala | 2 +-
 .../optimizer/EliminateSubqueryAliasesSuite.scala         | 4 ++--
 .../sql/catalyst/optimizer/SimplifyConditionalSuite.scala | 2 +-
 .../spark/sql/catalyst/parser/CatalystQlSuite.scala       | 8 ++++----
 .../org/apache/spark/sql/catalyst/plans/PlanTest.scala    | 2 +-
 7 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelper.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelper.scala
index cf26d4843d84f..bb5eebec6aa78 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelper.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelper.scala
@@ -152,7 +152,7 @@ trait ExpressionEvalHelper extends GeneratorDrivenPropertyChecks {
       expression: Expression,
       expected: Any,
       inputRow: InternalRow = EmptyRow): Unit = {
-    val plan = Project(Alias(expression, s"Optimized($expression)")() :: Nil, OneRowRelation)
+    val plan = Project(Alias(expression, s"Optimized($expression)")() :: Nil, OneRowRelation())
     val optimizedPlan = DefaultOptimizer.execute(plan)
     checkEvaluationWithoutCodegen(optimizedPlan.expressions.head, expected, inputRow)
   }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/MathFunctionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/MathFunctionsSuite.scala
index bd674dadd0fcc..5db5989af016d 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/MathFunctionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/MathFunctionsSuite.scala
@@ -150,7 +150,7 @@ class MathFunctionsSuite extends SparkFunSuite with ExpressionEvalHelper {
   private def checkNaNWithOptimization(
       expression: Expression,
       inputRow: InternalRow = EmptyRow): Unit = {
-    val plan = Project(Alias(expression, s"Optimized($expression)")() :: Nil, OneRowRelation)
+    val plan = Project(Alias(expression, s"Optimized($expression)")() :: Nil, OneRowRelation())
     val optimizedPlan = DefaultOptimizer.execute(plan)
     checkNaNWithoutCodegen(optimizedPlan.expressions.head, inputRow)
   }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ColumnPruningSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ColumnPruningSuite.scala
index dd7d65ddc9e96..42227a3fa169a 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ColumnPruningSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ColumnPruningSuite.scala
@@ -160,7 +160,7 @@ class ColumnPruningSuite extends PlanTest {
   }
 
   test("Eliminate the Project with an empty projectList") {
-    val input = OneRowRelation
+    val input = OneRowRelation()
     val expected = Project(Literal(1).as("1") :: Nil, input).analyze
 
     val query1 =
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/EliminateSubqueryAliasesSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/EliminateSubqueryAliasesSuite.scala
index 9b6d68aee803a..4df1a145a271b 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/EliminateSubqueryAliasesSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/EliminateSubqueryAliasesSuite.scala
@@ -35,8 +35,8 @@ class EliminateSubqueryAliasesSuite extends PlanTest with PredicateHelper {
   }
 
   private def assertEquivalent(e1: Expression, e2: Expression): Unit = {
-    val correctAnswer = Project(Alias(e2, "out")() :: Nil, OneRowRelation).analyze
-    val actual = Optimize.execute(Project(Alias(e1, "out")() :: Nil, OneRowRelation).analyze)
+    val correctAnswer = Project(Alias(e2, "out")() :: Nil, OneRowRelation()).analyze
+    val actual = Optimize.execute(Project(Alias(e1, "out")() :: Nil, OneRowRelation()).analyze)
     comparePlans(actual, correctAnswer)
   }
 
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/SimplifyConditionalSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/SimplifyConditionalSuite.scala
index d436b627f6bd2..4fbbcbc337c6e 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/SimplifyConditionalSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/SimplifyConditionalSuite.scala
@@ -33,7 +33,7 @@ class SimplifyConditionalSuite extends PlanTest with PredicateHelper {
   }
 
   protected def assertEquivalent(e1: Expression, e2: Expression): Unit = {
-    val correctAnswer = Project(Alias(e2, "out")() :: Nil, OneRowRelation).analyze
+    val correctAnswer = Project(Alias(e2, "out")() :: Nil, OneRowRelation()).analyze
     val actual = Optimize.execute(Project(Alias(e1, "out")() :: Nil, OneRowRelation).analyze)
     comparePlans(actual, correctAnswer)
   }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/CatalystQlSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/CatalystQlSuite.scala
index 048b4f12b9edf..21cba53ddf26d 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/CatalystQlSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/CatalystQlSuite.scala
@@ -29,7 +29,7 @@ class CatalystQlSuite extends PlanTest {
   val parser = new CatalystQl()
 
   test("test case insensitive") {
-    val result = Project(UnresolvedAlias(Literal(1)):: Nil, OneRowRelation)
+    val result = Project(UnresolvedAlias(Literal(1)):: Nil, OneRowRelation())
     assert(result === parser.parsePlan("seLect 1"))
     assert(result === parser.parsePlan("select 1"))
     assert(result === parser.parsePlan("SELECT 1"))
@@ -42,7 +42,7 @@ class CatalystQlSuite extends PlanTest {
         Not(
           GreaterThan(Literal(true), Literal(true)))
       ) :: Nil,
-      OneRowRelation)
+      OneRowRelation())
     comparePlans(parsed, expected)
   }
 
@@ -82,7 +82,7 @@ class CatalystQlSuite extends PlanTest {
         UnresolvedAlias(
          Literal(result)
         ) :: Nil,
-        OneRowRelation)
+        OneRowRelation())
 
       comparePlans(parsed, expected)
     }
@@ -133,7 +133,7 @@ class CatalystQlSuite extends PlanTest {
         UnresolvedAlias(
          Literal(output)
         ) :: Nil,
-        OneRowRelation)
+        OneRowRelation())
 
      comparePlans(parsed, expected)
     }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/PlanTest.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/PlanTest.scala
index 0541844e0bfcd..d447eca931b6f 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/PlanTest.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/PlanTest.scala
@@ -66,6 +66,6 @@ abstract class PlanTest extends SparkFunSuite with PredicateHelper {
 
   /** Fails the test if the two expressions do not match */
   protected def compareExpressions(e1: Expression, e2: Expression): Unit = {
-    comparePlans(Filter(e1, OneRowRelation), Filter(e2, OneRowRelation))
+    comparePlans(Filter(e1, OneRowRelation()), Filter(e2, OneRowRelation()))
   }
 }

From 302753b1a10caa55b4a18ccce87cab64bcf35e5f Mon Sep 17 00:00:00 2001
From: tedyu
Date: Thu, 17 Mar 2016 14:53:40 -0700
Subject: [PATCH 5/9] Revert previous attempt

---
 .../spark/sql/catalyst/parser/CatalystQl.scala   |  2 +-
 .../catalyst/plans/logical/basicOperators.scala  |  2 +-
 .../expressions/ExpressionEvalHelper.scala       |  2 +-
 .../expressions/MathFunctionsSuite.scala         |  2 +-
 .../catalyst/optimizer/ColumnPruningSuite.scala  |  2 +-
 .../EliminateSubqueryAliasesSuite.scala          |  4 ++--
 .../optimizer/SimplifyConditionalSuite.scala     |  2 +-
 .../sql/catalyst/parser/CatalystQlSuite.scala    |  8 ++++----
 .../spark/sql/catalyst/plans/PlanTest.scala      |  2 +-
 .../org/apache/spark/sql/hive/SQLBuilder.scala   | 16 ++++++++--------
 10 files changed, 21 insertions(+), 21 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/CatalystQl.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/CatalystQl.scala
index 415e64dad72f0..7d5a46873c217 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/CatalystQl.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/CatalystQl.scala
@@ -201,7 +201,7 @@ https://cwiki.apache.org/confluence/display/Hive/Enhanced+Aggregation%2C+Cube%2C
 
         val relations = fromClause match {
           case Some(f) => nodeToRelation(f)
-          case None => OneRowRelation()
+          case None => OneRowRelation
         }
 
        val withLateralView = lateralViewClause.map { lv =>
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala
index 2f681e665d7bd..09ea3fea6a694 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala
@@ -673,7 +673,7 @@ case class Repartition(numPartitions: Int, shuffle: Boolean, child: LogicalPlan)
 /**
  * A relation with one row. This is used in "SELECT ..." without a from clause.
  */
-case class OneRowRelation() extends LeafNode {
+case object OneRowRelation extends LeafNode {
   override def maxRows: Option[Long] = Some(1)
 
   override def output: Seq[Attribute] = Nil
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelper.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelper.scala
index bb5eebec6aa78..cf26d4843d84f 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelper.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelper.scala
@@ -152,7 +152,7 @@ trait ExpressionEvalHelper extends GeneratorDrivenPropertyChecks {
       expression: Expression,
       expected: Any,
       inputRow: InternalRow = EmptyRow): Unit = {
-    val plan = Project(Alias(expression, s"Optimized($expression)")() :: Nil, OneRowRelation())
+    val plan = Project(Alias(expression, s"Optimized($expression)")() :: Nil, OneRowRelation)
     val optimizedPlan = DefaultOptimizer.execute(plan)
     checkEvaluationWithoutCodegen(optimizedPlan.expressions.head, expected, inputRow)
   }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/MathFunctionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/MathFunctionsSuite.scala
index 5db5989af016d..bd674dadd0fcc 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/MathFunctionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/MathFunctionsSuite.scala
@@ -150,7 +150,7 @@ class MathFunctionsSuite extends SparkFunSuite with ExpressionEvalHelper {
   private def checkNaNWithOptimization(
       expression: Expression,
       inputRow: InternalRow = EmptyRow): Unit = {
-    val plan = Project(Alias(expression, s"Optimized($expression)")() :: Nil, OneRowRelation())
+    val plan = Project(Alias(expression, s"Optimized($expression)")() :: Nil, OneRowRelation)
     val optimizedPlan = DefaultOptimizer.execute(plan)
     checkNaNWithoutCodegen(optimizedPlan.expressions.head, inputRow)
   }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ColumnPruningSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ColumnPruningSuite.scala
index 42227a3fa169a..dd7d65ddc9e96 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ColumnPruningSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ColumnPruningSuite.scala
@@ -160,7 +160,7 @@ class ColumnPruningSuite extends PlanTest {
   }
 
   test("Eliminate the Project with an empty projectList") {
-    val input = OneRowRelation()
+    val input = OneRowRelation
     val expected = Project(Literal(1).as("1") :: Nil, input).analyze
 
     val query1 =
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/EliminateSubqueryAliasesSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/EliminateSubqueryAliasesSuite.scala
index 4df1a145a271b..9b6d68aee803a 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/EliminateSubqueryAliasesSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/EliminateSubqueryAliasesSuite.scala
@@ -35,8 +35,8 @@ class EliminateSubqueryAliasesSuite extends PlanTest with PredicateHelper {
   }
 
   private def assertEquivalent(e1: Expression, e2: Expression): Unit = {
-    val correctAnswer = Project(Alias(e2, "out")() :: Nil, OneRowRelation()).analyze
-    val actual = Optimize.execute(Project(Alias(e1, "out")() :: Nil, OneRowRelation()).analyze)
+    val correctAnswer = Project(Alias(e2, "out")() :: Nil, OneRowRelation).analyze
+    val actual = Optimize.execute(Project(Alias(e1, "out")() :: Nil, OneRowRelation).analyze)
     comparePlans(actual, correctAnswer)
   }
 
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/SimplifyConditionalSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/SimplifyConditionalSuite.scala
index 4fbbcbc337c6e..d436b627f6bd2 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/SimplifyConditionalSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/SimplifyConditionalSuite.scala
@@ -33,7 +33,7 @@ class SimplifyConditionalSuite extends PlanTest with PredicateHelper {
   }
 
   protected def assertEquivalent(e1: Expression, e2: Expression): Unit = {
-    val correctAnswer = Project(Alias(e2, "out")() :: Nil, OneRowRelation()).analyze
+    val correctAnswer = Project(Alias(e2, "out")() :: Nil, OneRowRelation).analyze
     val actual = Optimize.execute(Project(Alias(e1, "out")() :: Nil, OneRowRelation).analyze)
     comparePlans(actual, correctAnswer)
   }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/CatalystQlSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/CatalystQlSuite.scala
index 21cba53ddf26d..048b4f12b9edf 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/CatalystQlSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/CatalystQlSuite.scala
@@ -29,7 +29,7 @@ class CatalystQlSuite extends PlanTest {
   val parser = new CatalystQl()
 
   test("test case insensitive") {
-    val result = Project(UnresolvedAlias(Literal(1)):: Nil, OneRowRelation())
+    val result = Project(UnresolvedAlias(Literal(1)):: Nil, OneRowRelation)
     assert(result === parser.parsePlan("seLect 1"))
     assert(result === parser.parsePlan("select 1"))
     assert(result === parser.parsePlan("SELECT 1"))
@@ -42,7 +42,7 @@ class CatalystQlSuite extends PlanTest {
         Not(
           GreaterThan(Literal(true), Literal(true)))
       ) :: Nil,
-      OneRowRelation())
+      OneRowRelation)
     comparePlans(parsed, expected)
   }
 
@@ -82,7 +82,7 @@ class CatalystQlSuite extends PlanTest {
         UnresolvedAlias(
          Literal(result)
         ) :: Nil,
-        OneRowRelation())
+        OneRowRelation)
 
       comparePlans(parsed, expected)
     }
@@ -133,7 +133,7 @@ class CatalystQlSuite extends PlanTest {
         UnresolvedAlias(
          Literal(output)
         ) :: Nil,
-        OneRowRelation())
+        OneRowRelation)
 
      comparePlans(parsed, expected)
     }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/PlanTest.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/PlanTest.scala
index d447eca931b6f..0541844e0bfcd 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/PlanTest.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/PlanTest.scala
@@ -66,6 +66,6 @@ abstract class PlanTest extends SparkFunSuite with PredicateHelper {
 
   /** Fails the test if the two expressions do not match */
   protected def compareExpressions(e1: Expression, e2: Expression): Unit = {
-    comparePlans(Filter(e1, OneRowRelation()), Filter(e2, OneRowRelation()))
+    comparePlans(Filter(e1, OneRowRelation), Filter(e2, OneRowRelation))
   }
 }
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/SQLBuilder.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/SQLBuilder.scala
index ba14ff2d23016..05dfad239aa69 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/SQLBuilder.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/SQLBuilder.scala
@@ -481,14 +481,14 @@ class SQLBuilder(logicalPlan: LogicalPlan, sqlContext: SQLContext) extends Loggi
   }
 
   private def addSubqueryIfNeeded(plan: LogicalPlan): LogicalPlan = plan match {
-    case _: SubqueryAlias => plan
-    case _: Filter => plan
-    case _: Join => plan
-    case _: LocalLimit => plan
-    case _: GlobalLimit => plan
-    case _: SQLTable => plan
-    case _: Generate => plan
-    case _: OneRowRelation => plan
+    case _: SubqueryAlias |
+         _: Filter |
+         _: Join |
+         _: LocalLimit |
+         _: GlobalLimit |
+         _: SQLTable |
+         _: Generate |
+         OneRowRelation => plan
     case _ => addSubquery(plan)
   }
 }

From 298c8fe1ddb919131f15a561fcdeb2e99c458b74 Mon Sep 17 00:00:00 2001
From: tedyu
Date: Thu, 17 Mar 2016 15:22:00 -0700
Subject: [PATCH 6/9] [SPARK-12719][HOTFIX] Fix compilation against Scala 2.10

---
 project/SparkBuild.scala | 1 +
 1 file changed, 1 insertion(+)

diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index dbe98d1e14fda..e49f1d83a37e0 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -124,6 +124,7 @@ object SparkBuild extends PomBuild {
       // in the same way as Maven which handles -Dname as -Dname=true before executes build process.
       // see: https://github.com/apache/maven/blob/maven-3.0.4/maven-embedder/src/main/java/org/apache/maven/cli/MavenCli.java#L1082
       System.setProperty("scala-2.10", "true")
+      System.setProperty("scalac.patmat.analysisBudget", "512")
     }
     profiles
   }

From a5403b12ef97c6ee2acad00c3e5fd7bfe28dc144 Mon Sep 17 00:00:00 2001
From: tedyu
Date: Thu, 17 Mar 2016 15:55:01 -0700
Subject: [PATCH 7/9] Move analysisBudget to scalacOptions

---
 project/SparkBuild.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index e49f1d83a37e0..58e59132a20d7 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -124,7 +124,6 @@ object SparkBuild extends PomBuild {
       // in the same way as Maven which handles -Dname as -Dname=true before executes build process.
       // see: https://github.com/apache/maven/blob/maven-3.0.4/maven-embedder/src/main/java/org/apache/maven/cli/MavenCli.java#L1082
       System.setProperty("scala-2.10", "true")
-      System.setProperty("scalac.patmat.analysisBudget", "512")
     }
     profiles
   }
@@ -193,6 +192,7 @@ object SparkBuild extends PomBuild {
 
     scalacOptions in Compile ++= Seq(
       s"-target:jvm-${scalacJVMVersion.value}",
+      "-Dscalac.patmat.analysisBudget=512",
       "-sourcepath", (baseDirectory in ThisBuild).value.getAbsolutePath // Required for relative source links in scaladoc
     ),
 

From f13abe86b5acb06c9f2f972582d6c2e21cae948b Mon Sep 17 00:00:00 2001
From: tedyu
Date: Thu, 17 Mar 2016 19:30:54 -0700
Subject: [PATCH 8/9] Revert addition of analysisBudget since it doesn't work

---
 project/SparkBuild.scala | 1 -
 1 file changed, 1 deletion(-)

diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 58e59132a20d7..dbe98d1e14fda 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -192,7 +192,6 @@ object SparkBuild extends PomBuild {
 
     scalacOptions in Compile ++= Seq(
       s"-target:jvm-${scalacJVMVersion.value}",
-      "-Dscalac.patmat.analysisBudget=512",
       "-sourcepath", (baseDirectory in ThisBuild).value.getAbsolutePath // Required for relative source links in scaladoc
     ),
 

From bd37dcf9557a3c2757987cd74793f080ad42f515 Mon Sep 17 00:00:00 2001
From: tedyu
Date: Thu, 17 Mar 2016 19:33:18 -0700
Subject: [PATCH 9/9] Expand the cases in match

---
 .../org/apache/spark/sql/hive/SQLBuilder.scala | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/SQLBuilder.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/SQLBuilder.scala
index 05dfad239aa69..eda82549a7bad 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/SQLBuilder.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/SQLBuilder.scala
@@ -481,14 +481,14 @@ class SQLBuilder(logicalPlan: LogicalPlan, sqlContext: SQLContext) extends Loggi
   }
 
   private def addSubqueryIfNeeded(plan: LogicalPlan): LogicalPlan = plan match {
-    case _: SubqueryAlias |
-         _: Filter |
-         _: Join |
-         _: LocalLimit |
-         _: GlobalLimit |
-         _: SQLTable |
-         _: Generate |
-         OneRowRelation => plan
+    case _: SubqueryAlias => plan
+    case _: Filter => plan
+    case _: Join => plan
+    case _: LocalLimit => plan
+    case _: GlobalLimit => plan
+    case _: SQLTable => plan
+    case _: Generate => plan
+    case OneRowRelation => plan
     case _ => addSubquery(plan)
   }
 }