Skip to content

Commit

Permalink
Remove default param for shuffle partitions accessor.
Browse files — browse the repository at this point in the history
  • Loading branch information
concretevitamin committed Jun 9, 2014
1 parent 5f7e6d8 commit baa5d29
Show file tree
Hide file tree
Showing 3 changed files with 3 additions and 4 deletions.
3 changes: 1 addition & 2 deletions sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala
Original file line number Diff line number Diff line change
Expand Up @@ -30,8 +30,7 @@ import scala.collection.JavaConverters._
class SQLConf {

/** Number of partitions to use for shuffle operators. */
def numShufflePartitions(default: Int = 150): Int = getOption("spark.sql.shuffle.partitions")
.map(_.toInt).getOrElse(default)
private[spark] def numShufflePartitions: Int = get("spark.sql.shuffle.partitions", "200").toInt

private val settings = java.util.Collections.synchronizedMap(
new java.util.HashMap[String, String]())
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,7 @@ case class Exchange(newPartitioning: Partitioning, child: SparkPlan) extends Una
*/
private[sql] case class AddExchange(sqlContext: SQLContext) extends Rule[SparkPlan] {
// TODO: Determine the number of partitions.
def numPartitions = sqlContext.sqlConf.numShufflePartitions(150)
def numPartitions = sqlContext.sqlConf.numShufflePartitions

def apply(plan: SparkPlan): SparkPlan = plan.transformUp {
case operator: SparkPlan =>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -177,7 +177,7 @@ private[sql] abstract class SparkStrategies extends QueryPlanner[SparkPlan] {

// Can we automate these 'pass through' operations?
object BasicOperators extends Strategy {
def numPartitions = sqlConf.numShufflePartitions(200)
def numPartitions = sqlConf.numShufflePartitions

def apply(plan: LogicalPlan): Seq[SparkPlan] = plan match {
case logical.Distinct(child) =>
Expand Down

0 comments on commit baa5d29

Please sign in to comment.