Commit: Add configuration options for heap vs. offheap

JoshRosen committed Apr 24, 2015
1 parent 6ffdaa1 commit 9c19fc0
Showing 5 changed files with 26 additions and 6 deletions.
16 changes: 15 additions & 1 deletion sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala
@@ -30,7 +30,8 @@ private[spark] object SQLConf {
val DEFAULT_SIZE_IN_BYTES = "spark.sql.defaultSizeInBytes"
val SHUFFLE_PARTITIONS = "spark.sql.shuffle.partitions"
val CODEGEN_ENABLED = "spark.sql.codegen"
val UNSAFE_ENABLED = "spark.sql.unsafe"
val UNSAFE_ENABLED = "spark.sql.unsafe.enabled"
val UNSAFE_USE_OFF_HEAP = "spark.sql.unsafe.offHeap"
val DIALECT = "spark.sql.dialect"

val PARQUET_BINARY_AS_STRING = "spark.sql.parquet.binaryAsString"
@@ -150,8 +151,21 @@ private[sql] class SQLConf extends Serializable {
*/
private[spark] def codegenEnabled: Boolean = getConf(CODEGEN_ENABLED, "false").toBoolean

/**
* When set to true, Spark SQL will use managed memory for certain operations. This option only
* takes effect if codegen is enabled.
*
* Defaults to false as this feature is currently experimental.
*/
private[spark] def unsafeEnabled: Boolean = getConf(UNSAFE_ENABLED, "false").toBoolean

/**
* When set to true, Spark SQL will use off-heap memory allocation for managed memory operations.
*
* Defaults to false.
*/
private[spark] def unsafeUseOffHeap: Boolean = getConf(UNSAFE_USE_OFF_HEAP, "false").toBoolean

private[spark] def useSqlSerializer2: Boolean = getConf(USE_SQL_SERIALIZER2, "true").toBoolean

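As a quick usage sketch (not part of the commit), the two flags defined above can be toggled on a SQLContext via setConf; the local SparkContext setup and app name below are illustrative assumptions.

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

// Illustrative setup only: a local SparkContext and a fresh SQLContext.
val sc = new SparkContext(new SparkConf().setMaster("local[2]").setAppName("unsafe-config-sketch"))
val sqlContext = new SQLContext(sc)

// The unsafe path only takes effect when codegen is enabled (see the scaladoc above).
sqlContext.setConf("spark.sql.codegen", "true")

// Enable managed-memory (Unsafe) aggregation buffers; the default is "false".
sqlContext.setConf("spark.sql.unsafe.enabled", "true")

// Choose the allocator backing those buffers:
//   "true"  -> off-heap allocation
//   "false" -> on-heap allocation (the default)
sqlContext.setConf("spark.sql.unsafe.offHeap", "true")

Like other spark.sql.* options, these keys can also be set at runtime with a SQL SET command (e.g. SET spark.sql.unsafe.enabled=true).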
2 changes: 2 additions & 0 deletions sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
@@ -1013,6 +1013,8 @@ class SQLContext(@transient val sparkContext: SparkContext)

def unsafeEnabled: Boolean = self.conf.unsafeEnabled

def unsafeUseOffHeap: Boolean = self.conf.unsafeUseOffHeap

def numPartitions: Int = self.conf.numShufflePartitions

def strategies: Seq[Strategy] =
6 changes: 4 additions & 2 deletions sql/core/src/main/scala/org/apache/spark/sql/execution/GeneratedAggregate.scala
@@ -43,14 +43,16 @@ case class AggregateEvaluation(
* @param aggregateExpressions expressions that are computed for each group.
* @param child the input data source.
* @param unsafeEnabled whether to allow Unsafe-based aggregation buffers to be used.
* @param useOffHeap whether to use off-heap allocation (only takes effect if unsafeEnabled=true)
*/
@DeveloperApi
case class GeneratedAggregate(
partial: Boolean,
groupingExpressions: Seq[Expression],
aggregateExpressions: Seq[NamedExpression],
child: SparkPlan,
unsafeEnabled: Boolean)
unsafeEnabled: Boolean,
useOffHeap: Boolean)
extends UnaryNode {

override def requiredChildDistribution: Seq[Distribution] =
@@ -289,7 +291,7 @@ case class GeneratedAggregate(
newAggregationBuffer(EmptyRow),
aggregationBufferSchema,
groupKeySchema,
MemoryAllocator.UNSAFE,
if (useOffHeap) MemoryAllocator.UNSAFE else MemoryAllocator.HEAP,
1024 * 16,
false
)
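The only behavioral change in this file is the allocator selection shown above. A small standalone sketch (not from the commit) of that same choice, written against the unsafe module's MemoryAllocator; the allocate/free calls and the org.apache.spark.unsafe.memory package path are assumptions based on that interface.

import org.apache.spark.unsafe.memory.{MemoryAllocator, MemoryBlock}

// Mirrors the branch above: the flag decides whether the aggregation map's
// memory comes from off-heap (sun.misc.Unsafe) or on-heap allocation.
def chooseAllocator(useOffHeap: Boolean): MemoryAllocator =
  if (useOffHeap) MemoryAllocator.UNSAFE else MemoryAllocator.HEAP

// Illustration: allocate and release a 16 KB block from the selected allocator.
val allocator = chooseAllocator(useOffHeap = false)
val block: MemoryBlock = allocator.allocate(16 * 1024)
allocator.free(block)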
6 changes: 4 additions & 2 deletions sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
@@ -141,8 +141,10 @@ private[sql] abstract class SparkStrategies extends QueryPlanner[SparkPlan] {
groupingExpressions,
partialComputation,
planLater(child),
unsafeEnabled),
unsafeEnabled) :: Nil
unsafeEnabled,
unsafeUseOffHeap),
unsafeEnabled,
unsafeUseOffHeap) :: Nil

// Cases where some aggregate can not be codegened
case PartialAggregation(
2 changes: 1 addition & 1 deletion (fifth changed file; path not shown)
@@ -19,7 +19,7 @@ package org.apache.spark.sql

import org.scalatest.BeforeAndAfterAll

import org.apache.spark.sql.execution.{GeneratedAggregate}
import org.apache.spark.sql.execution.GeneratedAggregate
import org.apache.spark.sql.functions._
import org.apache.spark.sql.TestData._
import org.apache.spark.sql.test.TestSQLContext
