Skip to content

Commit

Permalink
Add support for changing the log level at run time through the SparkC…
Browse files Browse the repository at this point in the history
…ontext. Based on an earlier PR, apache#2433 includes CR feedback from @pwendel & @davies
  • Loading branch information
holdenk committed Apr 29, 2015
1 parent c9d530e commit d9d03f3
Show file tree
Hide file tree
Showing 5 changed files with 58 additions and 15 deletions.
9 changes: 9 additions & 0 deletions core/src/main/scala/org/apache/spark/SparkContext.scala
Original file line number Diff line number Diff line change
Expand Up @@ -343,6 +343,15 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
value
}

/** Control our logLevel. This overrides any user-defined log settings.
* @param logLevel The desired log level as a string.
* Valid log levels include: ALL, DEBUG, ERROR, FATAL, INFO, OFF, TRACE, WARN
* Invalid log level defaults to DEBUG
*/
def setLoggingLevel(logLevel: String) = {
Utils.setLoggingLevel(org.apache.log4j.Level.toLevel(logLevel))
}

try {
_conf = config.clone()
_conf.validateSettings()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -755,6 +755,15 @@ class JavaSparkContext(val sc: SparkContext)
*/
def getLocalProperty(key: String): String = sc.getLocalProperty(key)

/** Control our logLevel. This overrides any user-defined log settings.
* @param logLevel The desired log level as a string.
* Valid log levels include: ALL, DEBUG, ERROR, FATAL, INFO, OFF, TRACE, WARN
* Invalid log level defaults to DEBUG
*/
def setLoggingLevel(logLevel: String) = {
sc.setLoggingLevel(logLevel)
}

/**
* Assigns a group ID to all the jobs started by this thread until the group ID is set to a
* different value or cleared.
Expand Down
7 changes: 7 additions & 0 deletions core/src/main/scala/org/apache/spark/util/Utils.scala
Original file line number Diff line number Diff line change
Expand Up @@ -2022,6 +2022,13 @@ private[spark] object Utils extends Logging {
}
}

/**
* configure a new log4j level
*/
def setLoggingLevel(l: org.apache.log4j.Level) {
org.apache.log4j.Logger.getRootLogger().setLevel(l)
}

/**
* config a log4j properties used for testsuite
*/
Expand Down
40 changes: 25 additions & 15 deletions core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
Original file line number Diff line number Diff line change
Expand Up @@ -35,9 +35,10 @@ import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path

import org.apache.spark.network.util.ByteUnit
import org.apache.spark.Logging
import org.apache.spark.SparkConf

class UtilsSuite extends FunSuite with ResetSystemProperties {
class UtilsSuite extends FunSuite with ResetSystemProperties with Logging {

test("timeConversion") {
// Test -1
Expand Down Expand Up @@ -68,7 +69,7 @@ class UtilsSuite extends FunSuite with ResetSystemProperties {
intercept[NumberFormatException] {
Utils.timeStringAsMs("600l")
}

intercept[NumberFormatException] {
Utils.timeStringAsMs("This breaks 600s")
}
Expand Down Expand Up @@ -99,7 +100,7 @@ class UtilsSuite extends FunSuite with ResetSystemProperties {
assert(Utils.byteStringAsGb("1k") === 0)
assert(Utils.byteStringAsGb("1t") === ByteUnit.TiB.toGiB(1))
assert(Utils.byteStringAsGb("1p") === ByteUnit.PiB.toGiB(1))

assert(Utils.byteStringAsMb("1") === 1)
assert(Utils.byteStringAsMb("1m") === 1)
assert(Utils.byteStringAsMb("1048575b") === 0)
Expand All @@ -118,7 +119,7 @@ class UtilsSuite extends FunSuite with ResetSystemProperties {
assert(Utils.byteStringAsKb("1g") === ByteUnit.GiB.toKiB(1))
assert(Utils.byteStringAsKb("1t") === ByteUnit.TiB.toKiB(1))
assert(Utils.byteStringAsKb("1p") === ByteUnit.PiB.toKiB(1))

assert(Utils.byteStringAsBytes("1") === 1)
assert(Utils.byteStringAsBytes("1k") === ByteUnit.KiB.toBytes(1))
assert(Utils.byteStringAsBytes("1m") === ByteUnit.MiB.toBytes(1))
Expand All @@ -127,17 +128,17 @@ class UtilsSuite extends FunSuite with ResetSystemProperties {
assert(Utils.byteStringAsBytes("1p") === ByteUnit.PiB.toBytes(1))

// Overflow handling, 1073741824p exceeds Long.MAX_VALUE if converted straight to Bytes
// This demonstrates that we can have e.g 1024^3 PB without overflowing.
// This demonstrates that we can have e.g 1024^3 PB without overflowing.
assert(Utils.byteStringAsGb("1073741824p") === ByteUnit.PiB.toGiB(1073741824))
assert(Utils.byteStringAsMb("1073741824p") === ByteUnit.PiB.toMiB(1073741824))

// Run this to confirm it doesn't throw an exception
assert(Utils.byteStringAsBytes("9223372036854775807") === 9223372036854775807L)
assert(Utils.byteStringAsBytes("9223372036854775807") === 9223372036854775807L)
assert(ByteUnit.PiB.toPiB(9223372036854775807L) === 9223372036854775807L)

// Test overflow exception
intercept[IllegalArgumentException] {
// This value exceeds Long.MAX when converted to bytes
// This value exceeds Long.MAX when converted to bytes
Utils.byteStringAsBytes("9223372036854775808")
}

Expand All @@ -146,22 +147,22 @@ class UtilsSuite extends FunSuite with ResetSystemProperties {
// This value exceeds Long.MAX when converted to TB
ByteUnit.PiB.toTiB(9223372036854775807L)
}

// Test fractional string
intercept[NumberFormatException] {
Utils.byteStringAsMb("0.064")
}

// Test fractional string
intercept[NumberFormatException] {
Utils.byteStringAsMb("0.064m")
}

// Test invalid strings
intercept[NumberFormatException] {
Utils.byteStringAsBytes("500ub")
}

// Test invalid strings
intercept[NumberFormatException] {
Utils.byteStringAsBytes("This breaks 600b")
Expand All @@ -174,12 +175,12 @@ class UtilsSuite extends FunSuite with ResetSystemProperties {
intercept[NumberFormatException] {
Utils.byteStringAsBytes("600gb This breaks")
}

intercept[NumberFormatException] {
Utils.byteStringAsBytes("This 123mb breaks")
}
}

test("bytesToString") {
assert(Utils.bytesToString(10) === "10.0 B")
assert(Utils.bytesToString(1500) === "1500.0 B")
Expand Down Expand Up @@ -475,6 +476,15 @@ class UtilsSuite extends FunSuite with ResetSystemProperties {
}
}

  // Test for using the util function to change our log levels.
  // NOTE: this mutates global log4j state (the root logger), so the
  // assertion order matters: each assert must follow the setLoggingLevel
  // call whose effect it checks.
  test("log4j log level change") {
    // ALL is the most permissive level, so INFO must be enabled.
    Utils.setLoggingLevel(org.apache.log4j.Level.ALL)
    assert(log.isInfoEnabled())
    // ERROR is stricter than INFO: INFO must now be disabled, ERROR enabled.
    Utils.setLoggingLevel(org.apache.log4j.Level.ERROR)
    assert(!log.isInfoEnabled())
    assert(log.isErrorEnabled())
  }

test("deleteRecursively") {
val tempDir1 = Utils.createTempDir()
assert(tempDir1.exists())
Expand Down
8 changes: 8 additions & 0 deletions python/pyspark/context.py
Original file line number Diff line number Diff line change
Expand Up @@ -267,6 +267,14 @@ def __exit__(self, type, value, trace):
"""
self.stop()

    def setLoggingLevel(self, logLevel):
        """
        Control our logLevel. This overrides any user-defined log settings.

        Valid log levels include: ALL, DEBUG, ERROR, FATAL, INFO, OFF, TRACE, WARN.
        An invalid log level defaults to DEBUG.

        :param logLevel: the desired log level, as a string.
        """
        # Delegate to the JVM-side JavaSparkContext, which changes the
        # root log4j logger level for the whole driver JVM.
        self._jsc.setLoggingLevel(logLevel)

@classmethod
def setSystemProperty(cls, key, value):
"""
Expand Down

0 comments on commit d9d03f3

Please sign in to comment.