From 338d7bfc79e91eeeb11f2224ad9278624cdd03c5 Mon Sep 17 00:00:00 2001 From: Holden Karau Date: Fri, 1 May 2015 13:06:37 -0700 Subject: [PATCH] rename setLoggingLevel to setLogLevel --- core/src/main/scala/org/apache/spark/SparkContext.scala | 4 ++-- .../scala/org/apache/spark/api/java/JavaSparkContext.scala | 4 ++-- core/src/main/scala/org/apache/spark/util/Utils.scala | 2 +- core/src/test/scala/org/apache/spark/util/UtilsSuite.scala | 4 ++-- python/pyspark/context.py | 4 ++-- python/pyspark/sql/dataframe.py | 2 +- 6 files changed, 10 insertions(+), 10 deletions(-) diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala index 3ab00024585c9..6d1e10a5f3845 100644 --- a/core/src/main/scala/org/apache/spark/SparkContext.scala +++ b/core/src/main/scala/org/apache/spark/SparkContext.scala @@ -348,8 +348,8 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli * Valid log levels include: ALL, DEBUG, ERROR, FATAL, INFO, OFF, TRACE, WARN * Invalid log level defaults to DEBUG */ - def setLoggingLevel(logLevel: String) { - Utils.setLoggingLevel(org.apache.log4j.Level.toLevel(logLevel)) + def setLogLevel(logLevel: String) { + Utils.setLogLevel(org.apache.log4j.Level.toLevel(logLevel)) } try { diff --git a/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala b/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala index bde3d129646ba..c1f78cd891686 100644 --- a/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala +++ b/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala @@ -760,8 +760,8 @@ class JavaSparkContext(val sc: SparkContext) * Valid log levels include: ALL, DEBUG, ERROR, FATAL, INFO, OFF, TRACE, WARN * Invalid log level defaults to DEBUG */ - def setLoggingLevel(logLevel: String) { - sc.setLoggingLevel(logLevel) + def setLogLevel(logLevel: String) { + sc.setLogLevel(logLevel) } /** diff --git 
a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala index c0fe07ff64740..844f0cd22d95d 100644 --- a/core/src/main/scala/org/apache/spark/util/Utils.scala +++ b/core/src/main/scala/org/apache/spark/util/Utils.scala @@ -2025,7 +2025,7 @@ private[spark] object Utils extends Logging { /** * configure a new log4j level */ - def setLoggingLevel(l: org.apache.log4j.Level) { + def setLogLevel(l: org.apache.log4j.Level) { org.apache.log4j.Logger.getRootLogger().setLevel(l) } diff --git a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala index 7b955ceb6c750..651ead6ff1de2 100644 --- a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala +++ b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala @@ -478,9 +478,9 @@ class UtilsSuite extends FunSuite with ResetSystemProperties with Logging { // Test for using the util function to change our log levels. test("log4j log level change") { - Utils.setLoggingLevel(org.apache.log4j.Level.ALL) + Utils.setLogLevel(org.apache.log4j.Level.ALL) assert(log.isInfoEnabled()) - Utils.setLoggingLevel(org.apache.log4j.Level.ERROR) + Utils.setLogLevel(org.apache.log4j.Level.ERROR) assert(!log.isInfoEnabled()) assert(log.isErrorEnabled()) } diff --git a/python/pyspark/context.py b/python/pyspark/context.py index f97da438c80ab..b27240a0c9a44 100644 --- a/python/pyspark/context.py +++ b/python/pyspark/context.py @@ -267,13 +267,13 @@ def __exit__(self, type, value, trace): """ self.stop() - def setLoggingLevel(self, logLevel): + def setLogLevel(self, logLevel): """ Control our logLevel. This overrides any user-defined log settings. 
Valid log levels include: ALL, DEBUG, ERROR, FATAL, INFO, OFF, TRACE, WARN Invalid log level defaults to DEBUG """ - self._jsc.setLoggingLevel(logLevel) + self._jsc.setLogLevel(logLevel) @classmethod def setSystemProperty(cls, key, value): diff --git a/python/pyspark/sql/dataframe.py b/python/pyspark/sql/dataframe.py index d9cbbc68b3bf0..c45eaf296f410 100644 --- a/python/pyspark/sql/dataframe.py +++ b/python/pyspark/sql/dataframe.py @@ -1,6 +1,6 @@ # # Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with +# contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with