From 688358e34c4f0d47755fc97a5fc577aaabd5aafc Mon Sep 17 00:00:00 2001 From: WangTao Date: Wed, 18 Jun 2014 21:46:45 +0800 Subject: [PATCH] Remove unused val in SparkContext and document MetadataCleaner.setDelaySeconds --- core/src/main/scala/org/apache/spark/SparkContext.scala | 1 - .../main/scala/org/apache/spark/util/MetadataCleaner.scala | 7 ++++++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala index 0678bdd02110e..f9476ff826a62 100644 --- a/core/src/main/scala/org/apache/spark/SparkContext.scala +++ b/core/src/main/scala/org/apache/spark/SparkContext.scala @@ -224,7 +224,6 @@ class SparkContext(config: SparkConf) extends Logging { /** A default Hadoop Configuration for the Hadoop code (e.g. file systems) that we reuse. */ val hadoopConfiguration: Configuration = { - val env = SparkEnv.get val hadoopConf = SparkHadoopUtil.get.newConfiguration() // Explicitly check for S3 environment variables if (System.getenv("AWS_ACCESS_KEY_ID") != null && diff --git a/core/src/main/scala/org/apache/spark/util/MetadataCleaner.scala b/core/src/main/scala/org/apache/spark/util/MetadataCleaner.scala index 7ebed5105b9fd..2889e171f627e 100644 --- a/core/src/main/scala/org/apache/spark/util/MetadataCleaner.scala +++ b/core/src/main/scala/org/apache/spark/util/MetadataCleaner.scala @@ -91,8 +91,13 @@ private[spark] object MetadataCleaner { conf.set(MetadataCleanerType.systemProperty(cleanerType), delay.toString) } + /** + * Set the default delay time (in seconds), i.e. the value of spark.cleaner.ttl. + * @param conf the SparkConf instance to update + * @param delay the default delay time, in seconds, to set + * @param resetAll whether to also reset each cleaner type's delay to this default + */ def setDelaySeconds(conf: SparkConf, delay: Int, resetAll: Boolean = true) { - // override for all ? conf.set("spark.cleaner.ttl", delay.toString) if (resetAll) { for (cleanerType <- MetadataCleanerType.values) {