diff --git a/streaming/src/main/scala/org/apache/spark/streaming/dstream/FileInputDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/FileInputDStream.scala
index 948d16a6e7020..e9fb3ec163ec3 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/dstream/FileInputDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/FileInputDStream.scala
@@ -86,9 +86,8 @@ class FileInputDStream[K, V, F <: NewInputFormat[K,V]](
    * Files with mod times older than this "window" of remembering will be ignored. So if new
    * files are visible within this window, then the file will get selected in the next batch.
    */
-  private val minRememberDurationMin = Minutes(ssc.sparkContext.getConf
-    .get("spark.streaming.minRememberDurationMin", "1")
-    .toLong)
+  private val minRememberDurationMin = Minutes(ssc.conf
+    .getLong("spark.streaming.minRememberDurationMin", 1L))
 
   // This is a def so that it works during checkpoint recovery:
   private def clock = ssc.scheduler.clock
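
For context, here is a minimal sketch (not part of the patch) contrasting the two ways of reading the setting. It uses a standalone `SparkConf` rather than the `StreamingContext`'s conf, and the object name is illustrative only:

```scala
import org.apache.spark.SparkConf

object MinRememberDurationExample {
  def main(args: Array[String]): Unit = {
    // Hypothetical conf with the setting present; loadDefaults = false keeps it self-contained.
    val conf = new SparkConf(loadDefaults = false)
      .set("spark.streaming.minRememberDurationMin", "5")

    // Old style: fetch the value as a String with a String default, then parse it manually.
    val oldStyle = conf.get("spark.streaming.minRememberDurationMin", "1").toLong

    // New style: getLong takes a typed Long default and does the parsing for us.
    val newStyle = conf.getLong("spark.streaming.minRememberDurationMin", 1L)

    assert(oldStyle == newStyle)
    println(s"minRememberDurationMin = $newStyle minute(s)")
  }
}
```

Both forms return the same value; the `getLong` variant is just shorter and avoids the string default plus manual `.toLong` conversion.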