diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/CompactibleFileStreamLog.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/CompactibleFileStreamLog.scala
index 664f4bf4786f5..fa1beb9d15c75 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/CompactibleFileStreamLog.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/CompactibleFileStreamLog.scala
@@ -20,7 +20,6 @@ package org.apache.spark.sql.execution.streaming
 import java.io.{FileNotFoundException, InputStream, IOException, OutputStream}
 import java.nio.charset.StandardCharsets.UTF_8
 
-import scala.annotation.nowarn
 import scala.io.{Source => IOSource}
 import scala.reflect.ClassTag
 
@@ -53,7 +52,7 @@ abstract class CompactibleFileStreamLog[T <: AnyRef : ClassTag](
   private implicit val formats: Formats = Serialization.formats(NoTypeHints)
 
   /** Needed to serialize type T into JSON when using Jackson */
-  @nowarn
+  @scala.annotation.nowarn
   private implicit val manifest = Manifest.classType[T](implicitly[ClassTag[T]].runtimeClass)
 
   protected val minBatchesToRetain = sparkSession.sessionState.conf.minBatchesToRetain
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/FileStreamSourceLog.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/FileStreamSourceLog.scala
index 64d7b00e6910a..14653864a2922 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/FileStreamSourceLog.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/FileStreamSourceLog.scala
@@ -33,8 +33,8 @@ class FileStreamSourceLog(
     path: String)
   extends CompactibleFileStreamLog[FileEntry](metadataLogVersion, sparkSession, path) {
 
-  import org.apache.spark.sql.execution.streaming.CompactibleFileStreamLog._
-  import org.apache.spark.sql.execution.streaming.FileStreamSourceLog._
+  import CompactibleFileStreamLog._
+  import FileStreamSourceLog._
 
   // Configurations about metadata compaction
   protected override val defaultCompactInterval: Int =
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/HDFSMetadataLog.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/HDFSMetadataLog.scala
index c03c0e82dd516..79627030e1eba 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/HDFSMetadataLog.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/HDFSMetadataLog.scala
@@ -21,7 +21,6 @@ import java.io._
 import java.nio.charset.StandardCharsets
 import java.util.{Collections, LinkedHashMap => JLinkedHashMap}
 
-import scala.annotation.nowarn
 import scala.jdk.CollectionConverters._
 import scala.reflect.ClassTag
 
@@ -54,7 +53,7 @@ class HDFSMetadataLog[T <: AnyRef : ClassTag](sparkSession: SparkSession, path:
   private implicit val formats: Formats = Serialization.formats(NoTypeHints)
 
   /** Needed to serialize type T into JSON when using Jackson */
-  @nowarn
+  @scala.annotation.nowarn
   private implicit val manifest = Manifest.classType[T](implicitly[ClassTag[T]].runtimeClass)
 
   // Avoid serializing generic sequences, see SPARK-17372
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/state/OperatorStateMetadata.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/state/OperatorStateMetadata.scala
index 6438f19c9f776..b58c805af9d60 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/state/OperatorStateMetadata.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/state/OperatorStateMetadata.scala
@@ -20,7 +20,6 @@ package org.apache.spark.sql.execution.streaming.state
 import java.io.{BufferedReader, InputStreamReader}
 import java.nio.charset.StandardCharsets
 
-import scala.annotation.nowarn
 import scala.reflect.ClassTag
 
 import org.apache.hadoop.conf.Configuration
@@ -67,7 +66,7 @@ object OperatorStateMetadataV1 {
   private implicit val formats: Formats = Serialization.formats(NoTypeHints)
 
-  @nowarn
+  @scala.annotation.nowarn
   private implicit val manifest = Manifest
     .classType[OperatorStateMetadataV1](implicitly[ClassTag[OperatorStateMetadataV1]].runtimeClass)
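Every hunk above applies the same mechanical change: drop the `import scala.annotation.nowarn` line and use the fully qualified `@scala.annotation.nowarn` on the implicit `Manifest` that json4s/Jackson needs to serialize the generic metadata type. The sketch below shows that pattern in isolation; it is not part of the patch, the class name `JsonMetadataLog` is hypothetical, and it assumes json4s is on the classpath (as it is in Spark).

// Minimal sketch of the shared pattern, assuming json4s' Jackson-backed Serialization.
// Building the Manifest from the ClassTag can trip a deprecation warning on newer
// Scala versions, so the declaration is silenced with the fully qualified annotation
// rather than an import of scala.annotation.nowarn.
import scala.reflect.ClassTag

import org.json4s.{Formats, NoTypeHints}
import org.json4s.jackson.Serialization

class JsonMetadataLog[T <: AnyRef : ClassTag] {

  private implicit val formats: Formats = Serialization.formats(NoTypeHints)

  /** Needed to serialize type T into JSON when using Jackson */
  @scala.annotation.nowarn
  private implicit val manifest = Manifest.classType[T](implicitly[ClassTag[T]].runtimeClass)

  // Round-trip a metadata value through its JSON representation.
  def serialize(metadata: T): String = Serialization.write(metadata)

  def deserialize(json: String): T = Serialization.read[T](json)
}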