From b46c58a861a87ae6d033edd536f169fdea100f70 Mon Sep 17 00:00:00 2001
From: yangjie01
Date: Mon, 24 May 2021 14:29:52 +0800
Subject: [PATCH] fix scala-2.13.6 compile

---
 .../main/scala/org/apache/spark/SparkContext.scala |  2 +-
 .../spark/storage/DiskBlockObjectWriter.scala      |  2 +-
 pom.xml                                            |  2 +-
 .../spark/sql/catalyst/util/DateFormatter.scala    |  1 +
 .../sql/catalyst/util/TimestampFormatter.scala     |  1 +
 .../streaming/continuous/EpochTracker.scala        |  2 +-
 .../org/apache/spark/sql/SQLQueryTestSuite.scala   | 14 +++++++-------
 .../spark/sql/SparkSessionExtensionSuite.scala     |  2 +-
 .../thriftserver/ThriftServerQueryTestSuite.scala  |  4 ++--
 9 files changed, 16 insertions(+), 14 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index ef47252189bbb..9580b51bf30ff 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -352,7 +352,7 @@ class SparkContext(config: SparkConf) extends Logging {
 
   // Thread Local variable that can be used by users to pass information down the stack
   protected[spark] val localProperties = new InheritableThreadLocal[Properties] {
-    override protected def childValue(parent: Properties): Properties = {
+    override def childValue(parent: Properties): Properties = {
       // Note: make a clone such that changes in the parent properties aren't reflected in
       // the those of the children threads, which has confusing semantics (SPARK-10563).
       Utils.cloneProperties(parent)
diff --git a/core/src/main/scala/org/apache/spark/storage/DiskBlockObjectWriter.scala b/core/src/main/scala/org/apache/spark/storage/DiskBlockObjectWriter.scala
index e55c09274cd9a..d4afb1e2d5a21 100644
--- a/core/src/main/scala/org/apache/spark/storage/DiskBlockObjectWriter.scala
+++ b/core/src/main/scala/org/apache/spark/storage/DiskBlockObjectWriter.scala
@@ -283,7 +283,7 @@ private[spark] class DiskBlockObjectWriter(
   }
 
   // For testing
-  private[spark] override def flush(): Unit = {
+  override def flush(): Unit = {
     objOut.flush()
     bs.flush()
   }
diff --git a/pom.xml b/pom.xml
index 6b1b63c21bb25..c2907a1e8ed08 100644
--- a/pom.xml
+++ b/pom.xml
@@ -3342,7 +3342,7 @@
     <profile>
       <id>scala-2.13</id>
       <properties>
-        <scala.version>2.13.5</scala.version>
+        <scala.version>2.13.6</scala.version>
         <scala.binary.version>2.13</scala.binary.version>
       </properties>
       <dependencyManagement>
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateFormatter.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateFormatter.scala
index da80e629ee31d..bd1ac026b9bd1 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateFormatter.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateFormatter.scala
@@ -81,6 +81,7 @@ class Iso8601DateFormatter(
     try {
       formatter
     } catch checkLegacyFormatter(pattern, legacyFormatter.validatePatternString)
+    ()
   }
 }
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/TimestampFormatter.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/TimestampFormatter.scala
index 6832d1a9954fb..e06f828eebcd5 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/TimestampFormatter.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/TimestampFormatter.scala
@@ -107,6 +107,7 @@ class Iso8601TimestampFormatter(
     try {
       formatter
     } catch checkLegacyFormatter(pattern, legacyFormatter.validatePatternString)
+    ()
   }
 }
 
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/continuous/EpochTracker.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/continuous/EpochTracker.scala
index 631ae4806d2f4..30832c6542bc9 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/continuous/EpochTracker.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/continuous/EpochTracker.scala
@@ -28,7 +28,7 @@ object EpochTracker {
   // update the underlying AtomicLong as it finishes epochs. Other code should only read the value.
   private val currentEpoch: InheritableThreadLocal[AtomicLong] = {
     new InheritableThreadLocal[AtomicLong] {
-      override protected def childValue(parent: AtomicLong): AtomicLong = {
+      override def childValue(parent: AtomicLong): AtomicLong = {
         // Note: make another instance so that changes in the parent epoch aren't reflected in
         // those in the children threads. This is required at `ContinuousCoalesceRDD`.
         new AtomicLong(parent.get)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala
index 609e0094f9ee4..84c7d1006891b 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala
@@ -222,13 +222,13 @@ class SQLQueryTestSuite extends QueryTest with SharedSparkSession with SQLHelper
       // Create a test case to ignore this case.
       ignore(testCase.name) { /* Do nothing */ }
     } else testCase match {
-      case udfTestCase: UDFTest
+      case udfTestCase: SQLQueryTestSuite#UDFTest
           if udfTestCase.udf.isInstanceOf[TestPythonUDF] && !shouldTestPythonUDFs =>
         ignore(s"${testCase.name} is skipped because " +
           s"[$pythonExec] and/or pyspark were not available.") {
           /* Do nothing */
         }
-      case udfTestCase: UDFTest
+      case udfTestCase: SQLQueryTestSuite#UDFTest
           if udfTestCase.udf.isInstanceOf[TestScalarPandasUDF] && !shouldTestScalarPandasUDFs =>
         ignore(s"${testCase.name} is skipped because pyspark," +
           s"pandas and/or pyarrow were not available in [$pythonExec].") {
@@ -354,13 +354,13 @@ class SQLQueryTestSuite extends QueryTest with SharedSparkSession with SQLHelper
     val localSparkSession = spark.newSession()
 
     testCase match {
-      case udfTestCase: UDFTest =>
+      case udfTestCase: SQLQueryTestSuite#UDFTest =>
         registerTestUDF(udfTestCase.udf, localSparkSession)
       case _ =>
     }
 
     testCase match {
-      case _: PgSQLTest =>
+      case _: SQLQueryTestSuite#PgSQLTest =>
         // booleq/boolne used by boolean.sql
         localSparkSession.udf.register("booleq", (b1: Boolean, b2: Boolean) => b1 == b2)
         localSparkSession.udf.register("boolne", (b1: Boolean, b2: Boolean) => b1 != b2)
@@ -368,7 +368,7 @@ class SQLQueryTestSuite extends QueryTest with SharedSparkSession with SQLHelper
         localSparkSession.udf.register("vol", (s: String) => s)
         localSparkSession.conf.set(SQLConf.ANSI_ENABLED.key, true)
         localSparkSession.conf.set(SQLConf.LEGACY_INTERVAL_ENABLED.key, true)
-      case _: AnsiTest =>
+      case _: SQLQueryTestSuite#AnsiTest =>
         localSparkSession.conf.set(SQLConf.ANSI_ENABLED.key, true)
       case _ =>
     }
@@ -411,10 +411,10 @@ class SQLQueryTestSuite extends QueryTest with SharedSparkSession with SQLHelper
     // See also SPARK-29127. It is difficult to see the version information in the failed test
     // cases so the version information related to Python was also added.
     val clue = testCase match {
-      case udfTestCase: UDFTest
+      case udfTestCase: SQLQueryTestSuite#UDFTest
           if udfTestCase.udf.isInstanceOf[TestPythonUDF] && shouldTestPythonUDFs =>
         s"${testCase.name}${System.lineSeparator()}Python: $pythonVer${System.lineSeparator()}"
-      case udfTestCase: UDFTest
+      case udfTestCase: SQLQueryTestSuite#UDFTest
          if udfTestCase.udf.isInstanceOf[TestScalarPandasUDF] && shouldTestScalarPandasUDFs =>
         s"${testCase.name}${System.lineSeparator()}" +
           s"Python: $pythonVer Pandas: $pandasVer PyArrow: $pyarrowVer${System.lineSeparator()}"
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala
index c8768ec2c5af1..9fda5694245c8 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala
@@ -509,7 +509,7 @@ case class NoCloseColumnVector(wrapped: ColumnVector) extends ColumnVector(wrapp
 
   override def getBinary(rowId: Int): Array[Byte] = wrapped.getBinary(rowId)
 
-  override protected def getChild(ordinal: Int): ColumnVector = wrapped.getChild(ordinal)
+  override def getChild(ordinal: Int): ColumnVector = wrapped.getChild(ordinal)
 }
 
 trait ColumnarExpression extends Expression with Serializable {
diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerQueryTestSuite.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerQueryTestSuite.scala
index 1d03c90cc812b..caeb64da3cce3 100644
--- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerQueryTestSuite.scala
+++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerQueryTestSuite.scala
@@ -107,10 +107,10 @@ class ThriftServerQueryTestSuite extends SQLQueryTestSuite with SharedThriftServ
     }
 
     testCase match {
-      case _: PgSQLTest =>
+      case _: SQLQueryTestSuite#PgSQLTest =>
        statement.execute(s"SET ${SQLConf.ANSI_ENABLED.key} = true")
        statement.execute(s"SET ${SQLConf.LEGACY_INTERVAL_ENABLED.key} = true")
-      case _: AnsiTest =>
+      case _: SQLQueryTestSuite#AnsiTest =>
        statement.execute(s"SET ${SQLConf.ANSI_ENABLED.key} = true")
      case _ =>
        statement.execute(s"SET ${SQLConf.ANSI_ENABLED.key} = false")
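
--
The patch makes three recurring kinds of change, which can be reproduced in
isolation. The sketch below is illustrative only: the object and member names
(Scala2136Sketch, validate, classify, etc.) are invented, not taken from
Spark, and it reflects a reading of why each change is needed, assuming a
Scala 2.13.6 compiler with warnings promoted to errors as in Spark's build.

import java.util.Properties

object Scala2136Sketch {
  // 1. Scala 2.13.6 rejects overrides that narrow the access of a
  //    Java-defined member, so `override protected def childValue` is
  //    widened to a public override (likewise flush() and getChild() above).
  val localProps = new InheritableThreadLocal[Properties] {
    override def childValue(parent: Properties): Properties =
      new Properties(parent)
  }

  // 2. validatePatternString(): Unit ended in a non-Unit try/catch
  //    expression; the appended () moves that expression out of result
  //    position, so the compiler no longer adapts its value to Unit and
  //    no longer reports it as a discarded non-Unit value.
  val handler: PartialFunction[Throwable, String] = {
    case _: RuntimeException => "fallback"
  }
  def validate(): Unit = {
    try "parsed" catch handler
    ()
  }

  // 3. Matching on an inner-class type from another context appears to trip
  //    2.13.6's stricter outer-reference check; a type projection
  //    (Outer#Inner) matches any Inner regardless of its outer instance and
  //    generates no outer check, which is why the test suites switch to
  //    SQLQueryTestSuite#UDFTest, SQLQueryTestSuite#PgSQLTest, and friends.
  class Outer { class Inner }
  def classify(x: Any): String = x match {
    case _: Outer#Inner => "inner"
    case _ => "other"
  }
}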