diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala
index 07361cfdce9da..9284b35fb3e35 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala
@@ -19,7 +19,6 @@ package org.apache.spark.sql.hive.test
 
 import java.io.File
 import java.net.URI
-import java.util.{Set => JavaSet}
 
 import scala.collection.JavaConverters._
 import scala.collection.mutable
@@ -27,7 +26,6 @@ import scala.collection.mutable
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.Path
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars
-import org.apache.hadoop.hive.ql.exec.FunctionRegistry
 import org.apache.hadoop.hive.serde2.`lazy`.LazySimpleSerDe
 
 import org.apache.spark.{SparkConf, SparkContext}
@@ -523,12 +521,6 @@ private[hive] class TestHiveSparkSession(
     }
   }
 
-  /**
-   * Records the UDFs present when the server starts, so we can delete ones that are created by
-   * tests.
-   */
-  protected val originalUDFs: JavaSet[String] = FunctionRegistry.getFunctionNames
-
   /**
    * Resets the test instance by deleting any table, view, temp view, and UDF that have been created
    */
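
For context, here is a minimal sketch (not part of the patch) of the bookkeeping that the removed `originalUDFs` field enabled: snapshot Hive's registered function names when the test session starts, then treat any name outside that snapshot as test-created and drop it during reset. `UdfCleanupSketch` and the `dropTestUdf` cleanup hook are hypothetical names for illustration only, not TestHive or Spark APIs; only `FunctionRegistry.getFunctionNames` comes from the code above.

// Illustrative sketch only; assumes hive-exec on the classpath, as in the test module.
import java.util.{Set => JavaSet}

import scala.collection.JavaConverters._

import org.apache.hadoop.hive.ql.exec.FunctionRegistry

object UdfCleanupSketch {
  // Snapshot of the function names Hive knows about when the test session starts.
  private val originalUDFs: JavaSet[String] = FunctionRegistry.getFunctionNames

  // Function names registered after the snapshot was taken, i.e. created by tests.
  def createdDuringTests(): Set[String] =
    FunctionRegistry.getFunctionNames.asScala.toSet -- originalUDFs.asScala

  // Drops every test-created UDF; `dropTestUdf` stands in for whatever cleanup the caller uses.
  def reset(dropTestUdf: String => Unit): Unit =
    createdDuringTests().foreach(dropTestUdf)
}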