From e4ed806b65dfbf70b323e75eb10471dfd89b5c02 Mon Sep 17 00:00:00 2001
From: Yuming Wang
Date: Tue, 24 Sep 2019 10:54:21 +0800
Subject: [PATCH] Revert to use Utils.doFetchFile

---
 .../org/apache/spark/deploy/SparkSubmit.scala |  4 +--
 .../apache/spark/sql/hive/test/TestHive.scala | 27 ++++++++++++-------
 2 files changed, 18 insertions(+), 13 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
index 118c8e0b960fb..ebc843d9758ee 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -1302,7 +1302,6 @@ private[spark] object SparkSubmitUtils {
    * @param coordinates Comma-delimited string of maven coordinates
    * @param ivySettings An IvySettings containing resolvers to use
    * @param exclusions Exclusions to apply when resolving transitive dependencies
-   * @param isTransitive If the dependencies should be resolved transitively
    * @return The comma-delimited path to the jars of the given maven artifacts including their
    *         transitive dependencies
    */
@@ -1310,7 +1309,6 @@ private[spark] object SparkSubmitUtils {
       coordinates: String,
       ivySettings: IvySettings,
       exclusions: Seq[String] = Nil,
-      isTransitive: Boolean = true,
       isTest: Boolean = false): String = {
     if (coordinates == null || coordinates.trim.isEmpty) {
       ""
@@ -1332,7 +1330,7 @@ private[spark] object SparkSubmitUtils {
       val ivy = Ivy.newInstance(ivySettings)
       // Set resolve options to download transitive dependencies as well
       val resolveOptions = new ResolveOptions
-      resolveOptions.setTransitive(isTransitive)
+      resolveOptions.setTransitive(true)
       val retrieveOptions = new RetrieveOptions
       // Turn downloading and logging off for testing
       if (isTest) {
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala
index ee30d6c604895..64855986e90ae 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala
@@ -32,7 +32,6 @@ import org.apache.hadoop.hive.ql.exec.FunctionRegistry
 import org.apache.hadoop.hive.serde2.`lazy`.LazySimpleSerDe
 
 import org.apache.spark.{SparkConf, SparkContext}
-import org.apache.spark.deploy.SparkSubmitUtils
 import org.apache.spark.internal.Logging
 import org.apache.spark.internal.config
 import org.apache.spark.internal.config.UI._
@@ -650,14 +649,22 @@ private[sql] class TestHiveSessionStateBuilder(
 }
 
 private[hive] object HiveTestJars {
-  def getHiveContribJar: File = getHiveTestJar("org.apache.hive:hive-contrib")
-  def getHiveHcatalogCoreJar: File = getHiveTestJar("org.apache.hive.hcatalog:hive-hcatalog-core")
-
-  private def getHiveTestJar(coordinate: String): File = {
-    // isTransitive = false: Only direct dependencies should be resolved.
-    val filePath = SparkSubmitUtils.resolveMavenCoordinates(
-      s"$coordinate:${HiveUtils.builtinHiveVersion}",
-      SparkSubmitUtils.buildIvySettings(None, None), isTransitive = false)
-    new File(filePath)
+  private val repository = "https://repository.apache.org/content/repositories/releases/"
+  private val hiveTestJarsDir = Utils.createTempDir()
+
+  def getHiveContribJar: File =
+    getJarFromUrl(s"${repository}org/apache/hive/hive-contrib/" +
+      s"${HiveUtils.builtinHiveVersion}/hive-contrib-${HiveUtils.builtinHiveVersion}.jar")
+  def getHiveHcatalogCoreJar: File =
+    getJarFromUrl(s"${repository}org/apache/hive/hcatalog/hive-hcatalog-core/" +
+      s"${HiveUtils.builtinHiveVersion}/hive-hcatalog-core-${HiveUtils.builtinHiveVersion}.jar")
+
+  private def getJarFromUrl(urlString: String): File = {
+    val fileName = urlString.split("/").last
+    val targetFile = new File(hiveTestJarsDir, fileName)
+    if (!targetFile.exists()) {
+      Utils.doFetchFile(urlString, hiveTestJarsDir, fileName, new SparkConf, null, null)
+    }
+    targetFile
+  }
 }
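
Usage note (illustrative, not part of the patch): after this revert, test suites obtain the jars by direct URL fetch rather than Ivy resolution, and the download happens at most once per JVM because getJarFromUrl caches into the directory returned by Utils.createTempDir(). Below is a minimal sketch of a caller, assuming it compiles inside Spark's own sql/hive test module (HiveTestJars is private[hive]); the example_format function and UDFExampleFormat class come from hive-contrib and are used here only as a demonstration.

import org.apache.spark.sql.hive.test.{HiveTestJars, TestHive}

// The first call triggers the HTTP fetch from repository.apache.org;
// later calls reuse the jar already sitting in the temp directory.
val contribJar = HiveTestJars.getHiveContribJar.getCanonicalPath

// Make the jar and one of its UDFs visible to the test session.
TestHive.sql(s"ADD JAR $contribJar")
TestHive.sql(
  "CREATE TEMPORARY FUNCTION example_format AS " +
    "'org.apache.hadoop.hive.contrib.udf.example.UDFExampleFormat'")
TestHive.sql("SELECT example_format('%o', 93)").show()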