Skip to content

Commit

Permalink
Revert to use Utils.doFetchFile
Browse files · Browse the repository at this point in the history
  • Loading branch information
wangyum committed Sep 24, 2019
1 parent 274ade7 commit e4ed806
Show file tree
Hide file tree
Showing 2 changed files with 18 additions and 13 deletions.
Expand Up @@ -1302,15 +1302,13 @@ private[spark] object SparkSubmitUtils {
* @param coordinates Comma-delimited string of maven coordinates
* @param ivySettings An IvySettings containing resolvers to use
* @param exclusions Exclusions to apply when resolving transitive dependencies
* @param isTransitive If the dependencies should be resolved transitively
* @return The comma-delimited path to the jars of the given maven artifacts including their
* transitive dependencies
*/
def resolveMavenCoordinates(
coordinates: String,
ivySettings: IvySettings,
exclusions: Seq[String] = Nil,
isTransitive: Boolean = true,
isTest: Boolean = false): String = {
if (coordinates == null || coordinates.trim.isEmpty) {
""
Expand All @@ -1332,7 +1330,7 @@ private[spark] object SparkSubmitUtils {
val ivy = Ivy.newInstance(ivySettings)
// Set resolve options to download transitive dependencies as well
val resolveOptions = new ResolveOptions
resolveOptions.setTransitive(isTransitive)
resolveOptions.setTransitive(true)
val retrieveOptions = new RetrieveOptions
// Turn downloading and logging off for testing
if (isTest) {
Expand Down
Expand Up @@ -32,7 +32,6 @@ import org.apache.hadoop.hive.ql.exec.FunctionRegistry
import org.apache.hadoop.hive.serde2.`lazy`.LazySimpleSerDe

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.deploy.SparkSubmitUtils
import org.apache.spark.internal.Logging
import org.apache.spark.internal.config
import org.apache.spark.internal.config.UI._
Expand Down Expand Up @@ -650,14 +649,22 @@ private[sql] class TestHiveSessionStateBuilder(
}

private[hive] object HiveTestJars {
  // Maven repository that hosts released Apache Hive artifacts.
  private val repository = "https://repository.apache.org/content/repositories/releases/"
  // Per-JVM cache directory: each jar is fetched at most once per test run.
  private val hiveTestJarsDir = Utils.createTempDir()

  /**
   * Returns the hive-contrib jar matching the built-in Hive version,
   * downloading it into the local cache directory if not already present.
   */
  def getHiveContribJar: File =
    getJarFromUrl(s"${repository}org/apache/hive/hive-contrib/" +
      s"${HiveUtils.builtinHiveVersion}/hive-contrib-${HiveUtils.builtinHiveVersion}.jar")

  /**
   * Returns the hive-hcatalog-core jar matching the built-in Hive version,
   * downloading it into the local cache directory if not already present.
   */
  def getHiveHcatalogCoreJar: File =
    getJarFromUrl(s"${repository}org/apache/hive/hcatalog/hive-hcatalog-core/" +
      s"${HiveUtils.builtinHiveVersion}/hive-hcatalog-core-${HiveUtils.builtinHiveVersion}.jar")

  /**
   * Fetches the jar at `urlString` into `hiveTestJarsDir` unless a file with the
   * same name is already there, and returns the local [[File]].
   *
   * @param urlString full URL of the jar; the last path segment is used as the
   *                  local file name, so it must be non-empty and end in the jar name
   * @return the (possibly freshly downloaded) local jar file
   */
  private def getJarFromUrl(urlString: String): File = {
    val fileName = urlString.split("/").last
    val targetFile = new File(hiveTestJarsDir, fileName)
    if (!targetFile.exists()) {
      // NOTE(review): the trailing nulls are presumably the Hadoop Configuration and
      // SecurityManager arguments of Utils.doFetchFile, unused for a plain HTTP(S)
      // fetch — confirm against the Utils.doFetchFile signature.
      Utils.doFetchFile(urlString, hiveTestJarsDir, fileName, new SparkConf, null, null)
    }
    targetFile
  }
}

0 comments on commit e4ed806

Please sign in to comment.