Commit fa205e1

Merge branch 'master' of github.com:apache/spark into hive-distribution
andrewor14 committed May 5, 2014
2 parents 7855f58 + 3292e2a commit fa205e1
Showing 6 changed files with 97 additions and 56 deletions.
6 changes: 0 additions & 6 deletions core/pom.xml
@@ -38,12 +38,6 @@
<dependency>
<groupId>net.java.dev.jets3t</groupId>
<artifactId>jets3t</artifactId>
<exclusions>
<exclusion>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.curator</groupId>
4 changes: 1 addition & 3 deletions core/src/main/scala/org/apache/spark/SparkEnv.scala
@@ -156,13 +156,11 @@ object SparkEnv extends Logging {
conf.set("spark.driver.port", boundPort.toString)
}

val classLoader = Thread.currentThread.getContextClassLoader

// Create an instance of the class named by the given Java system property, or by
// defaultClassName if the property is not set, and return it as a T
def instantiateClass[T](propertyName: String, defaultClassName: String): T = {
val name = conf.get(propertyName, defaultClassName)
val cls = Class.forName(name, true, classLoader)
val cls = Class.forName(name, true, Utils.getContextOrSparkClassLoader)
// First try with the constructor that takes SparkConf. If we can't find one,
// use a no-arg constructor instead.
try {
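For context on the SparkEnv.scala hunk above (not part of the diff itself): instantiateClass is the helper SparkEnv uses to build pluggable components from configuration keys. A hedged usage sketch, with a key and default class name chosen for illustration rather than read from this commit:

    // Hypothetical call site: construct the serializer named by a config key,
    // falling back to a default implementation when the key is unset.
    val serializer = instantiateClass[Serializer](
      "spark.serializer", "org.apache.spark.serializer.JavaSerializer")
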
@@ -24,6 +24,7 @@ import org.xerial.snappy.{SnappyInputStream, SnappyOutputStream}

import org.apache.spark.SparkConf
import org.apache.spark.annotation.DeveloperApi
import org.apache.spark.util.Utils

/**
* :: DeveloperApi ::
@@ -49,7 +50,7 @@ private[spark] object CompressionCodec {
}

def createCodec(conf: SparkConf, codecName: String): CompressionCodec = {
val ctor = Class.forName(codecName, true, Thread.currentThread.getContextClassLoader)
val ctor = Class.forName(codecName, true, Utils.getContextOrSparkClassLoader)
.getConstructor(classOf[SparkConf])
ctor.newInstance(conf).asInstanceOf[CompressionCodec]
}
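Both the SparkEnv.scala and CompressionCodec hunks above swap the thread context class loader for Utils.getContextOrSparkClassLoader when resolving class names. The helper's body is not shown in this diff; a minimal sketch of the intended fallback, assuming it prefers the context loader and otherwise uses the loader that loaded Spark's own classes, might look like:

    // Minimal sketch, assuming fallback semantics -- not the exact Utils code.
    object Utils {
      // Class loader that loaded Spark itself.
      def getSparkClassLoader: ClassLoader = getClass.getClassLoader

      // Prefer the thread's context class loader; fall back to Spark's own
      // loader when no context loader is installed (e.g. bare threads).
      def getContextOrSparkClassLoader: ClassLoader =
        Option(Thread.currentThread().getContextClassLoader)
          .getOrElse(getSparkClassLoader)
    }

With a fallback of this shape, Class.forName no longer fails outright when the calling thread has no context class loader or has one that cannot see Spark's classes.
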
51 changes: 41 additions & 10 deletions docs/building-with-maven.md
@@ -29,9 +29,22 @@ You can fix this by setting the `MAVEN_OPTS` variable as discussed before.

## Specifying the Hadoop version ##

Because HDFS is not protocol-compatible across versions, if you want to read from HDFS, you'll need to build Spark against the specific HDFS version in your environment. You can do this through the "hadoop.version" property. If unset, Spark will build against Hadoop 1.0.4 by default.

For Apache Hadoop versions 1.x, Cloudera CDH MRv1, and other Hadoop versions without YARN, use:
Because HDFS is not protocol-compatible across versions, if you want to read from HDFS, you'll need to build Spark against the specific HDFS version in your environment. You can do this through the "hadoop.version" property. If unset, Spark will build against Hadoop 1.0.4 by default. Note that certain build profiles are required for particular Hadoop versions:

<table class="table">
<thead>
<tr><th>Hadoop version</th><th>Profile required</th></tr>
</thead>
<tbody>
<tr><td>0.23.x</td><td>hadoop-0.23</td></tr>
<tr><td>1.x to 2.1.x</td><td>(none)</td></tr>
<tr><td>2.2.x</td><td>hadoop-2.2</td></tr>
<tr><td>2.3.x</td><td>hadoop-2.3</td></tr>
<tr><td>2.4.x</td><td>hadoop-2.4</td></tr>
</tbody>
</table>

For Apache Hadoop versions 1.x, Cloudera CDH "mr1" distributions, and other Hadoop versions without YARN, use:

# Apache Hadoop 1.2.1
$ mvn -Dhadoop.version=1.2.1 -DskipTests clean package
@@ -42,22 +55,40 @@ For Apache Hadoop versions 1.x, Cloudera CDH MRv1, and other Hadoop versions wit
# Apache Hadoop 0.23.x
$ mvn -Phadoop-0.23 -Dhadoop.version=0.23.7 -DskipTests clean package

For Apache Hadoop 2.x, 0.23.x, Cloudera CDH MRv2, and other Hadoop versions with YARN, you can enable the "yarn-alpha" or "yarn" profile and set the "hadoop.version", "yarn.version" property. Note that Hadoop 0.23.X requires a special `-Phadoop-0.23` profile:
For Apache Hadoop 2.x, 0.23.x, Cloudera CDH, and other Hadoop versions with YARN, you can enable the "yarn-alpha" or "yarn" profile and optionally set the "yarn.version" property if it is different from "hadoop.version". The additional build profile required depends on the YARN version:

<table class="table">
<thead>
<tr><th>YARN version</th><th>Profile required</th></tr>
</thead>
<tbody>
<tr><td>0.23.x to 2.1.x</td><td>yarn-alpha</td></tr>
<tr><td>2.2.x and later</td><td>yarn</td></tr>
</tbody>
</table>

Examples:

# Apache Hadoop 2.0.5-alpha
$ mvn -Pyarn-alpha -Dhadoop.version=2.0.5-alpha -DskipTests clean package

# Cloudera CDH 4.2.0 with MapReduce v2
# Cloudera CDH 4.2.0
$ mvn -Pyarn-alpha -Dhadoop.version=2.0.0-cdh4.2.0 -DskipTests clean package

# Apache Hadoop 2.2.X (e.g. 2.2.0 as below) and newer
$ mvn -Pyarn -Dhadoop.version=2.2.0 -DskipTests clean package

# Apache Hadoop 0.23.x
$ mvn -Pyarn-alpha -Phadoop-0.23 -Dhadoop.version=0.23.7 -Dyarn.version=0.23.7 -DskipTests clean package
$ mvn -Pyarn-alpha -Phadoop-0.23 -Dhadoop.version=0.23.7 -DskipTests clean package

# Apache Hadoop 2.2.X
$ mvn -Pyarn -Phadoop-2.2 -Dhadoop.version=2.2.0 -DskipTests clean package

# Apache Hadoop 2.3.X
$ mvn -Pyarn -Phadoop-2.3 -Dhadoop.version=2.3.0 -DskipTests clean package

# Apache Hadoop 2.4.X
$ mvn -Pyarn -Phadoop-2.4 -Dhadoop.version=2.4.0 -DskipTests clean package

# Different versions of HDFS and YARN.
$ mvn -Pyarn-alpha -Dhadoop.version=2.3.0 -Dyarn.version=0.23.7 -DskipTests clean package
$ mvn -Pyarn-alpha -Phadoop-2.3 -Dhadoop.version=2.3.0 -Dyarn.version=0.23.7 -DskipTests clean package

## Spark Tests in Maven ##

84 changes: 50 additions & 34 deletions pom.xml
@@ -129,6 +129,7 @@
<chill.version>0.3.6</chill.version>
<codahale.metrics.version>3.0.0</codahale.metrics.version>
<avro.version>1.7.4</avro.version>
<jets3t.version>0.7.1</jets3t.version>

<PermGen>64m</PermGen>
<MaxPermGen>512m</MaxPermGen>
@@ -560,10 +561,18 @@
</exclusion>
</exclusions>
</dependency>
<!-- See SPARK-1556 for info on this dependency: -->
<dependency>
<groupId>net.java.dev.jets3t</groupId>
<artifactId>jets3t</artifactId>
<version>0.7.1</version>
<version>${jets3t.version}</version>
<scope>runtime</scope>
<exclusions>
<exclusion>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
@@ -843,36 +852,6 @@
</build>

<profiles>
<!-- SPARK-1121: Adds an explicit dependency on Avro to work around a Hadoop 0.23.X issue -->
<profile>
<id>hadoop-0.23</id>
<dependencies>
<dependency>
<groupId>org.apache.avro</groupId>
<artifactId>avro</artifactId>
</dependency>
</dependencies>
</profile>

<profile>
<id>yarn-alpha</id>
<properties>
<hadoop.major.version>2</hadoop.major.version>
<!-- 0.23.* is same as 2.0.* - except hardened to run production jobs -->
<hadoop.version>0.23.7</hadoop.version>
<!--<hadoop.version>2.0.5-alpha</hadoop.version> -->
</properties>
<dependencies>
<dependency>
<groupId>org.apache.avro</groupId>
<artifactId>avro</artifactId>
</dependency>
</dependencies>
<modules>
<module>yarn</module>
</modules>

</profile>

<!-- Ganglia integration is not included by default due to LGPL-licensed code -->
<profile>
@@ -907,17 +886,54 @@

</profile>

<!-- A series of build profiles where customizations for particular Hadoop releases can be made -->

<profile>
<id>yarn</id>
<id>hadoop-0.23</id>
<!-- SPARK-1121: Adds an explicit dependency on Avro to work around a Hadoop 0.23.X issue -->
<dependencies>
<dependency>
<groupId>org.apache.avro</groupId>
<artifactId>avro</artifactId>
</dependency>
</dependencies>
</profile>

<profile>
<id>hadoop-2.2</id>
<properties>
<protobuf.version>2.5.0</protobuf.version>
</properties>
</profile>

<profile>
<id>hadoop-2.3</id>
<properties>
<hadoop.major.version>2</hadoop.major.version>
<hadoop.version>2.2.0</hadoop.version>
<protobuf.version>2.5.0</protobuf.version>
<jets3t.version>0.9.0</jets3t.version>
</properties>
</profile>

<profile>
<id>hadoop-2.4</id>
<properties>
<protobuf.version>2.5.0</protobuf.version>
<jets3t.version>0.9.0</jets3t.version>
</properties>
</profile>

<profile>
<id>yarn-alpha</id>
<modules>
<module>yarn</module>
</modules>
</profile>

<profile>
<id>yarn</id>
<modules>
<module>yarn</module>
</modules>
</profile>

<!-- Build without Hadoop dependencies that are included in some runtime environments. -->
5 changes: 3 additions & 2 deletions project/SparkBuild.scala
@@ -95,7 +95,7 @@ object SparkBuild extends Build {
lazy val hadoopVersion = Properties.envOrElse("SPARK_HADOOP_VERSION", DEFAULT_HADOOP_VERSION)
lazy val isNewHadoop = Properties.envOrNone("SPARK_IS_NEW_HADOOP") match {
case None => {
val isNewHadoopVersion = "2.[2-9]+".r.findFirstIn(hadoopVersion).isDefined
val isNewHadoopVersion = "^2\\.[2-9]+".r.findFirstIn(hadoopVersion).isDefined
(isNewHadoopVersion|| DEFAULT_IS_NEW_HADOOP)
}
case Some(v) => v.toBoolean
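
The regex change above anchors the version pattern to the start of the string. Without the anchor, a Hadoop 1.x version string that merely contains a "2.<digit>" later on (for example 1.2.3) is misclassified as a new Hadoop release. A quick illustrative check, not part of the diff:

    // Unanchored: matches inside "1.2.3", wrongly flagging a 1.x build as new.
    "2.[2-9]+".r.findFirstIn("1.2.3")     // Some("2.3")
    // Anchored: only versions that actually start with 2.2 or later match.
    "^2\\.[2-9]+".r.findFirstIn("1.2.3")  // None
    "^2\\.[2-9]+".r.findFirstIn("2.3.0")  // Some("2.3")

The jets3tVersion selection added further down in this file uses the same anchored style.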
@@ -297,6 +297,7 @@ object SparkBuild extends Build {
val chillVersion = "0.3.6"
val codahaleMetricsVersion = "3.0.0"
val jblasVersion = "1.2.3"
val jets3tVersion = if ("^2\\.[3-9]+".r.findFirstIn(hadoopVersion).isDefined) "0.9.0" else "0.7.1"
val jettyVersion = "8.1.14.v20131031"
val hiveVersion = "0.12.0"
val parquetVersion = "1.3.2"
@@ -343,7 +344,7 @@ object SparkBuild extends Build {
"colt" % "colt" % "1.2.0",
"org.apache.mesos" % "mesos" % "0.13.0",
"commons-net" % "commons-net" % "2.2",
"net.java.dev.jets3t" % "jets3t" % "0.7.1" excludeAll(excludeCommonsLogging),
"net.java.dev.jets3t" % "jets3t" % jets3tVersion excludeAll(excludeCommonsLogging),
"org.apache.derby" % "derby" % "10.4.2.0" % "test",
"org.apache.hadoop" % hadoopClient % hadoopVersion excludeAll(excludeNetty, excludeAsm, excludeCommonsLogging, excludeSLF4J, excludeOldAsm),
"org.apache.curator" % "curator-recipes" % "2.4.0" excludeAll(excludeNetty),
