Merge remote-tracking branch 'apache/master'

texasmichelle committed Apr 8, 2015
2 parents aa20a6e + 15e0d2b commit 25229c6
Showing 515 changed files with 11,580 additions and 5,188 deletions.
59 changes: 59 additions & 0 deletions bin/load-spark-env.cmd
@@ -0,0 +1,59 @@
@echo off

rem
rem Licensed to the Apache Software Foundation (ASF) under one or more
rem contributor license agreements. See the NOTICE file distributed with
rem this work for additional information regarding copyright ownership.
rem The ASF licenses this file to You under the Apache License, Version 2.0
rem (the "License"); you may not use this file except in compliance with
rem the License. You may obtain a copy of the License at
rem
rem http://www.apache.org/licenses/LICENSE-2.0
rem
rem Unless required by applicable law or agreed to in writing, software
rem distributed under the License is distributed on an "AS IS" BASIS,
rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
rem See the License for the specific language governing permissions and
rem limitations under the License.
rem

rem This script loads spark-env.cmd if it exists, and ensures it is only loaded once.
rem spark-env.cmd is loaded from SPARK_CONF_DIR if set, or within the current directory's
rem conf/ subdirectory.

if [%SPARK_ENV_LOADED%] == [] (
  set SPARK_ENV_LOADED=1

  if not [%SPARK_CONF_DIR%] == [] (
    set user_conf_dir=%SPARK_CONF_DIR%
  ) else (
    set user_conf_dir=%~dp0..\..\conf
  )

  call :LoadSparkEnv
)

rem Setting SPARK_SCALA_VERSION if not already set.

set ASSEMBLY_DIR2=%SPARK_HOME%/assembly/target/scala-2.11
set ASSEMBLY_DIR1=%SPARK_HOME%/assembly/target/scala-2.10

if [%SPARK_SCALA_VERSION%] == [] (

  if exist %ASSEMBLY_DIR2% if exist %ASSEMBLY_DIR1% (
    echo "Presence of build for both scala versions(SCALA 2.10 and SCALA 2.11) detected."
    echo "Either clean one of them or, set SPARK_SCALA_VERSION=2.11 in spark-env.cmd."
    exit 1
  )
  if exist %ASSEMBLY_DIR2% (
    set SPARK_SCALA_VERSION=2.11
  ) else (
    set SPARK_SCALA_VERSION=2.10
  )
)
exit /b 0

:LoadSparkEnv
if exist "%user_conf_dir%\spark-env.cmd" (
call "%user_conf_dir%\spark-env.cmd"
)
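
The contract the new script implements is worth spelling out: the SPARK_ENV_LOADED sentinel makes the load idempotent, and an explicit SPARK_CONF_DIR takes precedence over the default conf directory. A minimal Scala model of that logic, illustrative only (SparkEnvLoader, resolveConfDir, and loadOnce are hypothetical names, not Spark code):

// Illustrative model of load-spark-env.cmd's contract; hypothetical names.
object SparkEnvLoader {
  // An explicit SPARK_CONF_DIR wins; otherwise fall back to the default conf dir.
  def resolveConfDir(env: Map[String, String], sparkHome: String): String =
    env.getOrElse("SPARK_CONF_DIR", s"$sparkHome\\conf")

  // The SPARK_ENV_LOADED sentinel turns repeated loads into no-ops, so
  // scripts that call one another cannot source spark-env.cmd twice.
  def loadOnce(env: Map[String, String]): Map[String, String] =
    if (env.contains("SPARK_ENV_LOADED")) env
    else env + ("SPARK_ENV_LOADED" -> "1")
}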
3 changes: 1 addition & 2 deletions bin/pyspark2.cmd
@@ -20,8 +20,7 @@ rem
rem Figure out where the Spark framework is installed
set SPARK_HOME=%~dp0..

rem Load environment variables from conf\spark-env.cmd, if it exists
if exist "%SPARK_HOME%\conf\spark-env.cmd" call "%SPARK_HOME%\conf\spark-env.cmd"
call %SPARK_HOME%\bin\load-spark-env.cmd

rem Figure out which Python to use.
if "x%PYSPARK_DRIVER_PYTHON%"=="x" (
3 changes: 1 addition & 2 deletions bin/run-example2.cmd
@@ -25,8 +25,7 @@ set FWDIR=%~dp0..\
rem Export this as SPARK_HOME
set SPARK_HOME=%FWDIR%

rem Load environment variables from conf\spark-env.cmd, if it exists
if exist "%FWDIR%conf\spark-env.cmd" call "%FWDIR%conf\spark-env.cmd"
call %SPARK_HOME%\bin\load-spark-env.cmd

rem Test that an argument was given
if not "x%1"=="x" goto arg_given
61 changes: 36 additions & 25 deletions bin/spark-class
@@ -40,35 +40,46 @@ else
  fi
fi

# Look for the launcher. In non-release mode, add the compiled classes directly to the classpath
# instead of looking for a jar file.
SPARK_LAUNCHER_CP=
if [ -f $SPARK_HOME/RELEASE ]; then
  LAUNCHER_DIR="$SPARK_HOME/lib"
  num_jars="$(ls -1 "$LAUNCHER_DIR" | grep "^spark-launcher.*\.jar$" | wc -l)"
  if [ "$num_jars" -eq "0" -a -z "$SPARK_LAUNCHER_CP" ]; then
    echo "Failed to find Spark launcher in $LAUNCHER_DIR." 1>&2
    echo "You need to build Spark before running this program." 1>&2
    exit 1
  fi
# Find assembly jar
SPARK_ASSEMBLY_JAR=
if [ -f "$SPARK_HOME/RELEASE" ]; then
  ASSEMBLY_DIR="$SPARK_HOME/lib"
else
  ASSEMBLY_DIR="$SPARK_HOME/assembly/target/scala-$SPARK_SCALA_VERSION"
fi

  LAUNCHER_JARS="$(ls -1 "$LAUNCHER_DIR" | grep "^spark-launcher.*\.jar$" || true)"
  if [ "$num_jars" -gt "1" ]; then
    echo "Found multiple Spark launcher jars in $LAUNCHER_DIR:" 1>&2
    echo "$LAUNCHER_JARS" 1>&2
    echo "Please remove all but one jar." 1>&2
    exit 1
  fi
num_jars="$(ls -1 "$ASSEMBLY_DIR" | grep "^spark-assembly.*hadoop.*\.jar$" | wc -l)"
if [ "$num_jars" -eq "0" -a -z "$SPARK_ASSEMBLY_JAR" ]; then
  echo "Failed to find Spark assembly in $ASSEMBLY_DIR." 1>&2
  echo "You need to build Spark before running this program." 1>&2
  exit 1
fi
ASSEMBLY_JARS="$(ls -1 "$ASSEMBLY_DIR" | grep "^spark-assembly.*hadoop.*\.jar$" || true)"
if [ "$num_jars" -gt "1" ]; then
  echo "Found multiple Spark assembly jars in $ASSEMBLY_DIR:" 1>&2
  echo "$ASSEMBLY_JARS" 1>&2
  echo "Please remove all but one jar." 1>&2
  exit 1
fi

  SPARK_LAUNCHER_CP="${LAUNCHER_DIR}/${LAUNCHER_JARS}"
SPARK_ASSEMBLY_JAR="${ASSEMBLY_DIR}/${ASSEMBLY_JARS}"

# Verify that versions of java used to build the jars and run Spark are compatible
if [ -n "$JAVA_HOME" ]; then
  JAR_CMD="$JAVA_HOME/bin/jar"
else
  LAUNCHER_DIR="$SPARK_HOME/launcher/target/scala-$SPARK_SCALA_VERSION"
  if [ ! -d "$LAUNCHER_DIR/classes" ]; then
    echo "Failed to find Spark launcher classes in $LAUNCHER_DIR." 1>&2
    echo "You need to build Spark before running this program." 1>&2
  JAR_CMD="jar"
fi

if [ $(command -v "$JAR_CMD") ] ; then
  jar_error_check=$("$JAR_CMD" -tf "$SPARK_ASSEMBLY_JAR" nonexistent/class/path 2>&1)
  if [[ "$jar_error_check" =~ "invalid CEN header" ]]; then
    echo "Loading Spark jar with '$JAR_CMD' failed. " 1>&2
    echo "This is likely because Spark was compiled with Java 7 and run " 1>&2
    echo "with Java 6. (see SPARK-1703). Please use Java 7 to run Spark " 1>&2
    echo "or build Spark with Java 6." 1>&2
    exit 1
  fi
  SPARK_LAUNCHER_CP="$LAUNCHER_DIR/classes"
fi

# The launcher library will print arguments separated by a NULL character, to allow arguments with
# characters that would otherwise be interpreted by the shell.
@@ -77,7 +88,7 @@ fi
CMD=()
while IFS= read -d '' -r ARG; do
  CMD+=("$ARG")
done < <("$RUNNER" -cp "$SPARK_LAUNCHER_CP" org.apache.spark.launcher.Main "$@")
done < <("$RUNNER" -cp "$SPARK_ASSEMBLY_JAR" org.apache.spark.launcher.Main "$@")

if [ "${CMD[0]}" = "usage" ]; then
  "${CMD[@]}"
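
The NULL-separated handshake between this script and org.apache.spark.launcher.Main is easy to demonstrate. A self-contained Scala sketch of the idea (NulArgsDemo is a hypothetical name, not part of the launcher library):

// Why NUL-separation is lossless: '\u0000' cannot appear inside a shell
// argument, so joining on it and splitting again round-trips any argv,
// including arguments that contain spaces, quotes, or newlines.
object NulArgsDemo {
  def encode(args: Seq[String]): String = args.mkString("\u0000")
  def decode(payload: String): Seq[String] =
    if (payload.isEmpty) Nil else payload.split('\u0000').toSeq

  def main(args: Array[String]): Unit = {
    val cmd = Seq("java", "-cp", "spark assembly.jar", "--name=app with spaces")
    assert(decode(encode(cmd)) == cmd) // survives the round trip intact
  }
}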
36 changes: 12 additions & 24 deletions bin/spark-class2.cmd
@@ -20,40 +20,28 @@ rem
rem Figure out where the Spark framework is installed
set SPARK_HOME=%~dp0..

rem Load environment variables from conf\spark-env.cmd, if it exists
if exist "%SPARK_HOME%\conf\spark-env.cmd" call "%SPARK_HOME%\conf\spark-env.cmd"
call %SPARK_HOME%\bin\load-spark-env.cmd

rem Test that an argument was given
if "x%1"=="x" (
echo Usage: spark-class ^<class^> [^<args^>]
exit /b 1
)

set LAUNCHER_CP=0
if exist %SPARK_HOME%\RELEASE goto find_release_launcher
rem Find assembly jar
set SPARK_ASSEMBLY_JAR=0

rem Look for the Spark launcher in both Scala build directories. The launcher doesn't use Scala so
rem it doesn't really matter which one is picked up. Add the compiled classes directly to the
rem classpath instead of looking for a jar file, since it's very common for people using sbt to use
rem the "assembly" target instead of "package".
set LAUNCHER_CLASSES=%SPARK_HOME%\launcher\target\scala-2.10\classes
if exist %LAUNCHER_CLASSES% (
  set LAUNCHER_CP=%LAUNCHER_CLASSES%
if exist "%SPARK_HOME%\RELEASE" (
  set ASSEMBLY_DIR=%SPARK_HOME%\lib
) else (
  set ASSEMBLY_DIR=%SPARK_HOME%\assembly\target\scala-%SPARK_SCALA_VERSION%
)
set LAUNCHER_CLASSES=%SPARK_HOME%\launcher\target\scala-2.11\classes
if exist %LAUNCHER_CLASSES% (
  set LAUNCHER_CP=%LAUNCHER_CLASSES%
)
goto check_launcher

:find_release_launcher
for %%d in (%SPARK_HOME%\lib\spark-launcher*.jar) do (
  set LAUNCHER_CP=%%d
for %%d in (%ASSEMBLY_DIR%\spark-assembly*hadoop*.jar) do (
  set SPARK_ASSEMBLY_JAR=%%d
)

:check_launcher
if "%LAUNCHER_CP%"=="0" (
  echo Failed to find Spark launcher JAR.
if "%SPARK_ASSEMBLY_JAR%"=="0" (
  echo Failed to find Spark assembly JAR.
  echo You need to build Spark before running this program.
  exit /b 1
)
@@ -64,7 +52,7 @@ if not "x%JAVA_HOME%"=="x" set RUNNER=%JAVA_HOME%\bin\java

rem The launcher library prints the command to be executed in a single line suitable for being
rem executed by the batch interpreter. So read all the output of the launcher into a variable.
for /f "tokens=*" %%i in ('cmd /C ""%RUNNER%" -cp %LAUNCHER_CP% org.apache.spark.launcher.Main %*"') do (
for /f "tokens=*" %%i in ('cmd /C ""%RUNNER%" -cp %SPARK_ASSEMBLY_JAR% org.apache.spark.launcher.Main %*"') do (
set SPARK_CMD=%%i
)
%SPARK_CMD%
8 changes: 4 additions & 4 deletions core/src/main/resources/org/apache/spark/ui/static/additional-metrics.js
@@ -30,7 +30,7 @@ $(function() {

    stripeSummaryTable();

    $("input:checkbox").click(function() {
    $('input[type="checkbox"]').click(function() {
        var column = "table ." + $(this).attr("name");
        $(column).toggle();
        stripeSummaryTable();
Expand All @@ -39,15 +39,15 @@ $(function() {
$("#select-all-metrics").click(function() {
if (this.checked) {
// Toggle all un-checked options.
$('input:checkbox:not(:checked)').trigger('click');
$('input[type="checkbox"]:not(:checked)').trigger('click');
} else {
// Toggle all checked options.
$('input:checkbox:checked').trigger('click');
$('input[type="checkbox"]:checked').trigger('click');
}
});

// Trigger a click on the checkbox if a user clicks the label next to it.
$("span.additional-metric-title").click(function() {
$(this).parent().find('input:checkbox').trigger('click');
$(this).parent().find('input[type="checkbox"]').trigger('click');
});
});
23 changes: 11 additions & 12 deletions core/src/main/scala/org/apache/spark/Accumulators.scala
@@ -18,8 +18,6 @@
package org.apache.spark

import java.io.{ObjectInputStream, Serializable}
import java.util.concurrent.atomic.AtomicLong
import java.lang.ThreadLocal

import scala.collection.generic.Growable
import scala.collection.mutable.Map
@@ -109,7 +107,7 @@ class Accumulable[R, T] (
   * The typical use of this method is to directly mutate the local value, eg., to add
   * an element to a Set.
   */
  def localValue = value_
  def localValue: R = value_

  /**
   * Set the accumulator's value; only allowed on master.
@@ -137,7 +135,7 @@ class Accumulable[R, T] (
    Accumulators.register(this, false)
  }

  override def toString = if (value_ == null) "null" else value_.toString
  override def toString: String = if (value_ == null) "null" else value_.toString
}

/**
@@ -257,22 +255,22 @@ object AccumulatorParam {

  implicit object DoubleAccumulatorParam extends AccumulatorParam[Double] {
    def addInPlace(t1: Double, t2: Double): Double = t1 + t2
    def zero(initialValue: Double) = 0.0
    def zero(initialValue: Double): Double = 0.0
  }

  implicit object IntAccumulatorParam extends AccumulatorParam[Int] {
    def addInPlace(t1: Int, t2: Int): Int = t1 + t2
    def zero(initialValue: Int) = 0
    def zero(initialValue: Int): Int = 0
  }

  implicit object LongAccumulatorParam extends AccumulatorParam[Long] {
    def addInPlace(t1: Long, t2: Long) = t1 + t2
    def zero(initialValue: Long) = 0L
    def addInPlace(t1: Long, t2: Long): Long = t1 + t2
    def zero(initialValue: Long): Long = 0L
  }

  implicit object FloatAccumulatorParam extends AccumulatorParam[Float] {
    def addInPlace(t1: Float, t2: Float) = t1 + t2
    def zero(initialValue: Float) = 0f
    def addInPlace(t1: Float, t2: Float): Float = t1 + t2
    def zero(initialValue: Float): Float = 0f
  }

  // TODO: Add AccumulatorParams for other types, e.g. lists and strings
@@ -351,6 +349,7 @@ private[spark] object Accumulators extends Logging {
    }
  }

  def stringifyPartialValue(partialValue: Any) = "%s".format(partialValue)
  def stringifyValue(value: Any) = "%s".format(value)
  def stringifyPartialValue(partialValue: Any): String = "%s".format(partialValue)

  def stringifyValue(value: Any): String = "%s".format(value)
}
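
Each implicit object above instantiates the same two-method contract: zero supplies an identity value and addInPlace merges two partial results, so the operation must be associative and commutative. A hedged sketch of a user-defined instance against this Spark 1.x API (MaxAccumulatorParam is hypothetical, not part of this patch):

import org.apache.spark.AccumulatorParam

// Hypothetical instance following the pattern of the numeric params above:
// tracks the maximum value seen across tasks. max is associative and
// commutative, with Double.NegativeInfinity as its identity.
object MaxAccumulatorParam extends AccumulatorParam[Double] {
  def addInPlace(t1: Double, t2: Double): Double = math.max(t1, t2)
  def zero(initialValue: Double): Double = Double.NegativeInfinity
}

// Usage sketch, assuming an existing SparkContext `sc`:
//   val maxSeen = sc.accumulator(Double.NegativeInfinity)(MaxAccumulatorParam)
//   sc.parallelize(Seq(1.0, 5.0, 3.0)).foreach(x => maxSeen += x)
//   maxSeen.value  // 5.0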
6 changes: 3 additions & 3 deletions core/src/main/scala/org/apache/spark/Dependency.scala
@@ -74,7 +74,7 @@ class ShuffleDependency[K, V, C](
    val mapSideCombine: Boolean = false)
  extends Dependency[Product2[K, V]] {

  override def rdd = _rdd.asInstanceOf[RDD[Product2[K, V]]]
  override def rdd: RDD[Product2[K, V]] = _rdd.asInstanceOf[RDD[Product2[K, V]]]

  val shuffleId: Int = _rdd.context.newShuffleId()

@@ -91,7 +91,7 @@ class ShuffleDependency[K, V, C](
 */
@DeveloperApi
class OneToOneDependency[T](rdd: RDD[T]) extends NarrowDependency[T](rdd) {
  override def getParents(partitionId: Int) = List(partitionId)
  override def getParents(partitionId: Int): List[Int] = List(partitionId)
}


@@ -107,7 +107,7 @@ class OneToOneDependency[T](rdd: RDD[T]) extends NarrowDependency[T](rdd) {
class RangeDependency[T](rdd: RDD[T], inStart: Int, outStart: Int, length: Int)
  extends NarrowDependency[T](rdd) {

  override def getParents(partitionId: Int) = {
  override def getParents(partitionId: Int): List[Int] = {
    if (partitionId >= outStart && partitionId < outStart + length) {
      List(partitionId - outStart + inStart)
    } else {
      Nil
    }
  }
}
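
The arithmetic in getParents is what lets an operation such as union keep narrow lineage: a child partition id inside the window [outStart, outStart + length) maps back to the parent by subtracting outStart and adding inStart. A standalone replica with hypothetical sample values, runnable outside Spark:

// Replica of RangeDependency.getParents, for illustration only.
def getParents(partitionId: Int, inStart: Int, outStart: Int, length: Int): List[Int] =
  if (partitionId >= outStart && partitionId < outStart + length) {
    List(partitionId - outStart + inStart)
  } else {
    Nil
  }

// A parent whose 4 partitions sit at child positions 3..6 (inStart = 0, outStart = 3):
assert(getParents(3, 0, 3, 4) == List(0)) // 3 - outStart + inStart
assert(getParents(6, 0, 3, 4) == List(3))
assert(getParents(7, 0, 3, 4) == Nil)     // outside the window: no parents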
