Skip to content

Commit

Permalink
Launcher library changes (SPARK-6406)
Browse files Browse the repository at this point in the history
  • Loading branch information
nishkamravi2 committed Mar 23, 2015
1 parent 345206a commit 3faa7a4
Show file tree
Hide file tree
Showing 4 changed files with 23 additions and 58 deletions.
6 changes: 3 additions & 3 deletions bin/spark-class
Expand Up @@ -43,13 +43,13 @@ fi
# Find assembly jar
SPARK_ASSEMBLY_JAR=
ASSEMBLY_DIR="$SPARK_HOME/lib"
num_jars="$(ls -1 "$ASSEMBLY_DIR" | grep "^spark-assembly.*\.jar$" | wc -l)"
num_jars="$(ls -1 "$ASSEMBLY_DIR" | grep "^spark-assembly.*hadoop.*\.jar$" | wc -l)"
if [ "$num_jars" -eq "0" -a -z "$SPARK_ASSEMBLY_JAR" ]; then
echo "Failed to find Spark assembly in $ASSEMBLY_DIR." 1>&2
echo "You need to build Spark before running this program." 1>&2
exit 1
fi
ASSEMBLY_JARS="$(ls -1 "$ASSEMBLY_DIR" | grep "^spark-assembly.*\.jar$" || true)"
ASSEMBLY_JARS="$(ls -1 "$ASSEMBLY_DIR" | grep "^spark-assembly.*hadoop.*\.jar$" || true)"
if [ "$num_jars" -gt "1" ]; then
echo "Found multiple Spark assembly jars in $ASSEMBLY_DIR:" 1>&2
echo "$ASSEMBLY_JARS" 1>&2
Expand All @@ -65,7 +65,7 @@ SPARK_ASSEMBLY_JAR="${ASSEMBLY_DIR}/${ASSEMBLY_JARS}"
CMD=()
while IFS= read -d '' -r ARG; do
CMD+=("$ARG")
done < <("$RUNNER" -cp "$SPARK_ASSEMBLY_JAR" org.apache.spark.launcher.Main "$@")
done < <("$RUNNER" -cp "$SPARK_ASSEMBLY_JAR" org.apache.spark.launcher.Main "$SPARK_ASSEMBLY_JAR" "$@")

if [ "${CMD[0]}" = "usage" ]; then
"${CMD[@]}"
Expand Down
32 changes: 7 additions & 25 deletions bin/spark-class2.cmd
Expand Up @@ -29,31 +29,13 @@ if "x%1"=="x" (
exit /b 1
)

set LAUNCHER_CP=0
if exist %SPARK_HOME%\RELEASE goto find_release_launcher

rem Look for the Spark launcher in both Scala build directories. The launcher doesn't use Scala so
rem it doesn't really matter which one is picked up. Add the compiled classes directly to the
rem classpath instead of looking for a jar file, since it's very common for people using sbt to use
rem the "assembly" target instead of "package".
set LAUNCHER_CLASSES=%SPARK_HOME%\launcher\target\scala-2.10\classes
if exist %LAUNCHER_CLASSES% (
set LAUNCHER_CP=%LAUNCHER_CLASSES%
)
set LAUNCHER_CLASSES=%SPARK_HOME%\launcher\target\scala-2.11\classes
if exist %LAUNCHER_CLASSES% (
set LAUNCHER_CP=%LAUNCHER_CLASSES%
rem Find assembly jar
set SPARK_ASSEMBLY_JAR=0
for %%d in (%SPARK_HOME%\lib\spark-assembly*hadoop*.jar) do (
set SPARK_ASSEMBLY_JAR=%%d
)
goto check_launcher

:find_release_launcher
for %%d in (%SPARK_HOME%\lib\spark-launcher*.jar) do (
set LAUNCHER_CP=%%d
)

:check_launcher
if "%LAUNCHER_CP%"=="0" (
echo Failed to find Spark launcher JAR.
if "%SPARK_ASSEMBLY_JAR%"=="0" (
echo Failed to find Spark assembly JAR.
echo You need to build Spark before running this program.
exit /b 1
)
Expand All @@ -64,7 +46,7 @@ if not "x%JAVA_HOME%"=="x" set RUNNER=%JAVA_HOME%\bin\java

rem The launcher library prints the command to be executed in a single line suitable for being
rem executed by the batch interpreter. So read all the output of the launcher into a variable.
for /f "tokens=*" %%i in ('cmd /C ""%RUNNER%" -cp %LAUNCHER_CP% org.apache.spark.launcher.Main %*"') do (
for /f "tokens=*" %%i in ('cmd /C ""%RUNNER%" -cp %SPARK_ASSEMBLY_JAR% org.apache.spark.launcher.Main %SPARK_ASSEMBLY_JAR% %*"') do (
set SPARK_CMD=%%i
)
%SPARK_CMD%
Expand Up @@ -86,10 +86,14 @@ public AbstractCommandBuilder() {
*/
List<String> buildJavaCommand(String extraClassPath) throws IOException {
List<String> cmd = new ArrayList<String>();
if (javaHome == null) {
cmd.add(join(File.separator, System.getProperty("java.home"), "bin", "java"));
} else {
String envJavaHome;

if (javaHome != null) {
cmd.add(join(File.separator, javaHome, "bin", "java"));
} else if ((envJavaHome = System.getenv("JAVA_HOME")) != null) {
cmd.add(join(File.separator, envJavaHome, "bin", "java"));
} else {
cmd.add(join(File.separator, System.getProperty("java.home"), "bin", "java"));
}

// Load extra JAVA_OPTS from conf/java-opts, if it exists.
Expand Down Expand Up @@ -182,7 +186,7 @@ List<String> buildClassPath(String appClassPath) throws IOException {
addToClassPath(cp, String.format("%s/core/target/jars/*", sparkHome));
}

String assembly = findAssembly(scala);
final String assembly = Main.uberJarPath;
addToClassPath(cp, assembly);

// When Hive support is needed, Datanucleus jars must be included on the classpath. Datanucleus
Expand Down Expand Up @@ -270,7 +274,6 @@ String getScalaVersion() {
if (scala != null) {
return scala;
}

String sparkHome = getSparkHome();
File scala210 = new File(sparkHome, "assembly/target/scala-2.10");
File scala211 = new File(sparkHome, "assembly/target/scala-2.11");
Expand Down Expand Up @@ -330,30 +333,6 @@ String getenv(String key) {
return firstNonEmpty(childEnv.get(key), System.getenv(key));
}

/**
 * Locates the single Spark assembly jar for the requested Scala version.
 * <p>
 * In a release install (marked by a RELEASE file under the Spark home) the jar lives in
 * {@code lib/}; in a development checkout it lives under the per-Scala-version assembly
 * build directory. Exactly one matching jar must exist.
 *
 * @param scalaVersion Scala binary version used to pick the dev build directory.
 * @return absolute path of the assembly jar.
 */
private String findAssembly(String scalaVersion) {
  String sparkHome = getSparkHome();
  final File libdir;
  if (!new File(sparkHome, "RELEASE").isFile()) {
    // Dev build: jars are produced under assembly/target/scala-<version>.
    libdir = new File(sparkHome, String.format("assembly/target/scala-%s", scalaVersion));
  } else {
    libdir = new File(sparkHome, "lib");
    checkState(libdir.isDirectory(), "Library directory '%s' does not exist.",
      libdir.getAbsolutePath());
  }

  // Accept only regular files whose full name matches the assembly jar pattern.
  final Pattern jarName = Pattern.compile("spark-assembly.*\\.jar");
  File[] assemblies = libdir.listFiles(new FileFilter() {
    @Override
    public boolean accept(File file) {
      return file.isFile() && jarName.matcher(file.getName()).matches();
    }
  });

  // listFiles() returns null when the directory is unreadable or missing.
  checkState(assemblies != null && assemblies.length > 0, "No assemblies found in '%s'.", libdir);
  checkState(assemblies.length == 1, "Multiple assemblies found in '%s'.", libdir);
  return assemblies[0].getAbsolutePath();
}

private String getConfDir() {
String confDir = getenv("SPARK_CONF_DIR");
return confDir != null ? confDir : join(File.separator, getSparkHome(), "conf");
Expand Down
6 changes: 5 additions & 1 deletion launcher/src/main/java/org/apache/spark/launcher/Main.java
Expand Up @@ -31,7 +31,7 @@
class Main {

/**
* Usage: Main [class] [class args]
* Usage: Main [uberJarPath] [class] [class args]
* <p/>
* This CLI works in two different modes:
* <ul>
Expand All @@ -47,10 +47,14 @@ class Main {
* character. On Windows, the output is a command line suitable for direct execution from the
* script.
*/

static String uberJarPath;

public static void main(String[] argsArray) throws Exception {
checkArgument(argsArray.length > 0, "Not enough arguments: missing class name.");

List<String> args = new ArrayList<String>(Arrays.asList(argsArray));
uberJarPath = args.remove(0);
String className = args.remove(0);

boolean printLaunchCommand;
Expand Down

0 comments on commit 3faa7a4

Please sign in to comment.