From b58f74054f9c02b0548254984dfe46516fe14e18 Mon Sep 17 00:00:00 2001 From: Jakub Nowacki Date: Fri, 6 Oct 2017 14:06:15 +0200 Subject: [PATCH] [SPARK-22495] Fix setup of SPARK_HOME variable on Windows --- appveyor.yml | 1 + bin/find-spark-home.cmd | 60 +++++++++++++++++++++++++++++++++++++++++ bin/pyspark2.cmd | 2 +- bin/run-example.cmd | 4 ++- bin/spark-class2.cmd | 2 +- bin/spark-shell2.cmd | 4 ++- bin/sparkR2.cmd | 2 +- 7 files changed, 70 insertions(+), 5 deletions(-) create mode 100644 bin/find-spark-home.cmd diff --git a/appveyor.yml b/appveyor.yml index dc2d81fcdc091..48740920cd09b 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -33,6 +33,7 @@ only_commits: - core/src/main/scala/org/apache/spark/api/r/ - mllib/src/main/scala/org/apache/spark/ml/r/ - core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala + - bin/*.cmd cache: - C:\Users\appveyor\.m2 diff --git a/bin/find-spark-home.cmd b/bin/find-spark-home.cmd new file mode 100644 index 0000000000000..c75e7eedb9418 --- /dev/null +++ b/bin/find-spark-home.cmd @@ -0,0 +1,60 @@ +@echo off + +rem +rem Licensed to the Apache Software Foundation (ASF) under one or more +rem contributor license agreements. See the NOTICE file distributed with +rem this work for additional information regarding copyright ownership. +rem The ASF licenses this file to You under the Apache License, Version 2.0 +rem (the "License"); you may not use this file except in compliance with +rem the License. You may obtain a copy of the License at +rem +rem http://www.apache.org/licenses/LICENSE-2.0 +rem +rem Unless required by applicable law or agreed to in writing, software +rem distributed under the License is distributed on an "AS IS" BASIS, +rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +rem See the License for the specific language governing permissions and +rem limitations under the License. 
+rem + +rem Path to Python script finding SPARK_HOME +set FIND_SPARK_HOME_PYTHON_SCRIPT=%~dp0find_spark_home.py + +rem Default to standard python interpreter unless told otherwise +set PYTHON_RUNNER=python +rem If PYSPARK_DRIVER_PYTHON is set, it overwrites the python version +if not "x%PYSPARK_DRIVER_PYTHON%"=="x" ( + set PYTHON_RUNNER=%PYSPARK_DRIVER_PYTHON% +) +rem If PYSPARK_PYTHON is set, it overwrites the python version +if not "x%PYSPARK_PYTHON%"=="x" ( + set PYTHON_RUNNER=%PYSPARK_PYTHON% +) + +rem If there is python installed, trying to use the root dir as SPARK_HOME +where %PYTHON_RUNNER% > nul 2>&1 +if %ERRORLEVEL% neq 0 ( + if not exist %PYTHON_RUNNER% ( + if "x%SPARK_HOME%"=="x" ( + echo Missing Python executable '%PYTHON_RUNNER%', defaulting to '%~dp0..' for SPARK_HOME ^ +environment variable. Please install Python or specify the correct Python executable in ^ +PYSPARK_DRIVER_PYTHON or PYSPARK_PYTHON environment variable to detect SPARK_HOME safely. + set SPARK_HOME=%~dp0.. + ) + ) +) + +rem Only attempt to find SPARK_HOME if it is not set. +if "x%SPARK_HOME%"=="x" ( + if not exist "%FIND_SPARK_HOME_PYTHON_SCRIPT%" ( + rem If we are not in the same directory as find_spark_home.py we are not pip installed so we don't + rem need to search the different Python directories for a Spark installation. + rem Note only that, if the user has pip installed PySpark but is directly calling pyspark-shell or + rem spark-submit in another directory we want to use that version of PySpark rather than the + rem pip installed version of PySpark. + set SPARK_HOME=%~dp0.. + ) else ( + rem We are pip installed, use the Python script to resolve a reasonable SPARK_HOME + for /f "delims=" %%i in ('%PYTHON_RUNNER% %FIND_SPARK_HOME_PYTHON_SCRIPT%') do set SPARK_HOME=%%i + ) +) diff --git a/bin/pyspark2.cmd b/bin/pyspark2.cmd index 46d4d5c883cfb..663670f2fddaf 100644 --- a/bin/pyspark2.cmd +++ b/bin/pyspark2.cmd @@ -18,7 +18,7 @@ rem limitations under the License. 
rem rem Figure out where the Spark framework is installed -set SPARK_HOME=%~dp0.. +call "%~dp0find-spark-home.cmd" call "%SPARK_HOME%\bin\load-spark-env.cmd" set _SPARK_CMD_USAGE=Usage: bin\pyspark.cmd [options] diff --git a/bin/run-example.cmd b/bin/run-example.cmd index f9b786e92b823..7cfaa7e996e89 100644 --- a/bin/run-example.cmd +++ b/bin/run-example.cmd @@ -17,6 +17,8 @@ rem See the License for the specific language governing permissions and rem limitations under the License. rem -set SPARK_HOME=%~dp0.. +rem Figure out where the Spark framework is installed +call "%~dp0find-spark-home.cmd" + set _SPARK_CMD_USAGE=Usage: ./bin/run-example [options] example-class [example args] cmd /V /E /C "%~dp0spark-submit.cmd" run-example %* diff --git a/bin/spark-class2.cmd b/bin/spark-class2.cmd index a93fd2f0e54bc..5da7d7a430d79 100644 --- a/bin/spark-class2.cmd +++ b/bin/spark-class2.cmd @@ -18,7 +18,7 @@ rem limitations under the License. rem rem Figure out where the Spark framework is installed -set SPARK_HOME=%~dp0.. +call "%~dp0find-spark-home.cmd" call "%SPARK_HOME%\bin\load-spark-env.cmd" diff --git a/bin/spark-shell2.cmd b/bin/spark-shell2.cmd index 7b5d396be888c..aaf71906c6526 100644 --- a/bin/spark-shell2.cmd +++ b/bin/spark-shell2.cmd @@ -17,7 +17,9 @@ rem See the License for the specific language governing permissions and rem limitations under the License. rem -set SPARK_HOME=%~dp0.. +rem Figure out where the Spark framework is installed +call "%~dp0find-spark-home.cmd" + set _SPARK_CMD_USAGE=Usage: .\bin\spark-shell.cmd [options] rem SPARK-4161: scala does not assume use of the java classpath, diff --git a/bin/sparkR2.cmd b/bin/sparkR2.cmd index 459b780e2ae33..b48bea345c0b9 100644 --- a/bin/sparkR2.cmd +++ b/bin/sparkR2.cmd @@ -18,7 +18,7 @@ rem limitations under the License. rem rem Figure out where the Spark framework is installed -set SPARK_HOME=%~dp0.. +call "%~dp0find-spark-home.cmd" call "%SPARK_HOME%\bin\load-spark-env.cmd"