Filter out spark-submit options when starting Python gateway
liancheng committed Aug 9, 2014
1 parent e630d19 · commit 5afc584
Showing 1 changed file with 4 additions and 3 deletions.
bin/pyspark (7 changes: 4 additions & 3 deletions)
@@ -23,6 +23,8 @@ FWDIR="$(cd `dirname $0`/..; pwd)"
 # Export this as SPARK_HOME
 export SPARK_HOME="$FWDIR"
 
+source $FWDIR/bin/utils.sh
+
 SCALA_VERSION=2.10
 
 if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
@@ -67,9 +69,10 @@ fi
 # We export Spark submit arguments as an environment variable because shell.py must run as a
 # PYTHONSTARTUP script, which does not take in arguments. This is required for IPython notebooks.
 
+gatherSparkSubmitOpts $@
 PYSPARK_SUBMIT_ARGS=""
 whitespace="[[:space:]]"
-for i in "$@"; do
+for i in ${SUBMISSION_OPTS[@]}; do
   if [[ $i =~ \" ]]; then i=$(echo $i | sed 's/\"/\\\"/g'); fi
   if [[ $i =~ $whitespace ]]; then i=\"$i\"; fi
   PYSPARK_SUBMIT_ARGS="$PYSPARK_SUBMIT_ARGS $i"
@@ -86,8 +89,6 @@ if [[ -n "$SPARK_TESTING" ]]; then
   exit
 fi
 
-source $FWDIR/bin/utils.sh
-
 # If a python file is provided, directly run spark-submit.
 if [[ "$1" =~ \.py$ ]]; then
   echo -e "\nWARNING: Running python applications through ./bin/pyspark is deprecated as of Spark 1.0." 1>&2
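The key change is that PYSPARK_SUBMIT_ARGS is now built only from SUBMISSION_OPTS, the array that gatherSparkSubmitOpts (sourced from bin/utils.sh) fills with options spark-submit itself understands, instead of from the raw "$@". The helper's actual definition lives in bin/utils.sh and is not part of this diff; the following is a minimal sketch of how such a splitter could behave, with the option list, quoting, and example names being assumptions made for illustration only.

#!/usr/bin/env bash
# Illustrative sketch only: the real gatherSparkSubmitOpts is defined in
# bin/utils.sh and may differ in option coverage and error handling.
gatherSparkSubmitOpts() {
  SUBMISSION_OPTS=()   # options that spark-submit itself understands
  APPLICATION_OPTS=()  # everything else, left for the user application
  while (($#)); do
    case "$1" in
      --master | --deploy-mode | --name | --conf | --py-files | --jars | \
      --files | --properties-file | --driver-memory | --executor-memory | --queue)
        # These options take a value; keep the flag and its argument together.
        SUBMISSION_OPTS+=("$1" "$2")
        shift 2
        ;;
      *)
        APPLICATION_OPTS+=("$1")
        shift
        ;;
    esac
  done
}

# Example: only the spark-submit options land in SUBMISSION_OPTS, so the loop
# in bin/pyspark builds PYSPARK_SUBMIT_ARGS from them alone, and arguments
# meant for the user script no longer leak into the Python gateway.
gatherSparkSubmitOpts --master "local[2]" --name demo my_app.py --my-app-flag
printf 'submit opt: %s\n' "${SUBMISSION_OPTS[@]}"
printf 'app opt:    %s\n' "${APPLICATION_OPTS[@]}"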
