From fbb232f39271b7c0e41396c61a31bd8d9c4b32b5 Mon Sep 17 00:00:00 2001
From: WangTaoTheTonic
Date: Wed, 17 Sep 2014 09:53:38 +0800
Subject: [PATCH 1/4] Using a special exit code instead of 1 to represent ClassNotFoundException

---
 bin/spark-sql                                                 | 2 +-
 core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala | 2 +-
 sbin/start-thriftserver.sh                                    | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/bin/spark-sql b/bin/spark-sql
index ae096530cad04..402345b6f8d82 100755
--- a/bin/spark-sql
+++ b/bin/spark-sql
@@ -24,7 +24,7 @@ set -o posix
 
 CLASS="org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver"
 
-CLASS_NOT_FOUND_EXIT_STATUS=1
+CLASS_NOT_FOUND_EXIT_STATUS=1024
 
 # Figure out where Spark is installed
 FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
index 5ed3575816a38..fcea99cfcbbda 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -54,7 +54,7 @@ object SparkSubmit {
   private val SPARK_SHELL = "spark-shell"
   private val PYSPARK_SHELL = "pyspark-shell"
 
-  private val CLASS_NOT_FOUND_EXIT_STATUS = 1
+  private val CLASS_NOT_FOUND_EXIT_STATUS = 1024
 
   // Exposed for testing
   private[spark] var exitFn: () => Unit = () => System.exit(-1)
diff --git a/sbin/start-thriftserver.sh b/sbin/start-thriftserver.sh
index 4ce40fe750384..a354fa79e1895 100755
--- a/sbin/start-thriftserver.sh
+++ b/sbin/start-thriftserver.sh
@@ -27,7 +27,7 @@ set -o posix
 FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
 
 CLASS="org.apache.spark.sql.hive.thriftserver.HiveThriftServer2"
-CLASS_NOT_FOUND_EXIT_STATUS=1
+CLASS_NOT_FOUND_EXIT_STATUS=1024
 
 function usage {
   echo "Usage: ./sbin/start-thriftserver [options] [thrift server options]"

From a2d6465cbf99c74969c4c3fc85be04810df76427 Mon Sep 17 00:00:00 2001
From: WangTaoTheTonic
Date: Thu, 18 Sep 2014 09:20:31 +0800
Subject: [PATCH 2/4] use 127 instead

---
 bin/spark-sql                                                 | 2 +-
 core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala | 2 +-
 sbin/start-thriftserver.sh                                    | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/bin/spark-sql b/bin/spark-sql
index 402345b6f8d82..637ceef5657b7 100755
--- a/bin/spark-sql
+++ b/bin/spark-sql
@@ -24,7 +24,7 @@ set -o posix
 
 CLASS="org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver"
 
-CLASS_NOT_FOUND_EXIT_STATUS=1024
+CLASS_NOT_FOUND_EXIT_STATUS=127
 
 # Figure out where Spark is installed
 FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
index fcea99cfcbbda..055cfa2a5db0d 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -54,7 +54,7 @@ object SparkSubmit {
   private val SPARK_SHELL = "spark-shell"
   private val PYSPARK_SHELL = "pyspark-shell"
 
-  private val CLASS_NOT_FOUND_EXIT_STATUS = 1024
+  private val CLASS_NOT_FOUND_EXIT_STATUS = 127
 
   // Exposed for testing
   private[spark] var exitFn: () => Unit = () => System.exit(-1)
diff --git a/sbin/start-thriftserver.sh b/sbin/start-thriftserver.sh
index a354fa79e1895..2f19cdbf11187 100755
--- a/sbin/start-thriftserver.sh
+++ b/sbin/start-thriftserver.sh
@@ -27,7 +27,7 @@ set -o posix
 FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
 
 CLASS="org.apache.spark.sql.hive.thriftserver.HiveThriftServer2"
-CLASS_NOT_FOUND_EXIT_STATUS=1024
+CLASS_NOT_FOUND_EXIT_STATUS=127
 
 function usage {
   echo "Usage: ./sbin/start-thriftserver [options] [thrift server options]"

From d6ae5591d0a3d7ab8de308159f94e6f2bc537f24 Mon Sep 17 00:00:00 2001
From: WangTaoTheTonic
Date: Thu, 18 Sep 2014 09:59:50 +0800
Subject: [PATCH 3/4] use 101 instead

---
 bin/spark-sql                                                 | 2 +-
 core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala | 2 +-
 sbin/start-thriftserver.sh                                    | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/bin/spark-sql b/bin/spark-sql
index 637ceef5657b7..9d66140b6aa17 100755
--- a/bin/spark-sql
+++ b/bin/spark-sql
@@ -24,7 +24,7 @@ set -o posix
 
 CLASS="org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver"
 
-CLASS_NOT_FOUND_EXIT_STATUS=127
+CLASS_NOT_FOUND_EXIT_STATUS=101
 
 # Figure out where Spark is installed
 FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
index 055cfa2a5db0d..5d15af1326ef0 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -54,7 +54,7 @@ object SparkSubmit {
   private val SPARK_SHELL = "spark-shell"
   private val PYSPARK_SHELL = "pyspark-shell"
 
-  private val CLASS_NOT_FOUND_EXIT_STATUS = 127
+  private val CLASS_NOT_FOUND_EXIT_STATUS = 101
 
   // Exposed for testing
   private[spark] var exitFn: () => Unit = () => System.exit(-1)
diff --git a/sbin/start-thriftserver.sh b/sbin/start-thriftserver.sh
index 2f19cdbf11187..ba953e763faab 100755
--- a/sbin/start-thriftserver.sh
+++ b/sbin/start-thriftserver.sh
@@ -27,7 +27,7 @@ set -o posix
 FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
 
 CLASS="org.apache.spark.sql.hive.thriftserver.HiveThriftServer2"
-CLASS_NOT_FOUND_EXIT_STATUS=127
+CLASS_NOT_FOUND_EXIT_STATUS=101
 
 function usage {
   echo "Usage: ./sbin/start-thriftserver [options] [thrift server options]"

From 645a22a3fe995bcdf0e0748a844591b92cbb3905 Mon Sep 17 00:00:00 2001
From: WangTaoTheTonic
Date: Thu, 18 Sep 2014 16:31:58 +0800
Subject: [PATCH 4/4] Several typos to trigger Jenkins

---
 .../main/scala/org/apache/spark/network/nio/Connection.scala | 2 +-
 .../org/apache/spark/network/nio/ConnectionManager.scala     | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/network/nio/Connection.scala b/core/src/main/scala/org/apache/spark/network/nio/Connection.scala
index 74074a8dcbfff..18172d359cb35 100644
--- a/core/src/main/scala/org/apache/spark/network/nio/Connection.scala
+++ b/core/src/main/scala/org/apache/spark/network/nio/Connection.scala
@@ -460,7 +460,7 @@ private[spark] class ReceivingConnection(
     if (currId != null) currId else super.getRemoteConnectionManagerId()
   }
 
-  // The reciever's remote address is the local socket on remote side : which is NOT
+  // The receiver's remote address is the local socket on remote side : which is NOT
   // the connection manager id of the receiver.
   // We infer that from the messages we receive on the receiver socket.
   private def processConnectionManagerId(header: MessageChunkHeader) {
diff --git a/core/src/main/scala/org/apache/spark/network/nio/ConnectionManager.scala b/core/src/main/scala/org/apache/spark/network/nio/ConnectionManager.scala
index 09d3ea306515b..5aa7e94943561 100644
--- a/core/src/main/scala/org/apache/spark/network/nio/ConnectionManager.scala
+++ b/core/src/main/scala/org/apache/spark/network/nio/ConnectionManager.scala
@@ -501,7 +501,7 @@ private[nio] class ConnectionManager(
 
   def changeConnectionKeyInterest(connection: Connection, ops: Int) {
     keyInterestChangeRequests += ((connection.key, ops))
-    // so that registerations happen !
+    // so that registrations happen !
     wakeupSelector()
   }
 
@@ -832,7 +832,7 @@ private[nio] class ConnectionManager(
   }
 
   /**
-   * Send a message and block until an acknowldgment is received or an error occurs.
+   * Send a message and block until an acknowledgment is received or an error occurs.
    * @param connectionManagerId the message's destination
    * @param message the message being sent
    * @return a Future that either returns the acknowledgment message or captures an exception.
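
Note on the exit-status values tried across these patches: a POSIX parent process observes only the low 8 bits of a child's exit status, so the 1024 from PATCH 1/4 wraps to 0 and is indistinguishable from success, and the 127 from PATCH 2/4 collides with the status shells themselves return for "command not found"; 101 survives truncation and avoids the reserved range. A minimal sketch in any POSIX shell (the subshells are illustrative only, not part of the patches):

    (exit 1024); echo $?   # prints 0   -- low 8 bits of 1024 are 0, looks like success
    (exit 127);  echo $?   # prints 127 -- same status a shell uses for "command not found"
    (exit 101);  echo $?   # prints 101 -- unambiguous, hence CLASS_NOT_FOUND_EXIT_STATUS=101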