ZEPPELIN-377 CI hanging on ./testing/startSparkCluster.sh 1.4.0 2.3
Address https://issues.apache.org/jira/browse/ZEPPELIN-377.

This patch changes the Spark package download location from the Apache archive to a mirror so that the download finishes within 10 minutes.

It also adds a missing test for Spark 1.5.1 and changes the existing test version from 1.4.0 to 1.4.1.
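For illustration, the effective change to the download step looks roughly like this, using Spark 1.5.1 on Hadoop 2.3 as an example (the mirror host is whatever closer.cgi returns; shown here as a placeholder):

# Before: fixed central archive, which did not reliably finish within 10 minutes on CI
wget http://archive.apache.org/dist/spark/spark-1.5.1/spark-1.5.1-bin-hadoop2.3.tgz
# After (Spark 1.3 and later): a nearby mirror chosen by Apache's closer.cgi service
wget http://<preferred-mirror>/spark/spark-1.5.1/spark-1.5.1-bin-hadoop2.3.tgz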

Author: Lee moon soo <moon@apache.org>

Closes apache#380 from Leemoonsoo/fix_spark_test and squashes the following commits:

142583a [Lee moon soo] Add test for 1.5.1
b8323e6 [Lee moon soo] Use mirror for 1.3.x and later version of spark
Leemoonsoo committed Nov 2, 2015
1 parent ac5ff10 commit 0a82a93
Showing 4 changed files with 39 additions and 6 deletions.
17 changes: 14 additions & 3 deletions .travis.yml
@@ -28,27 +28,38 @@ before_script:
-

script:
# spark 1.4
# spark 1.5
- mvn package -Pbuild-distr -Phadoop-2.3 -Ppyspark -B
- ./testing/startSparkCluster.sh 1.4.0 2.3
- ./testing/startSparkCluster.sh 1.5.1 2.3
- echo "export SPARK_HOME=`pwd`/spark-1.5.1-bin-hadoop2.3" > conf/zeppelin-env.sh
- mvn verify -Pusing-packaged-distr -Phadoop-2.3 -Ppyspark -B
- ./testing/stopSparkCluster.sh 1.4.0 2.3
- ./testing/stopSparkCluster.sh 1.5.1 2.3
# spark 1.4
- rm -rf `pwd`/interpreter/spark
- mvn package -DskipTests -Pspark-1.4 -Phadoop-2.3 -Ppyspark -B -pl 'zeppelin-interpreter,spark-dependencies,spark'
- ./testing/startSparkCluster.sh 1.4.1 2.3
- echo "export SPARK_HOME=`pwd`/spark-1.4.1-bin-hadoop2.3" > conf/zeppelin-env.sh
- mvn package -Pspark-1.4 -Phadoop-2.3 -B -pl 'zeppelin-interpreter,zeppelin-zengine,zeppelin-server' -Dtest=org.apache.zeppelin.rest.*Test -DfailIfNoTests=false
- ./testing/stopSparkCluster.sh 1.4.1 2.3
# spark 1.3
- rm -rf `pwd`/interpreter/spark
- mvn package -DskipTests -Pspark-1.3 -Phadoop-2.3 -Ppyspark -B -pl 'zeppelin-interpreter,spark-dependencies,spark'
- ./testing/startSparkCluster.sh 1.3.1 2.3
- echo "export SPARK_HOME=`pwd`/spark-1.3.1-bin-hadoop2.3" > conf/zeppelin-env.sh
- mvn package -Pspark-1.3 -Phadoop-2.3 -B -pl 'zeppelin-interpreter,zeppelin-zengine,zeppelin-server' -Dtest=org.apache.zeppelin.rest.*Test -DfailIfNoTests=false
- ./testing/stopSparkCluster.sh 1.3.1 2.3
# spark 1.2
- rm -rf `pwd`/interpreter/spark
- mvn package -Pspark-1.2 -Phadoop-2.3 -Ppyspark -B -pl 'zeppelin-interpreter,spark-dependencies,spark'
- ./testing/startSparkCluster.sh 1.2.1 2.3
- echo "export SPARK_HOME=`pwd`/spark-1.2.1-bin-hadoop2.3" > conf/zeppelin-env.sh
- mvn package -Pspark-1.2 -Phadoop-2.3 -B -pl 'zeppelin-interpreter,zeppelin-zengine,zeppelin-server' -Dtest=org.apache.zeppelin.rest.*Test -DfailIfNoTests=false
- ./testing/stopSparkCluster.sh 1.2.1 2.3
# spark 1.1
- rm -rf `pwd`/interpreter/spark
- mvn package -Pspark-1.1 -Phadoop-2.3 -Ppyspark -B -pl 'zeppelin-interpreter,spark-dependencies,spark'
- ./testing/startSparkCluster.sh 1.1.1 2.3
- echo "export SPARK_HOME=`pwd`/spark-1.1.1-bin-hadoop2.3" > conf/zeppelin-env.sh
- mvn package -Pspark-1.1 -Phadoop-2.3 -B -pl 'zeppelin-interpreter,zeppelin-zengine,zeppelin-server' -Dtest=org.apache.zeppelin.rest.*Test -DfailIfNoTests=false
- ./testing/stopSparkCluster.sh 1.1.1 2.3
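Because removed and added lines are interleaved above, one post-change cycle written out in full may be easier to follow; for example, the Spark 1.5 pass (a sketch, run from the repository root):

# Build the distribution, start a local Spark 1.5.1 cluster, point Zeppelin at it,
# run the packaged-distribution verification, then tear the cluster down.
mvn package -Pbuild-distr -Phadoop-2.3 -Ppyspark -B
./testing/startSparkCluster.sh 1.5.1 2.3
echo "export SPARK_HOME=`pwd`/spark-1.5.1-bin-hadoop2.3" > conf/zeppelin-env.sh
mvn verify -Pusing-packaged-distr -Phadoop-2.3 -Ppyspark -B
./testing/stopSparkCluster.sh 1.5.1 2.3

The older Spark versions (1.4 down to 1.1) repeat the same pattern, but rebuild only the interpreter modules (-pl ...) and run the REST API tests (org.apache.zeppelin.rest.*Test) instead of the full verify.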

1 change: 1 addition & 0 deletions pom.xml
@@ -456,6 +456,7 @@
<exclude>STYLE.md</exclude>
<exclude>Roadmap.md</exclude>
<exclude>conf/interpreter.json</exclude>
<exclude>conf/zeppelin-env.sh</exclude>
<exclude>spark-*-bin*/**</exclude>
</excludes>
</configuration>
18 changes: 15 additions & 3 deletions testing/startSparkCluster.sh
@@ -31,7 +31,19 @@ ZEPPELIN_HOME="$(cd "${FWDIR}/.."; pwd)"
export SPARK_HOME=${ZEPPELIN_HOME}/spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}
echo "SPARK_HOME is ${SPARK_HOME} "
if [ ! -d "${SPARK_HOME}" ]; then
wget -q http://archive.apache.org/dist/spark/spark-${SPARK_VERSION}/spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz
echo "${SPARK_VERSION}" | grep "^1.[12].[0-9]" > /dev/null
if [ $? -eq 0 ]; then
# spark 1.1.x and spark 1.2.x can be downloaded from archive
wget http://archive.apache.org/dist/spark/spark-${SPARK_VERSION}/spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz
else
# spark 1.3.x and later can be downloaded from mirror
# get download address from mirror
MIRROR_INFO=$(curl -s "http://www.apache.org/dyn/closer.cgi/spark/spark-${SPARK_VERSION}/spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz?asjson=1")

PREFERRED=$(echo "${MIRROR_INFO}" | grep preferred | sed 's/[^"]*.preferred.: .\([^"]*\).*/\1/g')
PATHINFO=$(echo "${MIRROR_INFO}" | grep path_info | sed 's/[^"]*.path_info.: .\([^"]*\).*/\1/g')
wget "${PREFFERED}${PATHINFO}"
fi
tar zxf spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz
fi
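For reference, the closer.cgi call above returns a JSON document from which the script extracts two fields; an abridged sketch of the exchange (the mirror host below is invented):

# Illustrative request for Spark 1.5.1 / Hadoop 2.3
curl -s "http://www.apache.org/dyn/closer.cgi/spark/spark-1.5.1/spark-1.5.1-bin-hadoop2.3.tgz?asjson=1"
# Fields of interest in the reply:
#   "preferred": "http://mirror.example.org/apache/"
#   "path_info": "spark/spark-1.5.1/spark-1.5.1-bin-hadoop2.3.tgz"
# Concatenated, they form the actual download URL:
# wget "http://mirror.example.org/apache/spark/spark-1.5.1/spark-1.5.1-bin-hadoop2.3.tgz"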

@@ -46,8 +58,8 @@ export SPARK_MASTER_WEBUI_PORT=7072
export SPARK_WORKER_WEBUI_PORT=8082
${SPARK_HOME}/sbin/start-master.sh

echo ${SPARK_VERSION} | grep "^1.4" > /dev/null
if [ $? -ne 0 ]; then # spark 1.3 or prior
echo ${SPARK_VERSION} | grep "^1.[123].[0-9]" > /dev/null
if [ $? -eq 0 ]; then # spark 1.3 or prior
${SPARK_HOME}/sbin/start-slave.sh 1 `hostname`:${SPARK_MASTER_PORT}
else
${SPARK_HOME}/sbin/start-slave.sh spark://`hostname`:7071
@@ -22,13 +22,15 @@
import java.lang.ref.WeakReference;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.HttpMethodBase;
import org.apache.commons.httpclient.methods.*;
import org.apache.zeppelin.interpreter.Interpreter.RegisteredInterpreter;
import org.apache.zeppelin.interpreter.InterpreterGroup;
import org.apache.zeppelin.interpreter.InterpreterOption;
import org.apache.zeppelin.interpreter.InterpreterSetting;
@@ -197,6 +199,13 @@ private static boolean isActiveSparkHome(File dir) {

protected static void shutDown() throws Exception {
if (!wasRunning) {
// restart interpreter to stop all interpreter processes
List<String> settingList = ZeppelinServer.notebook.getInterpreterFactory()
.getDefaultInterpreterSettingList();
for (String setting : settingList) {
ZeppelinServer.notebook.getInterpreterFactory().restart(setting);
}

LOG.info("Terminating test Zeppelin...");
ZeppelinServer.jettyServer.stop();
executor.shutdown();
