From 4ae3adb24424d8d79969a43bbf6489f4c662afe3 Mon Sep 17 00:00:00 2001
From: Lee moon soo
Date: Sat, 21 Jan 2017 05:26:25 +0800
Subject: [PATCH 1/3] [ZEPPELIN-1455] Fix flaky test: AbstractAngularElemTest

### What is this PR for?
This PR fixes the flaky test [ZEPPELIN-1455](https://issues.apache.org/jira/browse/ZEPPELIN-1455).
According to http://doc.scalatest.org/1.8/org/scalatest/concurrent/Eventually.html, the default timeout of `eventually` is 150 milliseconds. Set a sufficiently long timeout for the test.

### What type of PR is it?
Hot Fix

### Todos
* [x] - increase timeout

### What is the Jira issue?
https://issues.apache.org/jira/browse/ZEPPELIN-1455

### Questions:
* Does the licenses files need update? no
* Is there breaking changes for older versions? no
* Does this needs documentation? no

Author: Lee moon soo

Closes #1920 from Leemoonsoo/ZEPPELIN-1455 and squashes the following commits:

13a993d [Lee moon soo] Increase tolerance of eventually

(cherry picked from commit 99c21c4)
---
 .../zeppelin/display/angular/AbstractAngularElemTest.scala | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/zeppelin-display/src/test/scala/org/apache/zeppelin/display/angular/AbstractAngularElemTest.scala b/zeppelin-display/src/test/scala/org/apache/zeppelin/display/angular/AbstractAngularElemTest.scala
index 9b5cd6269c8..dcf221ec57a 100644
--- a/zeppelin-display/src/test/scala/org/apache/zeppelin/display/angular/AbstractAngularElemTest.scala
+++ b/zeppelin-display/src/test/scala/org/apache/zeppelin/display/angular/AbstractAngularElemTest.scala
@@ -23,6 +23,7 @@ import org.apache.zeppelin.display.{AngularObject, AngularObjectRegistry, GUI}
 import org.apache.zeppelin.interpreter._
 import org.apache.zeppelin.user.AuthenticationInfo
 import org.scalatest.concurrent.Eventually
+import org.scalatest.time.{Seconds, Span}
 import org.scalatest.{BeforeAndAfter, BeforeAndAfterEach, FlatSpec, Matchers}
 
 /**
@@ -69,12 +70,12 @@ trait AbstractAngularElemTest
     // click create thread for callback function to run. So it'll may not immediately invoked
     // after click. therefore eventually should be
     click(elem)
-    eventually {
+    eventually (timeout(Span(5, Seconds))) {
       a should be(1)
     }
 
     click(elem)
-    eventually {
+    eventually (timeout(Span(5, Seconds))) {
       a should be(2)
     }
 
@@ -128,7 +129,7 @@ trait AbstractAngularElemTest
 
     click(elem)
 
-    eventually { modelValue should be("value")}
+    eventually (timeout(Span(5, Seconds))) { modelValue should be("value")}
   }
 
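For context, the ScalaTest API adopted above can be exercised on its own. The sketch below is illustrative and not part of the patch: the object name, the `clicks` counter, and the simulated 500 ms delay are invented, but `eventually(timeout(Span(5, Seconds)))` is the same call the test now uses.

```scala
import org.scalatest.Matchers._
import org.scalatest.concurrent.Eventually._
import org.scalatest.time.{Seconds, Span}

// Hypothetical standalone sketch: assert on a value that is updated from
// another thread, retrying for up to 5 seconds instead of relying on
// ScalaTest's default 150 ms patience.
object EventuallyTimeoutSketch extends App {
  @volatile var clicks = 0

  // Simulate a click callback that fires on another thread after a delay.
  new Thread(new Runnable {
    override def run(): Unit = {
      Thread.sleep(500)
      clicks = 1
    }
  }).start()

  // Retries the block until the assertion passes or the 5-second timeout expires.
  eventually(timeout(Span(5, Seconds))) {
    clicks should be(1)
  }
  println("callback observed within the timeout")
}
```

Without the explicit `timeout`, `eventually` gives up after roughly 150 ms, which is what made the original test flaky on slow CI machines.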
From f1c8041f4a9b7122d64076c5b1b014a08128629a Mon Sep 17 00:00:00 2001
From: Igor Drozdov
Date: Mon, 16 Jan 2017 17:19:35 +0800
Subject: [PATCH 2/3] [ZEPPELIN-1972] Preserve context classloader

### What is this PR for?
`IMain.interpret` changes the current thread's context classloader. This can cause various issues and is definitely the reason for the [ZEPPELIN-1738](https://issues.apache.org/jira/browse/ZEPPELIN-1738) test failures.
It is a known Scala bug, see https://issues.scala-lang.org/browse/SI-9587.
As a workaround, we need to save and restore the context classloader manually.

### What type of PR is it?
Bug Fix

### What is the Jira issue?
[ZEPPELIN-1972](https://issues.apache.org/jira/browse/ZEPPELIN-1972)

### How should this be tested?
Run the Ignite interpreter test and the Ignite SQL interpreter test in the same thread:
```
mvn test -pl ignite -am -Pscala-2.11 -Dtest=org.apache.zeppelin.ignite.IgniteInterpreterTest#testInterpret,org.apache.zeppelin.ignite.IgniteSqlInterpreterTest#testSql -DfailIfNoTests=false
```

### Questions:
* Does the licenses files need update? No
* Is there breaking changes for older versions? No
* Does this needs documentation? No

Author: Igor Drozdov

Closes #1911 from DrIgor/ZEPPELIN-1972 and squashes the following commits:

35c5abd [Igor Drozdov] Preserve context classloader

(cherry picked from commit 859d175)
---
 .../zeppelin/ignite/IgniteInterpreter.java | 22 +++++++++++++------
 1 file changed, 15 insertions(+), 7 deletions(-)

diff --git a/ignite/src/main/java/org/apache/zeppelin/ignite/IgniteInterpreter.java b/ignite/src/main/java/org/apache/zeppelin/ignite/IgniteInterpreter.java
index d54152a904a..71b58595d6c 100644
--- a/ignite/src/main/java/org/apache/zeppelin/ignite/IgniteInterpreter.java
+++ b/ignite/src/main/java/org/apache/zeppelin/ignite/IgniteInterpreter.java
@@ -216,15 +216,20 @@ private Ignite getIgnite() {
   }
 
   private void initIgnite() {
-    imain.interpret("@transient var _binder = new java.util.HashMap[String, Object]()");
-    Map binder = (Map) getLastObject();
+    ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
+    try {
+      imain.interpret("@transient var _binder = new java.util.HashMap[String, Object]()");
+      Map binder = (Map) getLastObject();
 
-    if (getIgnite() != null) {
-      binder.put("ignite", ignite);
+      if (getIgnite() != null) {
+        binder.put("ignite", ignite);
 
-      imain.interpret("@transient val ignite = "
-          + "_binder.get(\"ignite\")"
-          + ".asInstanceOf[org.apache.ignite.Ignite]");
+        imain.interpret("@transient val ignite = "
+            + "_binder.get(\"ignite\")"
+            + ".asInstanceOf[org.apache.ignite.Ignite]");
+      }
+    } finally {
+      Thread.currentThread().setContextClassLoader(contextClassLoader);
     }
   }
 
@@ -295,11 +300,14 @@ private InterpreterResult interpret(String[] lines) {
       }
     }
 
+    ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
     try {
       code = getResultCode(imain.interpret(incomplete + s));
     } catch (Exception e) {
       logger.info("Interpreter exception", e);
       return new InterpreterResult(Code.ERROR, InterpreterUtils.getMostRelevantMessage(e));
+    } finally {
+      Thread.currentThread().setContextClassLoader(contextClassLoader);
     }
 
     if (code == Code.ERROR) {
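The save-and-restore workaround described above can be sketched in isolation. This is not code from the patch: the object, the helper `withPreservedContextClassLoader`, and the stand-in `runInterpretedCode` are invented for illustration of the pattern.

```scala
// Hypothetical sketch of the workaround: capture the context classloader
// before calling into code that may replace it, and restore it afterwards.
object ContextClassLoaderSketch {

  // Stand-in for a call like IMain.interpret that can swap the thread's
  // context classloader as a side effect.
  def runInterpretedCode(): Unit = {
    Thread.currentThread().setContextClassLoader(new ClassLoader() {})
  }

  def withPreservedContextClassLoader[T](body: => T): T = {
    val saved = Thread.currentThread().getContextClassLoader
    try {
      body
    } finally {
      // Always restore, even if body throws.
      Thread.currentThread().setContextClassLoader(saved)
    }
  }

  def main(args: Array[String]): Unit = {
    val before = Thread.currentThread().getContextClassLoader
    withPreservedContextClassLoader {
      runInterpretedCode()
    }
    val after = Thread.currentThread().getContextClassLoader
    println(s"classloader unchanged: ${before eq after}") // classloader unchanged: true
  }
}
```

The patch itself inlines the same try/finally directly around the `imain.interpret` calls in `initIgnite()` and `interpret()` rather than introducing a helper.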
From 380432c4cc1cfa0f2b49ddbd06c7c463fe1d633a Mon Sep 17 00:00:00 2001
From: z0621
Date: Mon, 24 Apr 2017 09:42:21 +0800
Subject: [PATCH 3/3] [HOTFIX] Fix CI build failure on branch-0.6

---
 .travis.yml     |  34 +++----------
 travis_check.py | 125 ++++++++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 132 insertions(+), 27 deletions(-)
 create mode 100644 travis_check.py

diff --git a/.travis.yml b/.travis.yml
index 680a9f59d6e..75d8ac217ff 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -28,52 +28,32 @@ addons:
       - r-packages-precise
     packages:
       - r-base-dev
-      - r-cran-evaluate
-      - r-cran-base64enc
 
 matrix:
   include:
     # Test all modules with spark 2.0.0 and scala 2.11
     - jdk: "oraclejdk7"
-      env: SCALA_VER="2.11" SPARK_VER="2.0.0" HADOOP_VER="2.3" PROFILE="-Pspark-2.0 -Phadoop-2.3 -Ppyspark -Psparkr -Pscalding -Pscala-2.11" BUILD_FLAG="package -Pbuild-distr" TEST_FLAG="verify -Pusing-packaged-distr" TEST_PROJECTS=""
+      env: SCALA_VER="2.11" SPARK_VER="2.0.2" HADOOP_VER="2.6" PROFILE="-Pspark-2.0 -Phadoop-2.6 -Ppyspark -Psparkr -Pscalding -Pscala-2.11" BUILD_FLAG="package -Pbuild-distr" TEST_FLAG="verify -Pusing-packaged-distr" TEST_PROJECTS=""
 
     # Test all modules with scala 2.10
    - jdk: "oraclejdk7"
-      env: SCALA_VER="2.10" SPARK_VER="1.6.1" HADOOP_VER="2.3" PROFILE="-Pspark-1.6 -Pr -Phadoop-2.3 -Ppyspark -Psparkr -Pscalding -Pscala-2.10" BUILD_FLAG="package -Pbuild-distr" TEST_FLAG="verify -Pusing-packaged-distr" TEST_PROJECTS=""
+      env: SCALA_VER="2.10" SPARK_VER="1.6.3" HADOOP_VER="2.6" PROFILE="-Pspark-1.6 -Pr -Phadoop-2.6 -Ppyspark -Psparkr -Pscalding -Pscala-2.10" BUILD_FLAG="package -Pbuild-distr" TEST_FLAG="verify -Pusing-packaged-distr" TEST_PROJECTS=""
 
     # Test all modules with scala 2.11
     - jdk: "oraclejdk7"
-      env: SCALA_VER="2.11" SPARK_VER="1.6.1" HADOOP_VER="2.3" PROFILE="-Pspark-1.6 -Pr -Phadoop-2.3 -Ppyspark -Psparkr -Pscalding -Pscala-2.11" BUILD_FLAG="package -Pbuild-distr" TEST_FLAG="verify -Pusing-packaged-distr" TEST_PROJECTS=""
+      env: SCALA_VER="2.11" SPARK_VER="1.6.3" HADOOP_VER="2.6" PROFILE="-Pspark-1.6 -Pr -Phadoop-2.6 -Ppyspark -Psparkr -Pscalding -Pscala-2.11" BUILD_FLAG="package -Pbuild-distr" TEST_FLAG="verify -Pusing-packaged-distr" TEST_PROJECTS=""
 
-    # Test spark module for 1.5.2
+    # Test selenium with spark module for 1.6.3
     - jdk: "oraclejdk7"
-      env: SCALA_VER="2.10" SPARK_VER="1.5.2" HADOOP_VER="2.3" PROFILE="-Pspark-1.5 -Pr -Phadoop-2.3 -Ppyspark -Psparkr" BUILD_FLAG="package -DskipTests" TEST_FLAG="verify" TEST_PROJECTS="-pl zeppelin-interpreter,zeppelin-zengine,zeppelin-server,zeppelin-display,spark-dependencies,spark,r -Dtest=org.apache.zeppelin.rest.*Test,org.apache.zeppelin.spark* -DfailIfNoTests=false"
-
-    # Test spark module for 1.4.1
-    - jdk: "oraclejdk7"
-      env: SCALA_VER="2.10" SPARK_VER="1.4.1" HADOOP_VER="2.3" PROFILE="-Pspark-1.4 -Pr -Phadoop-2.3 -Ppyspark -Psparkr" BUILD_FLAG="package -DskipTests" TEST_FLAG="verify" TEST_PROJECTS="-pl zeppelin-interpreter,zeppelin-zengine,zeppelin-server,zeppelin-display,spark-dependencies,spark,r -Dtest=org.apache.zeppelin.rest.*Test,org.apache.zeppelin.spark* -DfailIfNoTests=false"
-
-    # Test spark module for 1.3.1
-    - jdk: "oraclejdk7"
-      env: SCALA_VER="2.10" SPARK_VER="1.3.1" HADOOP_VER="2.3" PROFILE="-Pspark-1.3 -Phadoop-2.3 -Ppyspark" BUILD_FLAG="package -DskipTests" TEST_FLAG="verify" TEST_PROJECTS="-pl zeppelin-interpreter,zeppelin-zengine,zeppelin-server,zeppelin-display,spark-dependencies,spark -Dtest=org.apache.zeppelin.rest.*Test,org.apache.zeppelin.spark* -DfailIfNoTests=false"
-
-    # Test spark module for 1.2.2
-    - jdk: "oraclejdk7"
-      env: SCALA_VER="2.10" SPARK_VER="1.2.2" HADOOP_VER="2.3" PROFILE="-Pspark-1.2 -Phadoop-2.3 -Ppyspark" BUILD_FLAG="package -DskipTests" TEST_FLAG="verify" TEST_PROJECTS="-pl zeppelin-interpreter,zeppelin-zengine,zeppelin-server,zeppelin-display,spark-dependencies,spark -Dtest=org.apache.zeppelin.rest.*Test,org.apache.zeppelin.spark* -DfailIfNoTests=false"
-
-    # Test spark module for 1.1.1
-    - jdk: "oraclejdk7"
-      env: SCALA_VER="2.10" SPARK_VER="1.1.1" HADOOP_VER="2.3" PROFILE="-Pspark-1.1 -Phadoop-2.3 -Ppyspark" BUILD_FLAG="package -DskipTests" TEST_FLAG="verify" TEST_PROJECTS="-pl zeppelin-interpreter,zeppelin-zengine,zeppelin-server,zeppelin-display,spark-dependencies,spark -Dtest=org.apache.zeppelin.rest.*Test,org.apache.zeppelin.spark* -DfailIfNoTests=false"
-
-    # Test selenium with spark module for 1.6.1
-    - jdk: "oraclejdk7"
-      env: TEST_SELENIUM="true" SCALA_VER="2.10" SPARK_VER="1.6.1" HADOOP_VER="2.3" PROFILE="-Pspark-1.6 -Phadoop-2.3 -Ppyspark" BUILD_FLAG="package -DskipTests" TEST_FLAG="verify" TEST_PROJECTS="-pl zeppelin-interpreter,zeppelin-zengine,zeppelin-server,zeppelin-display,spark-dependencies,spark -Dtest=org.apache.zeppelin.AbstractFunctionalSuite -DfailIfNoTests=false"
+      env: TEST_SELENIUM="true" SCALA_VER="2.10" SPARK_VER="1.6.3" HADOOP_VER="2.6" PROFILE="-Pspark-1.6 -Phadoop-2.6 -Ppyspark" BUILD_FLAG="package -DskipTests" TEST_FLAG="verify" TEST_PROJECTS="-pl zeppelin-interpreter,zeppelin-zengine,zeppelin-server,zeppelin-display,spark-dependencies,spark -Dtest=org.apache.zeppelin.AbstractFunctionalSuite -DfailIfNoTests=false"
 
 before_install:
   - "ls -la .spark-dist ${HOME}/.m2/repository/.cache/maven-download-plugin"
   - mkdir -p ~/R
   - echo 'R_LIBS=~/R' > ~/.Renviron
   - R -e "install.packages('knitr', repos = 'http://cran.us.r-project.org', lib='~/R')"
+  - R -e "install.packages('evaluate', repos = 'http://cran.us.r-project.org', lib='~/R')"
+  - R -e "install.packages('base64enc', repos = 'http://cran.us.r-project.org', lib='~/R')"
   - export R_LIBS='~/R'
   - "/sbin/start-stop-daemon --start --quiet --pidfile /tmp/custom_xvfb_99.pid --make-pidfile --background --exec /usr/bin/Xvfb -- :99 -ac -screen 0 1600x1024x16"
   - ./dev/change_scala_version.sh $SCALA_VER
diff --git a/travis_check.py b/travis_check.py
new file mode 100644
index 00000000000..cbf9623dba5
--- /dev/null
+++ b/travis_check.py
@@ -0,0 +1,125 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+#
+# This script checks build status of given pullrequest identified by author and commit hash.
+#
+# usage)
+#   python travis_check.py [author] [commit hash] [check interval (optional)]
+#
+# example)
+#   # full hash
+#   python travis_check.py Leemoonsoo 1f2549a38f440ebfbfe2d32a041684e3e39b496c
+#
+#   # with short hash
+#   python travis_check.py Leemoonsoo 1f2549a
+#
+#   # with custom check interval
+#   python travis_check.py Leemoonsoo 1f2549a 5,60,60
+
+import os, sys, getopt, traceback, json, requests, time
+
+author = sys.argv[1]
+commit = sys.argv[2]
+
+# check interval in sec
+check = [5, 60, 300, 300, 300, 300, 300, 300, 300, 300, 300, 300, 600, 600, 600, 600, 600, 600]
+
+if len(sys.argv) > 3:
+    check = map(lambda x: int(x), sys.argv[3].split(","))
+
+def info(msg):
+    print("[" + time.strftime("%Y-%m-%d %H:%M:%S") + "] " + msg)
+    sys.stdout.flush()
+
+info("Author: " + author + ", commit: " + commit)
+
+
+def getBuildStatus(author, commit):
+    travisApi = "https://api.travis-ci.org/"
+
+    # get latest 25 builds
+    resp = requests.get(url=travisApi + "/repos/" + author + "/zeppelin/builds")
+    data = json.loads(resp.text)
+    build = None
+
+    if len(data) == 0:
+        return build;
+
+    for b in data:
+        if b["commit"][:len(commit)] == commit:
+            resp = requests.get(url=travisApi + "/repos/" + author + "/zeppelin/builds/" + str(b["id"]))
+            build = json.loads(resp.text)
+            break
+
+    return build
+
+def status(index, msg, jobId):
+    return '{:20}'.format("[" + str(index+1) + "] " + msg) + "https://travis-ci.org/" + author + "/zeppelin/jobs/" + str(jobId)
+
+def printBuildStatus(build):
+    failure = 0
+    running = 0
+
+    for index, job in enumerate(build["matrix"]):
+        result = job["result"]
+        jobId = job["id"]
+
+        if job["started_at"] == None and result == None:
+            print(status(index, "Not started", jobId))
+            running = running + 1
+        elif job["started_at"] != None and job["finished_at"] == None:
+            print(status(index, "Running ...", jobId))
+            running = running + 1
+        elif job["started_at"] != None and job["finished_at"] != None:
+            if result == None:
+                print(status(index, "Not completed", jobId))
+                failure = failure + 1
+            elif result == 0:
+                print(status(index, "OK", jobId))
+            else:
+                print(status(index, "Error " + str(result), jobId))
+                failure = failure + 1
+        else:
+            print(status(index, "Unknown state", jobId))
+            failure = failure + 1
+
+    return failure, running
+
+
+for sleep in check:
+    info("--------------------------------")
+    time.sleep(sleep);
+    info("Get build status ...")
+    build = getBuildStatus(author, commit)
+    if build == None:
+        info("Can't find build for commit " + commit + " from " + author)
+        sys.exit(2)
+
+    print("Build https://travis-ci.org/" + author + "/zeppelin/builds/" + str(build["id"]))
+    failure, running = printBuildStatus(build)
+
+    print(str(failure) + " job(s) failed, " + str(running) + " job(s) running/pending")
+
+    if failure != 0:
+        sys.exit(1)
+
+    if failure == 0 and running == 0:
+        info("CI Green!")
+        sys.exit(0)
+
+info("Timeout")
+sys.exit(1)
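For reference, with the default `check` schedule the script sleeps 5 + 60 + 10 * 300 + 6 * 600 = 6665 seconds (roughly 1 hour 51 minutes) across its polling attempts before reporting "Timeout", in addition to the time spent on the Travis API calls themselves.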