From fff4f96dc0e2e43721f9aca3649a342fcd072ca9 Mon Sep 17 00:00:00 2001
From: Jeff Zhang
Date: Tue, 31 Jul 2018 09:04:57 +0800
Subject: [PATCH] ZEPPELIN-3668. Can't hide Spark Jobs (Spark UI) button

---
 .../spark/NewSparkInterpreterTest.java        | 42 ++++++++++++++++---
 .../apache/zeppelin/spark/SparkShimsTest.java |  5 +--
 .../org/apache/zeppelin/spark/SparkShims.java | 22 ++++------
 .../apache/zeppelin/spark/Spark1Shims.java    |  2 +-
 .../apache/zeppelin/spark/Spark2Shims.java    |  4 +-
 5 files changed, 50 insertions(+), 25 deletions(-)

diff --git a/spark/interpreter/src/test/java/org/apache/zeppelin/spark/NewSparkInterpreterTest.java b/spark/interpreter/src/test/java/org/apache/zeppelin/spark/NewSparkInterpreterTest.java
index 48be45b62b4..6071bb3073c 100644
--- a/spark/interpreter/src/test/java/org/apache/zeppelin/spark/NewSparkInterpreterTest.java
+++ b/spark/interpreter/src/test/java/org/apache/zeppelin/spark/NewSparkInterpreterTest.java
@@ -34,6 +34,7 @@
 import org.apache.zeppelin.interpreter.remote.RemoteInterpreterEventClient;
 import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion;
 import org.junit.After;
+import org.junit.Before;
 import org.junit.Ignore;
 import org.junit.Test;
@@ -44,7 +45,6 @@
 import java.nio.channels.Channels;
 import java.nio.channels.ReadableByteChannel;
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
@@ -53,6 +53,7 @@
 import static org.junit.Assert.assertTrue;
 import static org.mockito.Matchers.any;
 import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
 import static org.mockito.Mockito.verify;
@@ -66,7 +67,7 @@ public class NewSparkInterpreterTest {
 
   // catch the interpreter output in onUpdate
   private InterpreterResultMessageOutput messageOutput;
-  private RemoteInterpreterEventClient mockRemoteEventClient = mock(RemoteInterpreterEventClient.class);
+  private RemoteInterpreterEventClient mockRemoteEventClient;
 
   @Test
   public void testSparkInterpreter() throws IOException, InterruptedException, InterpreterException {
@@ -163,7 +164,7 @@ public void testSparkInterpreter() throws IOException, InterruptedException, Int
     result = interpreter.interpret(
         "case class Bank(age:Integer, job:String, marital : String, education : String, balance : Integer)\n" +
-            "val bank = bankText.map(s=>s.split(\";\")).filter(s => s(0)!=\"\\\"age\\\"\").map(\n" + 
+            "val bank = bankText.map(s=>s.split(\";\")).filter(s => s(0)!=\"\\\"age\\\"\").map(\n" +
             "  s => Bank(s(0).toInt, \n" +
             "      s(1).replaceAll(\"\\\"\", \"\"),\n" +
             "      s(2).replaceAll(\"\\\"\", \"\"),\n" +
@@ -188,7 +189,7 @@ public void testSparkInterpreter() throws IOException, InterruptedException, Int
         "df.show()", getInterpreterContext());
     assertEquals(InterpreterResult.Code.SUCCESS, result.code());
     assertTrue(output.contains(
-        "+---+----+\n" + 
+        "+---+----+\n" +
             "| _1| _2|\n" +
             "+---+----+\n" +
             "|  1|   a|\n" +
@@ -203,7 +204,7 @@ public void testSparkInterpreter() throws IOException, InterruptedException, Int
         "df.show()", getInterpreterContext());
     assertEquals(InterpreterResult.Code.SUCCESS, result.code());
     assertTrue(output.contains(
-        "+---+----+\n" + 
+        "+---+----+\n" +
             "| _1| _2|\n" +
             "+---+----+\n" +
             "|  1|   a|\n" +
@@ -318,7 +319,7 @@ public void run() {
     interpretThread.start();
     boolean nonZeroProgress = false;
     int progress = 0;
-    while(interpretThread.isAlive()) {
+    while (interpretThread.isAlive()) {
       progress = interpreter.getProgress(context2);
       assertTrue(progress >= 0);
       if (progress != 0 && progress != 100) {
@@ -463,6 +464,35 @@ public void testSchedulePool() throws InterpreterException {
     assertEquals(null, interpreter.getSparkContext().getLocalProperty("spark.scheduler.pool"));
   }
 
+  @Test
+  public void testDisableSparkUI() throws InterpreterException {
+    Properties properties = new Properties();
+    properties.setProperty("spark.master", "local");
+    properties.setProperty("spark.app.name", "test");
+    properties.setProperty("zeppelin.spark.maxResult", "100");
+    properties.setProperty("zeppelin.spark.test", "true");
+    properties.setProperty("zeppelin.spark.useNew", "true");
+    properties.setProperty("spark.ui.enabled", "false");
+
+    interpreter = new SparkInterpreter(properties);
+    assertTrue(interpreter.getDelegation() instanceof NewSparkInterpreter);
+    interpreter.setInterpreterGroup(mock(InterpreterGroup.class));
+    InterpreterContext.set(getInterpreterContext());
+    interpreter.open();
+
+    InterpreterContext context = getInterpreterContext();
+    InterpreterResult result = interpreter.interpret("sc.range(1, 10).sum", context);
+    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
+
+    // spark job url is not sent
+    verify(mockRemoteEventClient, never()).onParaInfosReceived(any(Map.class));
+  }
+
+  @Before
+  public void setUp() {
+    mockRemoteEventClient = mock(RemoteInterpreterEventClient.class);
+  }
+
   @After
   public void tearDown() throws InterpreterException {
     if (this.interpreter != null) {

diff --git a/spark/interpreter/src/test/java/org/apache/zeppelin/spark/SparkShimsTest.java b/spark/interpreter/src/test/java/org/apache/zeppelin/spark/SparkShimsTest.java
index ccebac34275..fd47ce263d9 100644
--- a/spark/interpreter/src/test/java/org/apache/zeppelin/spark/SparkShimsTest.java
+++ b/spark/interpreter/src/test/java/org/apache/zeppelin/spark/SparkShimsTest.java
@@ -108,7 +108,6 @@ public String showDataFrame(Object obj, int maxResult) {
   @PrepareForTest({BaseZeppelinContext.class, VersionInfo.class})
   @PowerMockIgnore({"javax.net.*", "javax.security.*"})
   public static class SingleTests {
-    @Mock Properties mockProperties;
     @Captor ArgumentCaptor<Map<String, String>> argumentCaptor;
 
     SparkShims sparkShims;
@@ -130,7 +129,7 @@ public void setUp() {
 
   @Test
   public void runUnderLocalTest() {
-    sparkShims.buildSparkJobUrl("local", "http://sparkurl", 0, mockProperties, mockContext);
+    sparkShims.buildSparkJobUrl("local", "http://sparkurl", 0, mockContext);
 
     Map<String, String> mapValue = argumentCaptor.getValue();
     assertTrue(mapValue.keySet().contains("jobUrl"));
@@ -140,7 +139,7 @@
 
   @Test
   public void runUnderYarnTest() {
-    sparkShims.buildSparkJobUrl("yarn", "http://sparkurl", 0, mockProperties, mockContext);
+    sparkShims.buildSparkJobUrl("yarn", "http://sparkurl", 0, mockContext);
 
     Map<String, String> mapValue = argumentCaptor.getValue();
     assertTrue(mapValue.keySet().contains("jobUrl"));

diff --git a/spark/spark-shims/src/main/scala/org/apache/zeppelin/spark/SparkShims.java b/spark/spark-shims/src/main/scala/org/apache/zeppelin/spark/SparkShims.java
index d3087622807..1287c576ab5 100644
--- a/spark/spark-shims/src/main/scala/org/apache/zeppelin/spark/SparkShims.java
+++ b/spark/spark-shims/src/main/scala/org/apache/zeppelin/spark/SparkShims.java
@@ -104,26 +104,20 @@ protected String getParagraphId(String jobgroupId) {
   protected void buildSparkJobUrl(String master,
                                   String sparkWebUrl,
                                   int jobId,
-                                  Properties jobProperties,
                                   InterpreterContext context) {
-    String uiEnabled = jobProperties.getProperty("spark.ui.enabled");
     String jobUrl = sparkWebUrl + "/jobs/job?id=" + jobId;
-    // Button visible if Spark UI property not set, set as invalid boolean or true
-    boolean showSparkUI =
-        uiEnabled == null || !uiEnabled.trim().toLowerCase().equals("false");
     String version = VersionInfo.getVersion();
     if (master.toLowerCase().contains("yarn") && !supportYarn6615(version)) {
       jobUrl = sparkWebUrl + "/jobs";
     }
-    if (showSparkUI && jobUrl != null) {
-      Map<String, String> infos = new java.util.HashMap<String, String>();
-      infos.put("jobUrl", jobUrl);
-      infos.put("label", "SPARK JOB");
-      infos.put("tooltip", "View in Spark web UI");
-      infos.put("noteId", context.getNoteId());
-      infos.put("paraId", context.getParagraphId());
-      context.getIntpEventClient().onParaInfosReceived(infos);
-    }
+
+    Map<String, String> infos = new java.util.HashMap<String, String>();
+    infos.put("jobUrl", jobUrl);
+    infos.put("label", "SPARK JOB");
+    infos.put("tooltip", "View in Spark web UI");
+    infos.put("noteId", context.getNoteId());
+    infos.put("paraId", context.getParagraphId());
+    context.getIntpEventClient().onParaInfosReceived(infos);
   }
 
 /**

diff --git a/spark/spark1-shims/src/main/scala/org/apache/zeppelin/spark/Spark1Shims.java b/spark/spark1-shims/src/main/scala/org/apache/zeppelin/spark/Spark1Shims.java
index db0727c9a9f..cf8bb230723 100644
--- a/spark/spark1-shims/src/main/scala/org/apache/zeppelin/spark/Spark1Shims.java
+++ b/spark/spark1-shims/src/main/scala/org/apache/zeppelin/spark/Spark1Shims.java
@@ -38,7 +38,7 @@ public void setupSparkListener(final String master,
     sc.addSparkListener(new JobProgressListener(sc.getConf()) {
       @Override
       public void onJobStart(SparkListenerJobStart jobStart) {
-        buildSparkJobUrl(master, sparkWebUrl, jobStart.jobId(), jobStart.properties(), context);
+        buildSparkJobUrl(master, sparkWebUrl, jobStart.jobId(), context);
       }
     });
   }

diff --git a/spark/spark2-shims/src/main/scala/org/apache/zeppelin/spark/Spark2Shims.java b/spark/spark2-shims/src/main/scala/org/apache/zeppelin/spark/Spark2Shims.java
index 177b0acb14b..5f0cf87e84a 100644
--- a/spark/spark2-shims/src/main/scala/org/apache/zeppelin/spark/Spark2Shims.java
+++ b/spark/spark2-shims/src/main/scala/org/apache/zeppelin/spark/Spark2Shims.java
@@ -39,7 +39,9 @@ public void setupSparkListener(final String master,
     sc.addSparkListener(new SparkListener() {
      @Override
       public void onJobStart(SparkListenerJobStart jobStart) {
-        buildSparkJobUrl(master, sparkWebUrl, jobStart.jobId(), jobStart.properties(), context);
+        if (sc.getConf().getBoolean("spark.ui.enabled", true)) {
+          buildSparkJobUrl(master, sparkWebUrl, jobStart.jobId(), context);
+        }
       }
     });
   }
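
Note on the new check: buildSparkJobUrl no longer inspects the per-job Properties; instead the Spark 2 listener consults the live SparkConf before emitting the job-URL event. A minimal stand-alone sketch of that lookup follows; the class name SparkUiFlagSketch and the println harness are illustrative only, not part of the patch, and it assumes spark-core on the classpath.

import org.apache.spark.SparkConf;

// Sketch: how the Spark 2 listener decides whether to emit the "SPARK JOB"
// button. SparkConf#getBoolean falls back to the default (true) when
// spark.ui.enabled is unset, so the button disappears only when the UI is
// explicitly disabled.
public class SparkUiFlagSketch {
  public static void main(String[] args) {
    SparkConf conf = new SparkConf();
    // Flag unset: defaults to true, job URL would be sent.
    System.out.println(conf.getBoolean("spark.ui.enabled", true));

    conf.set("spark.ui.enabled", "false");
    // Explicitly disabled: the listener skips buildSparkJobUrl entirely.
    System.out.println(conf.getBoolean("spark.ui.enabled", true));
  }
}

Reading the flag through SparkConf also replaces the old hand-rolled string test (uiEnabled == null || !uiEnabled.trim().toLowerCase().equals("false")) with Spark's own boolean parsing.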
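For completeness, the user-facing path that the new testDisableSparkUI exercises: setting spark.ui.enabled to false in the interpreter properties hides the button end to end, because the listener never calls buildSparkJobUrl. A sketch of that wiring, with the interpreter construction elided as in the test (the class name DisableSparkUiSetup is illustrative only):

import java.util.Properties;

// Sketch of the interpreter properties used by testDisableSparkUI; with this
// patch, spark.ui.enabled=false both disables the Spark web UI and keeps
// onParaInfosReceived from ever being called.
public class DisableSparkUiSetup {
  public static void main(String[] args) {
    Properties properties = new Properties();
    properties.setProperty("spark.master", "local");
    properties.setProperty("spark.app.name", "test");
    properties.setProperty("spark.ui.enabled", "false"); // hides the SPARK JOB button

    // new SparkInterpreter(properties) and interpreter.open() would follow,
    // exactly as in testDisableSparkUI above.
    System.out.println(properties);
  }
}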