[SPARK-32175][FOLLOWUP] Remove flaky test added in SPARK-32175
### What changes were proposed in this pull request?

This PR removes a test added in SPARK-32175 (#29002).

### Why are the changes needed?

That test is flaky. The flakiness could be mitigated by increasing the timeout, but it is simpler to remove the test.
See also the discussion in #29002.
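
For reference, the rejected alternative would have been a one-line change in the removed test, roughly as sketched below (the 60-second value is illustrative; the PR does not propose a specific number):

```scala
// Sketch of the rejected alternative: keep the test, but give the
// spark-submit run more headroom on slow CI machines.
// 60.seconds is illustrative only; the original test used 30.seconds.
SparkSubmitSuite.runSparkSubmit(args, timeout = 60.seconds)
```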

### Does this PR introduce _any_ user-facing change?

No.

Closes #29314 from sarutak/remove-flaky-test.

Authored-by: Kousuke Saruta <sarutak@oss.nttdata.com>
Signed-off-by: Kousuke Saruta <sarutak@oss.nttdata.com>
sarutak committed Jul 31, 2020
1 parent 6032c5b commit 9d7b1d9
Showing 1 changed file with 0 additions and 67 deletions:
core/src/test/scala/org/apache/spark/executor/ExecutorSuite.scala
@@ -403,73 +403,6 @@ class ExecutorSuite extends SparkFunSuite
    assert(taskMetrics.getMetricValue("JVMHeapMemory") > 0)
  }

test("SPARK-32175: Plugin initialization should start after heartbeater started") {
withTempDir { tempDir =>
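      // Inline Java source for a SparkPlugin that provides only an executor
      // plugin; the driver side is a no-op.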
      val sparkPluginCodeBody =
        """
          |@Override
          |public org.apache.spark.api.plugin.ExecutorPlugin executorPlugin() {
          |  return new TestExecutorPlugin();
          |}
          |
          |@Override
          |public org.apache.spark.api.plugin.DriverPlugin driverPlugin() { return null; }
        """.stripMargin
      val executorPluginBody =
        """
          |@Override
          |public void init(
          |    org.apache.spark.api.plugin.PluginContext ctx,
          |    java.util.Map<String, String> extraConf) {
          |  try {
          |    Thread.sleep(8 * 1000);
          |  } catch (InterruptedException e) {
          |    throw new RuntimeException(e);
          |  }
          |}
        """.stripMargin

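      // Compile both plugin classes against this suite's classpath and
      // package them into a jar that the driver and executors can load.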
      val compiledExecutorPlugin = TestUtils.createCompiledClass(
        "TestExecutorPlugin",
        tempDir,
        "",
        null,
        Seq.empty,
        Seq("org.apache.spark.api.plugin.ExecutorPlugin"),
        executorPluginBody)

      val thisClassPath =
        sys.props("java.class.path").split(File.pathSeparator).map(p => new File(p).toURI.toURL)
      val compiledSparkPlugin = TestUtils.createCompiledClass(
        "TestSparkPlugin",
        tempDir,
        "",
        null,
        Seq(tempDir.toURI.toURL) ++ thisClassPath,
        Seq("org.apache.spark.api.plugin.SparkPlugin"),
        sparkPluginCodeBody)

      val jarUrl = TestUtils.createJar(
        Seq(compiledSparkPlugin, compiledExecutorPlugin),
        new File(tempDir, "testPlugin.jar"))

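      // Submit a trivial app on a local cluster with a short heartbeat
      // interval and block manager timeout; the run succeeds only if the
      // heartbeater is already running while the plugin's init() sleeps.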
      val unusedJar = TestUtils.createJarWithClasses(Seq.empty)
      val args = Seq(
        "--class", SimpleApplicationTest.getClass.getName.stripSuffix("$"),
        "--name", "testApp",
        "--master", "local-cluster[1,1,1024]",
        "--conf", "spark.plugins=TestSparkPlugin",
        "--conf", "spark.storage.blockManagerSlaveTimeoutMs=" + 5 * 1000,
        "--conf", "spark.network.timeoutInterval=" + 1000,
        "--conf", "spark.executor.heartbeatInterval=" + 1000,
        "--conf", "spark.executor.extraClassPath=" + jarUrl.toString,
        "--conf", "spark.driver.extraClassPath=" + jarUrl.toString,
        "--conf", "spark.ui.enabled=false",
        unusedJar.toString)
      SparkSubmitSuite.runSparkSubmit(args, timeout = 30.seconds)
    }
  }

  private def createMockEnv(conf: SparkConf, serializer: JavaSerializer): SparkEnv = {
    val mockEnv = mock[SparkEnv]
    val mockRpcEnv = mock[RpcEnv]
