[SPARK-32119][CORE] ExecutorPlugin doesn't work with Standalone Cluster and Kubernetes with --jars #28939

Closed
wants to merge 11 commits
@@ -28,21 +28,24 @@

 import org.apache.spark.api.java.*;
 import org.apache.spark.*;
+import org.apache.spark.util.Utils;
 
 /**
  * Java apps can use both Java-friendly JavaSparkContext and Scala SparkContext.
  */
 public class JavaSparkContextSuite implements Serializable {
 
   @Test
-  public void javaSparkContext() {
+  public void javaSparkContext() throws IOException {
+    File tempDir = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "spark");
+    String dummyJarFile = File.createTempFile(tempDir.toString(), "jarFile").toString();
     String[] jars = new String[] {};
     java.util.Map<String, String> environment = new java.util.HashMap<>();
 
     new JavaSparkContext(new SparkConf().setMaster("local").setAppName("name")).stop();
     new JavaSparkContext("local", "name", new SparkConf()).stop();
     new JavaSparkContext("local", "name").stop();
-    new JavaSparkContext("local", "name", "sparkHome", "jarFile").stop();
+    new JavaSparkContext("local", "name", "sparkHome", dummyJarFile).stop();
     new JavaSparkContext("local", "name", "sparkHome", jars).stop();
     new JavaSparkContext("local", "name", "sparkHome", jars, environment).stop();
   }
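The test change above swaps the literal string "jarFile" (a path that never existed on disk) for a temp file that is actually created. A minimal Scala sketch of the same pattern, using Spark's Utils helper with the same names as the diff (my own illustration, not code from the PR; the comment about why a real file is needed is an assumption):

    import java.io.File
    import org.apache.spark.util.Utils

    // Create a real temporary directory, then a real (empty) file whose path can stand
    // in for a jar. Presumably this avoids failures once the jar path is actually
    // resolved instead of merely stored.
    val tempDir: File = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "spark")
    val dummyJarFile: String = File.createTempFile(tempDir.toString, "jarFile").toString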
@@ -568,7 +568,8 @@ class SparkSubmitSuite
       }
     }
 
-    val clArgs2 = Seq("--class", "org.SomeClass", "thejar.jar")
+    val dummyJarFile = TestUtils.createJarWithClasses(Seq.empty)
+    val clArgs2 = Seq("--class", "org.SomeClass", dummyJarFile.toString)
     val appArgs2 = new SparkSubmitArguments(clArgs2)
     val (_, _, conf2, _) = submit.prepareSubmitEnvironment(appArgs2)
     assert(!conf2.contains(UI_SHOW_CONSOLE_PROGRESS))
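Similarly, the hard-coded thejar.jar above is replaced by a jar that the test utility really builds. A hedged Scala sketch of that helper in isolation (my illustration, assuming org.apache.spark.TestUtils as used in the diff):

    import org.apache.spark.TestUtils

    // createJarWithClasses writes a real jar (here containing no classes) to a temp
    // location and returns a java.net.URL pointing at it.
    val dummyJarFile = TestUtils.createJarWithClasses(Seq.empty)

    // The URL's string form can then be passed wherever an existing jar is expected,
    // e.g. as the primary resource in spark-submit style arguments.
    val clArgs2 = Seq("--class", "org.SomeClass", dummyJarFile.toString)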
@@ -1254,7 +1255,7 @@ class SparkSubmitSuite
 |public void init(PluginContext ctx, Map<String, String> extraConf) {
 |  String str = null;
 |  try (BufferedReader reader =
-|    new BufferedReader(new InputStreamReader(new FileInputStream($tempFileName)))) {
+|    new BufferedReader(new InputStreamReader(new FileInputStream("$tempFileName")))) {
Member commented:

Oh, interesting. Did this test case previously succeed without the quotes?

@sarutak (Member, Author) commented on Jun 29, 2020:

Actually, I replaced "test.txt" with $tempFileName just before the first push, so it's a mis-replacement.

 |    str = reader.readLine();
 |  } catch (IOException e) {
 |    throw new RuntimeException(e);
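On the quoting discussed in the review thread above: the plugin's Java source in this test is generated from a Scala interpolated string, so the quotes decide whether $tempFileName is embedded as a Java string literal or as a bare token that would not compile. A small standalone sketch of the difference (my illustration; the tempFileName value is hypothetical):

    val tempFileName = "/tmp/plugin-test.txt" // hypothetical path, for illustration only

    // Without quotes, the path is spliced in as bare text, yielding invalid Java source:
    val wrong = s"""new FileInputStream($tempFileName)"""
    // => new FileInputStream(/tmp/plugin-test.txt)

    // With quotes inside the interpolated string, the path becomes a Java string
    // literal, which is what the generated plugin source needs in order to compile:
    val right = s"""new FileInputStream("$tempFileName")"""
    // => new FileInputStream("/tmp/plugin-test.txt")

This matches the author's note that the quotes were dropped by accident during a last-minute replacement of "test.txt".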