-
Notifications
You must be signed in to change notification settings - Fork 1.7k
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
[Fix] Fix spark/flink starter script error on windows #6435
Changes from 4 commits
8e08b31
5ac6f7e
7334d51
8216b25
2c084e3
fb1dead
283f37d
4a76471
394f2fd
951028a
655ae6b
c5fd966
9e1a42c
7bad23d
ca4c88f
37994c9
b1d904c
88eaa44
6b5429e
d1199de
f364600
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,24 @@ | ||
package org.apache.seatunnel.core.starter.utils; | ||
|
||
import org.apache.commons.lang3.SystemUtils; | ||
|
||
public class SystemUtil { | ||
|
||
public static String GetOsType() { | ||
String os_type=""; | ||
|
||
if (SystemUtils.IS_OS_WINDOWS) { | ||
os_type="Windows"; | ||
} else if (SystemUtils.IS_OS_MAC) { | ||
os_type="Mac"; | ||
} else if (SystemUtils.IS_OS_LINUX) { | ||
os_type="Linux"; | ||
} else if (SystemUtils.IS_OS_SOLARIS) { | ||
os_type="Solaris"; | ||
} else { | ||
os_type="Unknown"; | ||
} | ||
|
||
return os_type; | ||
} | ||
} |
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -23,6 +23,8 @@ | |
import org.apache.seatunnel.core.starter.flink.args.FlinkCommandArgs; | ||
import org.apache.seatunnel.core.starter.utils.CommandLineUtils; | ||
|
||
import org.apache.seatunnel.core.starter.utils.SystemUtil; | ||
|
||
import java.util.ArrayList; | ||
import java.util.List; | ||
import java.util.Objects; | ||
|
@@ -44,6 +46,7 @@ public class FlinkStarter implements Starter { | |
this.appJar = Common.appStarterDir().resolve(APP_JAR_NAME).toString(); | ||
} | ||
|
||
@SuppressWarnings("checkstyle:RegexpSingleline") | ||
public static void main(String[] args) { | ||
FlinkStarter flinkStarter = new FlinkStarter(args); | ||
System.out.println(String.join(" ", flinkStarter.buildCommands())); | ||
|
@@ -52,8 +55,33 @@ public static void main(String[] args) { | |
@Override | ||
public List<String> buildCommands() { | ||
List<String> command = new ArrayList<>(); | ||
String local_os_type=""; | ||
|
||
SystemUtil my_system_util=new SystemUtil(); | ||
local_os_type=my_system_util.GetOsType(); | ||
// debug | ||
// System.out.println("OS type:"+local_os_type); | ||
|
||
String cmd_flink=""; | ||
|
||
// Note that "flink.cmd" or "flink.bat" can be obtained from lower versions of Flink (e.g. 1.0.9) | ||
// We do not check whether this file exists on the box; the user needs to make sure it exists. | ||
if (local_os_type.toLowerCase().equals("windows")) { | ||
cmd_flink="%FLINK_HOME%/bin/flink.cmd"; | ||
} else if (local_os_type.toLowerCase().equals("linux")) { | ||
cmd_flink="${FLINK_HOME}/bin/flink"; | ||
} else if (local_os_type.toLowerCase().equals("unknown")) { | ||
cmd_flink="error"; | ||
} | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. ditto There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. fixed . |
||
|
||
// set start command | ||
command.add("${FLINK_HOME}/bin/flink"); | ||
if ( ! (cmd_flink.equals("error"))) { | ||
command.add(cmd_flink); | ||
} else { | ||
System.out.println("Error: Can not determine OS type, abort run !"); | ||
System.exit(-1); | ||
} | ||
|
||
// set deploy mode, run or run-application | ||
command.add(flinkCommandArgs.getDeployMode().getDeployMode()); | ||
// set submitted target master | ||
|
@@ -91,6 +119,8 @@ public List<String> buildCommands() { | |
.filter(Objects::nonNull) | ||
.map(String::trim) | ||
.forEach(variable -> command.add("-D" + variable)); | ||
// debug | ||
// System.out.println("Whole command string:" + command.toString()); | ||
return command; | ||
} | ||
} |
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -57,6 +57,8 @@ | |
import java.util.stream.Collectors; | ||
import java.util.stream.Stream; | ||
|
||
import org.apache.seatunnel.core.starter.utils.SystemUtil; | ||
|
||
/** A Starter to generate spark-submit command for SeaTunnel job on spark. */ | ||
public class SparkStarter implements Starter { | ||
|
||
|
@@ -80,6 +82,7 @@ private SparkStarter(String[] args, SparkCommandArgs commandArgs) { | |
this.commandArgs = commandArgs; | ||
} | ||
|
||
@SuppressWarnings("checkstyle:RegexpSingleline") | ||
public static void main(String[] args) throws IOException { | ||
SparkStarter starter = getInstance(args); | ||
List<String> command = starter.buildCommands(); | ||
|
@@ -169,9 +172,9 @@ static Map<String, String> getSparkConf(String configFile) throws FileNotFoundEx | |
Map.Entry::getKey, e -> e.getValue().unwrapped().toString())); | ||
} | ||
|
||
/** return connector's jars, which located in 'connectors/*'. */ | ||
/** return connector's jars, which located in 'connectors/spark/*'. */ | ||
private List<Path> getConnectorJarDependencies() { | ||
Path pluginRootDir = Common.connectorDir(); | ||
Path pluginRootDir = Common.connectorJarDir("seatunnel"); | ||
if (!Files.exists(pluginRootDir) || !Files.isDirectory(pluginRootDir)) { | ||
return Collections.emptyList(); | ||
} | ||
|
@@ -195,7 +198,30 @@ private List<Path> getConnectorJarDependencies() { | |
/** build final spark-submit commands */ | ||
protected List<String> buildFinal() { | ||
List<String> commands = new ArrayList<>(); | ||
commands.add("${SPARK_HOME}/bin/spark-submit"); | ||
String local_os_type=""; | ||
|
||
SystemUtil my_system_util=new SystemUtil(); | ||
local_os_type=my_system_util.GetOsType(); | ||
// debug | ||
// System.out.println("OS type:"+local_os_type); | ||
|
||
String cmd_spark=""; | ||
|
||
if (local_os_type.toLowerCase().equals("windows")) { | ||
cmd_spark="%SPARK_HOME%/bin/spark-submit.cmd"; | ||
} else if (local_os_type.toLowerCase().equals("linux")) { | ||
cmd_spark="${SPARK_HOME}/bin/spark-submit"; | ||
} else if (local_os_type.toLowerCase().equals("unknown")) { | ||
cmd_spark="error"; | ||
} | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. ditto There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. fixed. |
||
|
||
if ( ! (cmd_spark.equals("error"))) { | ||
commands.add(cmd_spark); | ||
} else { | ||
System.out.println("Error: Can not determine OS type, abort run !"); | ||
System.exit(-1); | ||
} | ||
|
||
appendOption(commands, "--class", SeaTunnelSpark.class.getName()); | ||
appendOption(commands, "--name", this.commandArgs.getJobName()); | ||
appendOption(commands, "--master", this.commandArgs.getMaster()); | ||
|
@@ -217,6 +243,9 @@ protected List<String> buildFinal() { | |
if (this.commandArgs.isCheckConfig()) { | ||
commands.add("--check"); | ||
} | ||
|
||
// debug | ||
// System.out.println("Whole spark job command string:" + commands.toString()); | ||
return commands; | ||
} | ||
|
||
|
@@ -259,6 +288,7 @@ protected void appendAppJar(List<String> commands) { | |
Common.appStarterDir().resolve(EngineType.SPARK3.getStarterJarName()).toString()); | ||
} | ||
|
||
@SuppressWarnings("checkstyle:Indentation") | ||
private List<PluginIdentifier> getPluginIdentifiers(Config config, PluginType... pluginTypes) { | ||
return Arrays.stream(pluginTypes) | ||
.flatMap( | ||
|
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
switch?
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Do you mean change the code to the code style "switch ... case" ? If this is true, I'll modify as you suggested.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
I don't think "switch... case" is needed here, because there are many different properties from SystemUtils(e.g. IS_OS_LINUX, IS_OS_WINDOWS,,,etc), if SystemUtils can return unique property "OS_TYPE" then we can change to Switch... case style easily.