Merge pull request #272 from peacewong/dev-1.1.1
update 1.1.1 manual
casionone committed May 25, 2022
2 parents 2cfc543 + 708a2aa commit afcca39
Showing 6 changed files with 33 additions and 21 deletions.
docs/deployment/quick_deploy.md (6 additions & 0 deletions)
@@ -243,6 +243,12 @@ cp mysql-connector-java-5.1.49.jar {LINKIS_HOME}/lib/linkis-commons/public-modul

### 5. Linkis quick startup

+ **Notice**: if you use DSS or another project that depends on a Linkis version earlier than 1.1.1, you also need to add the following line to the `linkis.properties` file:
+ ```shell
+ echo "wds.linkis.session.ticket.key=bdp-user-ticket-id" >> linkis.properties
+ ```


(1). Start services

Run the following commands in the installation directory to start all services.
docs/development/linkis_config.md (2 additions & 2 deletions)
@@ -106,8 +106,8 @@ Note: When submitting client parameters, only engine-related parameters, tag par
labels.put(LabelKeyConstant.ENGINE_TYPE_KEY, "spark-2.4.3"); // Specify engine type and version
labels.put(LabelKeyConstant.USER_CREATOR_TYPE_KEY, user + "-IDE");// Specify the running user and your APPName
labels.put(LabelKeyConstant.CODE_TYPE_KEY, "sql"); // Specify the type of script to run: spark supports: sql, scala, py; Hive: hql; shell: sh; python: python; presto: psql
- labels.put(LabelKeyConstant.JOB_RUNNING_TIMEOUT_KEY, "10000");//The job runs for 10s and automatically initiates Kill, the unit is ms
- labels.put(LabelKeyConstant.JOB_QUEUING_TIMEOUT_KEY, "10000");//The job is queued for more than 10s and automatically initiates Kill, the unit is ms
+ labels.put(LabelKeyConstant.JOB_RUNNING_TIMEOUT_KEY, "10000");//The job runs for 10s and automatically initiates Kill, the unit is s
+ labels.put(LabelKeyConstant.JOB_QUEUING_TIMEOUT_KEY, "10000");//The job is queued for more than 10s and automatically initiates Kill, the unit is s
labels.put(LabelKeyConstant.RETRY_TIMEOUT_KEY, "10000");//Wait time before the job retries after failing for resource or similar reasons, in ms; e.g. on insufficient queue resources it retries 10 times at this interval by default
labels.put(LabelKeyConstant.TENANT_KEY,"hduser02");//Tenant label; if the tenant parameter is specified, the task is routed to a separate ECM machine
labels.put(LabelKeyConstant.EXECUTE_ONCE_KEY,"");//Execute-once label; setting it is not recommended, since the engine will not be reused and shuts down once the task finishes. Set it only when a task needs specialized parameters
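For reference, a minimal Java sketch (not part of this commit) assembling the label map documented above; the `LabelKeyConstant` import path is assumed from Linkis 1.x and may differ by version:

```java
import java.util.HashMap;
import java.util.Map;

// Assumed 1.x import path; 0.x releases used a com.webank.wedatasphere prefix.
import org.apache.linkis.manager.label.constant.LabelKeyConstant;

public class LabelMapSketch {
    // Builds the label map documented above; all values are illustrative.
    public static Map<String, Object> buildLabels(String user, String appName) {
        Map<String, Object> labels = new HashMap<>();
        labels.put(LabelKeyConstant.ENGINE_TYPE_KEY, "spark-2.4.3");              // engine type and version
        labels.put(LabelKeyConstant.USER_CREATOR_TYPE_KEY, user + "-" + appName); // executing user and app name
        labels.put(LabelKeyConstant.CODE_TYPE_KEY, "sql");                        // script type
        labels.put(LabelKeyConstant.JOB_RUNNING_TIMEOUT_KEY, "10000");            // auto-kill threshold for running jobs
        labels.put(LabelKeyConstant.JOB_QUEUING_TIMEOUT_KEY, "10000");            // auto-kill threshold for queued jobs
        labels.put(LabelKeyConstant.RETRY_TIMEOUT_KEY, "10000");                  // retry wait on resource failures, in ms
        return labels;
    }
}
```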
docs/user_guide/sdk_manual.md (6 additions & 6 deletions)
@@ -117,7 +117,7 @@ public class LinkisClientTest {
// set label map: EngineTypeLabel/UserCreatorLabel/EngineRunTypeLabel/Tenant
Map<String, Object> labels = new HashMap<String, Object>();
labels.put(LabelKeyConstant.ENGINE_TYPE_KEY, "spark-2.4.3"); // required engineType Label
- labels.put(LabelKeyConstant.USER_CREATOR_TYPE_KEY, user + "-IDE");// required execute user and creator
+ labels.put(LabelKeyConstant.USER_CREATOR_TYPE_KEY, user + "-APPName");// required execute user and creator
labels.put(LabelKeyConstant.CODE_TYPE_KEY, "py"); // required codeType
// set startup map: engineConn start params
Map<String, Object> startupMap = new HashMap<String, Object>(16);
@@ -155,7 +155,7 @@ public class LinkisClientTest {

// 2. build JobExecuteAction (the legacy 0.X usage)
JobExecuteAction executionAction = JobExecuteAction.builder()
- .setCreator("IDE") //creator, the system name of the client requesting linkis, used for system-level isolation
+ .setCreator("APPName") //creator, the system name of the client requesting linkis, used for system-level isolation
.addExecuteCode(code) //Execution Code
.setEngineTypeStr("spark") // engineConn type
.setRunTypeStr("py") // code type
@@ -275,14 +275,14 @@ object LinkisClientTest {
// set label map: EngineTypeLabel/UserCreatorLabel/EngineRunTypeLabel/Tenant
val labels: util.Map[String, Any] = new util.HashMap[String, Any]
labels.put(LabelKeyConstant.ENGINE_TYPE_KEY, "spark-2.4.3"); // required engineType Label
- labels.put(LabelKeyConstant.USER_CREATOR_TYPE_KEY, user + "-IDE");// required execute user and creator
+ labels.put(LabelKeyConstant.USER_CREATOR_TYPE_KEY, user + "-APPName");// required execute user and creator
labels.put(LabelKeyConstant.CODE_TYPE_KEY, "py"); // required codeType

val startupMap = new java.util.HashMap[String, Any]()
// Supports setting engine-native parameters, e.g. Spark/Hive engine parameters
startupMap.put("spark.executor.instances", 2);
// setting linkis params
startupMap.put("wds.linkis.rm.yarnqueue", "dws");
startupMap.put("wds.linkis.rm.yarnqueue", "default");
// 2. build jobSubmitAction
val jobSubmitAction = JobSubmitAction.builder
.addExecuteCode(code)
@@ -309,10 +309,10 @@
// Supports setting engine-native parameters, e.g. Spark/Hive engine parameters
startupMap.put("spark.executor.instances", 2)
// setting linkis params
startupMap.put("wds.linkis.rm.yarnqueue", "dws")
startupMap.put("wds.linkis.rm.yarnqueue", "default")
// 2. build JobExecuteAction (the legacy 0.X usage)
val executionAction = JobExecuteAction.builder()
- .setCreator("IDE") //creator, the system name of the client requesting linkis, used for system-level isolation
+ .setCreator("APPName") //creator, the system name of the client requesting linkis, used for system-level isolation
.addExecuteCode(code) //Execution Code
.setEngineTypeStr("spark") // engineConn type
.setRunTypeStr("py") // code type
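The hunks above only change label and creator values; for orientation, here is a hedged Java sketch of the recommended submit path they belong to. The builder methods follow this manual's own examples, while the import paths are assumed from the Linkis 1.x SDK and should be verified against your version:

```java
import java.util.Map;

// Assumed 1.x import paths; verify against your SDK version.
import org.apache.linkis.ujes.client.UJESClient;
import org.apache.linkis.ujes.client.request.JobSubmitAction;
import org.apache.linkis.ujes.client.response.JobExecuteResult;

public class SubmitSketch {
    // Submits code via the recommended 1.x path; the returned handle is what
    // getJobInfo and the result queries in this manual operate on.
    static JobExecuteResult submit(UJESClient client, String user, String code,
                                   Map<String, Object> labels, Map<String, Object> startupMap) {
        JobSubmitAction action = JobSubmitAction.builder()
                .addExecuteCode(code)          // the script to run
                .setStartupParams(startupMap)  // engineConn start params, e.g. wds.linkis.rm.yarnqueue
                .setUser(user)                 // submitting user
                .addExecuteUser(user)          // executing user
                .setLabels(labels)             // engineType / userCreator / codeType labels
                .build();
        return client.submit(action);
    }
}
```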
zh-CN version of docs/deployment/quick_deploy.md
@@ -259,6 +259,11 @@ cp mysql-connector-java-5.1.49.jar {LINKIS_HOME}/lib/linkis-commons/public-modu

### 4.5 Quickly start Linkis

+ **Note**: if you use DSS or another project that depends on a Linkis version earlier than 1.1.1, you also need to add the following line to the `linkis.properties` file:
+ ```shell
+ echo "wds.linkis.session.ticket.key=bdp-user-ticket-id" >> linkis.properties
+ ```

#### 4.5.1 Start the services:

Run the following command in the installation directory to start all services:
zh-CN version of docs/development/linkis_config.md
@@ -106,8 +106,8 @@ linkis-cli -runtieMap key1=value -runtieMap key2=value
labels.put(LabelKeyConstant.ENGINE_TYPE_KEY, "spark-2.4.3"); // Specify engine type and version
labels.put(LabelKeyConstant.USER_CREATOR_TYPE_KEY, user + "-IDE");// Specify the executing user and your APPName
labels.put(LabelKeyConstant.CODE_TYPE_KEY, "sql"); // Specify the script type to run: Spark supports sql, scala, py; Hive: hql; shell: sh; python: python; presto: psql
- labels.put(LabelKeyConstant.JOB_RUNNING_TIMEOUT_KEY, "10000");//Kill is issued automatically if the job has not finished after running for 10s; the unit is ms
- labels.put(LabelKeyConstant.JOB_QUEUING_TIMEOUT_KEY, "10000");//Kill is issued automatically if the job has been queued for more than 10s; the unit is ms
+ labels.put(LabelKeyConstant.JOB_RUNNING_TIMEOUT_KEY, "10000");//Kill is issued automatically if the job has not finished after running for 10s; the unit is s
+ labels.put(LabelKeyConstant.JOB_QUEUING_TIMEOUT_KEY, "10000");//Kill is issued automatically if the job has been queued for more than 10s; the unit is s
labels.put(LabelKeyConstant.RETRY_TIMEOUT_KEY, "10000");//Wait time before the job retries after failing for resource or similar reasons, in ms; e.g. on insufficient queue resources it retries 10 times at this interval by default
labels.put(LabelKeyConstant.TENANT_KEY,"hduser02");//Tenant label; if the tenant parameter is specified, the task is routed to a separate ECM machine
labels.put(LabelKeyConstant.EXECUTE_ONCE_KEY,"");//Execute-once label; setting it is not recommended, since the engine will not be reused and shuts down once the task finishes. Set it only when a task needs specialized parameters
zh-CN version of docs/user_guide/sdk_manual.md
@@ -57,7 +57,7 @@ public class LinkisClientTest {
.readTimeout(30000) //set read timeout
.setAuthenticationStrategy(new StaticAuthenticationStrategy()) //AuthenticationStrategy: Linkis authentication supports static and token modes
.setAuthTokenKey("hadoop") // set submit user
- .setAuthTokenValue("hadoop"))) // set passwd or token (setAuthTokenValue("BML-AUTH"))
+ .setAuthTokenValue("hadoop"))) // set passwd or token (setAuthTokenValue("test"))
.setDWSVersion("v1") //linkis rest version v1
.build();
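For context, a hedged reconstruction of the full client bootstrap this hunk is taken from, following this manual's builder pattern; the gateway address and import paths are assumptions to adjust for your deployment:

```java
// Assumed 1.x import paths; verify against your SDK version.
import org.apache.linkis.httpclient.dws.authentication.StaticAuthenticationStrategy;
import org.apache.linkis.httpclient.dws.config.DWSClientConfig;
import org.apache.linkis.httpclient.dws.config.DWSClientConfigBuilder;
import org.apache.linkis.ujes.client.UJESClient;
import org.apache.linkis.ujes.client.UJESClientImpl;

public class ClientSketch {
    // Builds a UJESClient with static authentication, mirroring the hunk above.
    static UJESClient buildClient() {
        DWSClientConfig config = ((DWSClientConfigBuilder) (DWSClientConfigBuilder.newBuilder()
                .addServerUrl("http://127.0.0.1:9001") // linkis-mg-gateway address (assumed)
                .connectionTimeout(30000)              // connect timeout, ms
                .readTimeout(30000)                    // read timeout, ms
                .setAuthenticationStrategy(new StaticAuthenticationStrategy()) // static or token auth
                .setAuthTokenKey("hadoop")             // submitting user
                .setAuthTokenValue("hadoop")))         // password or token
                .setDWSVersion("v1")                   // linkis REST version
                .build();
        return new UJESClientImpl(config);
    }
}
```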

@@ -72,9 +72,9 @@ public class LinkisClientTest {
try {

System.out.println("user : " + user + ", code : [" + executeCode + "]");
- // 3. build job and execute
+ // 3. The submit way is recommended: you can attach job-related labels, which enables more features
JobExecuteResult jobExecuteResult = toSubmit(user, executeCode);
- //0.x:JobExecuteResult jobExecuteResult = toExecute(user, executeCode);
+ //0.x-compatible way, not recommended: JobExecuteResult jobExecuteResult = toExecute(user, executeCode);
System.out.println("execId: " + jobExecuteResult.getExecID() + ", taskId: " + jobExecuteResult.taskID());
// 4. get job info
JobInfoResult jobInfoResult = client.getJobInfo(jobExecuteResult);
@@ -115,8 +115,8 @@ public class LinkisClientTest {
// set label map: EngineTypeLabel/UserCreatorLabel/EngineRunTypeLabel/Tenant
Map<String, Object> labels = new HashMap<String, Object>();
labels.put(LabelKeyConstant.ENGINE_TYPE_KEY, "spark-2.4.3"); // required engineType Label
- labels.put(LabelKeyConstant.USER_CREATOR_TYPE_KEY, user + "-IDE");// required execute user and creator
- labels.put(LabelKeyConstant.CODE_TYPE_KEY, "py"); // required codeType
+ labels.put(LabelKeyConstant.USER_CREATOR_TYPE_KEY, user + "-APPName");// requesting user and application name; both parts are required, and APPName must not contain "-" (replace it with "_")
+ labels.put(LabelKeyConstant.CODE_TYPE_KEY, "py"); // specify the script type
// set startup map: engineConn start params
Map<String, Object> startupMap = new HashMap<String, Object>(16);
// Supports setting engine-native parameters, e.g. Spark/Hive engine parameters
@@ -153,7 +153,7 @@ public class LinkisClientTest {

// 2. build JobExecuteAction (the legacy 0.X usage)
JobExecuteAction executionAction = JobExecuteAction.builder()
- .setCreator("IDE") //creator, the system name of the client requesting linkis, used for system-level isolation
+ .setCreator("AppName") //creator, the system name of the client requesting linkis, used for system-level isolation
.addExecuteCode(code) //Execution Code
.setEngineTypeStr("spark") // engineConn type
.setRunTypeStr("py") // code type
@@ -217,6 +217,7 @@ object LinkisClientTest {
try {
// 3. build job and execute
println("user : " + user + ", code : [" + executeCode + "]")
+ //The submit way is recommended; it supports passing job labels
val jobExecuteResult = toSubmit(user, executeCode)
//0.X: val jobExecuteResult = toExecute(user, executeCode)
println("execId: " + jobExecuteResult.getExecID + ", taskId: " + jobExecuteResult.taskID)
@@ -271,14 +272,14 @@ object LinkisClientTest {
// set label map: EngineTypeLabel/UserCreatorLabel/EngineRunTypeLabel/Tenant
val labels: util.Map[String, Any] = new util.HashMap[String, Any]
labels.put(LabelKeyConstant.ENGINE_TYPE_KEY, "spark-2.4.3"); // required engineType Label
- labels.put(LabelKeyConstant.USER_CREATOR_TYPE_KEY, user + "-IDE");// required execute user and creator
- labels.put(LabelKeyConstant.CODE_TYPE_KEY, "py"); // required codeType
+ labels.put(LabelKeyConstant.USER_CREATOR_TYPE_KEY, user + "-APPName");// requesting user and application name; both parts are required, and APPName must not contain "-" (replace it with "_")
+ labels.put(LabelKeyConstant.CODE_TYPE_KEY, "py"); // specify the script type

val startupMap = new java.util.HashMap[String, Any]()
// Supports setting engine-native parameters, e.g. Spark/Hive engine parameters
startupMap.put("spark.executor.instances", 2);
// setting linkis params
startupMap.put("wds.linkis.rm.yarnqueue", "dws");
startupMap.put("wds.linkis.rm.yarnqueue", "default");
// 2. build jobSubmitAction
val jobSubmitAction = JobSubmitAction.builder
.addExecuteCode(code)
@@ -305,10 +306,10 @@
// Supports setting engine-native parameters, e.g. Spark/Hive engine parameters
startupMap.put("spark.executor.instances", 2)
// setting linkis params
startupMap.put("wds.linkis.rm.yarnqueue", "dws")
startupMap.put("wds.linkis.rm.yarnqueue", "default")
// 2. build JobExecuteAction (the legacy 0.X usage)
val executionAction = JobExecuteAction.builder()
- .setCreator("IDE") //creator, the system name of the client requesting linkis, used for system-level isolation
+ .setCreator("APPName") //creator, the system name of the client requesting linkis, used for system-level isolation
.addExecuteCode(code) //Execution Code
.setEngineTypeStr("spark") // engineConn type
.setRunTypeStr("py") // code type
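The new comments above require that the APPName part of the user-creator label not contain "-" (replace it with "_"); a tiny hypothetical helper, not part of the SDK, makes that rule explicit:

```java
public class UserCreatorLabel {
    // Hypothetical helper: builds the "user-APPName" label value while replacing
    // "-" inside the app name with "_", per the note in this commit.
    static String of(String user, String appName) {
        return user + "-" + appName.replace("-", "_");
    }

    public static void main(String[] args) {
        System.out.println(of("hadoop", "my-app")); // prints hadoop-my_app
    }
}
```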
