Skip to content

Commit

Permalink
Merge pull request apache#2351 from apache/dev-1.1.3
Browse files Browse the repository at this point in the history
Merge dev-1.1.3 into master
  • Loading branch information
peacewong committed Jun 27, 2022
2 parents daa7bb4 + 659bfde commit 6582f49
Show file tree
Hide file tree
Showing 155 changed files with 599 additions and 4,292 deletions.
1 change: 1 addition & 0 deletions LICENSE-binary-ui
Original file line number Diff line number Diff line change
Expand Up @@ -241,4 +241,5 @@ See licenses-binary-ui/ for text of these licenses.
(The MIT License) worker-loader@2.0.0 (https://github.com/webpack-contrib/worker-loader)
(The MIT License) @form-create/iview@2.5.13 (https://github.com/xaboy/form-create)
(The MIT License) object-to-formdata@4.2.2 (https://github.com/therealparmesh/object-to-formdata)
(The MIT License) jsencrypt@3.2.1 (https://github.com/travist/jsencrypt)
(The Apache-2.0 License) material-design-icons@^3.0.1 (https://github.com/google/material-design-icons)
Original file line number Diff line number Diff line change
Expand Up @@ -17,8 +17,6 @@ eureka:
client:
serviceUrl:
defaultZone: http://127.0.0.1:20303/eureka/


management:
endpoints:
web:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -56,4 +56,6 @@ spring.spring.servlet.multipart.max-file-size=500MB
spring.spring.servlet.multipart.max-request-size=500MB

# note: a value of zero means Jetty will never write to disk. See https://github.com/spring-projects/spring-boot/issues/9073
spring.spring.servlet.multipart.file-size-threshold=50MB
spring.spring.servlet.multipart.file-size-threshold=50MB
# note: org.springframework.cloud.config.client.ConfigServiceBootstrapConfiguration.configServicePropertySource needs to be disabled
spring.spring.cloud.config.enabled=false
23 changes: 0 additions & 23 deletions assembly-combined-package/assembly-combined/conf/token.properties

This file was deleted.

2 changes: 1 addition & 1 deletion assembly-combined-package/assembly-combined/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
<parent>
<groupId>org.apache.linkis</groupId>
<artifactId>linkis</artifactId>
<version>1.1.2</version>
<version>1.1.3</version>
</parent>
<modelVersion>4.0.0</modelVersion>

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>org.apache.linkis</groupId>
<version>1.1.2</version>
<version>1.1.3</version>
</parent>
<modelVersion>4.0.0</modelVersion>

Expand Down
43 changes: 33 additions & 10 deletions assembly-combined-package/bin/install.sh
Original file line number Diff line number Diff line change
Expand Up @@ -186,14 +186,25 @@ if [ "$YARN_RESTFUL_URL" != "" ]
then
sed -i ${txt} "s#@YARN_RESTFUL_URL#$YARN_RESTFUL_URL#g" $LINKIS_HOME/db/linkis_dml.sql
fi
if [ "$KERBEROS_ENABLE" != "" ]

if [ "$YARN_AUTH_ENABLE" != "" ]
then
sed -i ${txt} "s#@KERBEROS_ENABLE#$KERBEROS_ENABLE#g" $LINKIS_HOME/db/linkis_dml.sql
sed -i ${txt} "s#@PRINCIPAL_NAME#$PRINCIPAL_NAME#g" $LINKIS_HOME/db/linkis_dml.sql
sed -i ${txt} "s#@KEYTAB_PATH#$KEYTAB_PATH#g" $LINKIS_HOME/db/linkis_dml.sql
sed -i ${txt} "s#@KRB5_PATH#$KRB5_PATH#g" $LINKIS_HOME/db/linkis_dml.sql
sed -i ${txt} "s#@YARN_AUTH_ENABLE#$YARN_AUTH_ENABLE#g" $LINKIS_HOME/db/linkis_dml.sql
sed -i ${txt} "s#@YARN_AUTH_USER#$YARN_AUTH_USER#g" $LINKIS_HOME/db/linkis_dml.sql
sed -i ${txt} "s#@YARN_AUTH_PWD#$YARN_AUTH_PWD#g" $LINKIS_HOME/db/linkis_dml.sql
else
sed -i ${txt} "s#@KERBEROS_ENABLE#false#g" $LINKIS_HOME/db/linkis_dml.sql
sed -i ${txt} "s#@YARN_AUTH_ENABLE#false#g" $LINKIS_HOME/db/linkis_dml.sql
fi


if [ "$YARN_KERBEROS_ENABLE" != "" ]
then
sed -i ${txt} "s#@YARN_KERBEROS_ENABLE#$YARN_KERBEROS_ENABLE#g" $LINKIS_HOME/db/linkis_dml.sql
sed -i ${txt} "s#@YARN_PRINCIPAL_NAME#$YARN_PRINCIPAL_NAME#g" $LINKIS_HOME/db/linkis_dml.sql
sed -i ${txt} "s#@YARN_KEYTAB_PATH#$YARN_KEYTAB_PATH#g" $LINKIS_HOME/db/linkis_dml.sql
sed -i ${txt} "s#@YARN_KRB5_PATH#$YARN_KRB5_PATH#g" $LINKIS_HOME/db/linkis_dml.sql
else
sed -i ${txt} "s#@YARN_KERBEROS_ENABLE#false#g" $LINKIS_HOME/db/linkis_dml.sql
fi

SERVER_IP=$local_host
Expand Down Expand Up @@ -295,6 +306,13 @@ sed -i ${txt} "s#\#hive.config.dir.*#hive.config.dir=$HIVE_CONF_DIR#g" $common_
#spark config
sed -i ${txt} "s#\#spark.config.dir.*#spark.config.dir=$SPARK_CONF_DIR#g" $common_conf

if [ "true" == "$HADOOP_KERBEROS_ENABLE" ]
then
sed -i ${txt} '$a \wds.linkis.keytab.enable=true' $LINKIS_HOME/conf/linkis.properties
sed -i ${txt} '$a \wds.linkis.keytab.file=$HADOOP_KEYTAB_PATH' $LINKIS_HOME/conf/linkis.properties
fi


sed -i ${txt} "s#wds.linkis.home.*#wds.linkis.home=$LINKIS_HOME#g" $common_conf

sed -i ${txt} "s#wds.linkis.filesystem.root.path.*#wds.linkis.filesystem.root.path=$WORKSPACE_USER_ROOT_PATH#g" $common_conf
Expand All @@ -303,12 +321,17 @@ sed -i ${txt} "s#wds.linkis.filesystem.hdfs.root.path.*#wds.linkis.filesystem.h
##gateway
gateway_conf=$LINKIS_HOME/conf/linkis-mg-gateway.properties
echo "update conf $gateway_conf"
defaultPwd=`date +%s%N | md5sum |cut -c 1-9`
if [ "$deployPwd" == "" ]
then
deployPwd=`date +%s%N | md5sum |cut -c 1-9`
fi


sed -i ${txt} "s#wds.linkis.ldap.proxy.url.*#wds.linkis.ldap.proxy.url=$LDAP_URL#g" $gateway_conf
sed -i ${txt} "s#wds.linkis.ldap.proxy.baseDN.*#wds.linkis.ldap.proxy.baseDN=$LDAP_BASEDN#g" $gateway_conf
sed -i ${txt} "s#wds.linkis.ldap.proxy.userNameFormat.*#wds.linkis.ldap.proxy.userNameFormat=$LDAP_USER_NAME_FORMAT#g" $gateway_conf
sed -i ${txt} "s#wds.linkis.admin.user.*#wds.linkis.admin.user=$deployUser#g" $gateway_conf
sed -i ${txt} "s#\#wds.linkis.admin.password.*#wds.linkis.admin.password=$defaultPwd#g" $gateway_conf
sed -i ${txt} "s#\#wds.linkis.admin.password.*#wds.linkis.admin.password=$deployPwd#g" $gateway_conf
if [ "$GATEWAY_PORT" != "" ]
then
sed -i ${txt} "s#spring.server.port.*#spring.server.port=$GATEWAY_PORT#g" $gateway_conf
Expand Down Expand Up @@ -405,7 +428,7 @@ if [ "true" == "$PROMETHEUS_ENABLE" ]
then
echo "prometheus is enabled"
sed -i ${txt} '$a \wds.linkis.prometheus.enable={{ PROMETHEUS_ENABLE }}' $LINKIS_HOME/conf/linkis.properties
sed -i ${txt} '$a \wds.linkis.server.user.restful.uri.pass.auth=/actuator/prometheus,' $LINKIS_HOME/conf/linkis.properties
sed -i ${txt} '$a \wds.linkis.server.user.restful.uri.pass.auth=/api/rest_j/v1/actuator/prometheus,' $LINKIS_HOME/conf/linkis.properties
sed -i ${txt} '/eureka:/a \\ instance:\n metadata-map:\n prometheus.path: ${prometheus.path:${prometheus.endpoint}}' $LINKIS_HOME/conf/application-linkis.yml
sed -i ${txt} 's#include: refresh,info#include: refresh,info,health,metrics,prometheus#g' $LINKIS_HOME/conf/application-linkis.yml
sed -i ${txt} '/instance:/a \ metadata-map:\n prometheus.path: ${prometheus.path:/actuator/prometheus}' $LINKIS_HOME/conf/application-eureka.yml
Expand All @@ -419,4 +442,4 @@ sudo chmod -R 777 $LINKIS_HOME/sbin/*
echo -e "\n"

echo -e "${GREEN}Congratulations!${NC} You have installed Linkis $LINKIS_VERSION successfully, please use sh $LINKIS_HOME/sbin/linkis-start-all.sh to start it!"
echo -e "Your default account/password is ${GREEN}[$deployUser/$defaultPwd]${NC}, you can find in $LINKIS_HOME/conf/linkis-mg-gateway.properties"
echo -e "Your default account/password is ${GREEN}[$deployUser/$deployPwd]${NC}, you can find in $LINKIS_HOME/conf/linkis-mg-gateway.properties"
46 changes: 35 additions & 11 deletions assembly-combined-package/deploy-config/linkis-env.sh
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,9 @@

### deploy user
deployUser=hadoop
## If you don't set it, a random password string will be generated during installation
deployPwd=


##Linkis_SERVER_VERSION
LINKIS_SERVER_VERSION=v1
Expand All @@ -50,27 +53,47 @@ RESULT_SET_ROOT_PATH=hdfs:///tmp/linkis
# The active ResourceManager address is required. It is recommended to add all HA addresses, eg YARN_RESTFUL_URL="http://127.0.0.1:8088;http://127.0.0.2:8088"
YARN_RESTFUL_URL="http://127.0.0.1:8088"

## Used when requests to the Yarn resource RESTful interface require user authentication.
## If the Yarn interface in your environment can be accessed directly, ignore these settings.
#YARN_AUTH_ENABLE=false
#YARN_AUTH_USER=hadoop
#YARN_AUTH_PWD=123456

## Used when requesting the SPNEGO-enabled Yarn resource RESTful interface, i.e. when Yarn has Kerberos enabled.
## If the Yarn interface in your environment can be accessed directly, ignore these settings.
#KERBEROS_ENABLE=true
#PRINCIPAL_NAME=yarn
#KEYTAB_PATH=/etc/security/keytabs/yarn.keytab
#KRB5_PATH=/etc/krb5.conf
#YARN_KERBEROS_ENABLE=true
#YARN_PRINCIPAL_NAME=yarn
#YARN_KEYTAB_PATH=/etc/security/keytabs/yarn.keytab
#YARN_KRB5_PATH=/etc/krb5.conf


##############################################################
#
# NOTICE:
# You can also set these variables as system environment in ~/.bashrc file

###HADOOP CONF DIR
#HADOOP
HADOOP_HOME=/appcom/Install/hadoop
HADOOP_CONF_DIR=/appcom/config/hadoop-config
#HADOOP_KERBEROS_ENABLE=true
#HADOOP_KEYTAB_PATH=/appcom/keytab/

###HIVE CONF DIR
#Hive
HIVE_HOME=/appcom/Install/hive
HIVE_CONF_DIR=/appcom/config/hive-config

###SPARK CONF DIR
#Spark
SPARK_HOME=/appcom/Install/spark
SPARK_CONF_DIR=/appcom/config/spark-config


## Engine version conf
#SPARK_VERSION
#SPARK_VERSION=2.4.3

##HIVE_VERSION
#HIVE_VERSION=2.3.3

#PYTHON_VERSION=python2

################### The install Configuration of all Micro-Services #####################
Expand All @@ -84,7 +107,8 @@ SPARK_CONF_DIR=/appcom/config/spark-config

### EUREKA install information
### You can access it in your browser at the address below:http://${EUREKA_INSTALL_IP}:${EUREKA_PORT}
#EUREKA_INSTALL_IP=127.0.0.1 # Microservices Service Registration Discovery Center
#EUREKA: Microservices Service Registration Discovery Center
#EUREKA_INSTALL_IP=127.0.0.1
EUREKA_PORT=20303
export EUREKA_PREFER_IP=false

Expand Down Expand Up @@ -140,7 +164,7 @@ export SERVER_HEAP_SIZE="512M"
##The decompression directory and the installation directory must be different
#LINKIS_HOME=/appcom/Install/LinkisInstall

LINKIS_VERSION=1.1.2
LINKIS_VERSION=1.1.3

# for install
LINKIS_PUBLIC_MODULE=lib/linkis-commons/public-module
Expand All @@ -152,11 +176,11 @@ LINKIS_PUBLIC_MODULE=lib/linkis-commons/public-module
export PROMETHEUS_ENABLE=false

#If you want to start the metadata-related microservices, set export ENABLE_METADATA_MANAGER=true
export ENABLE_METADATA_MANAGER=false
export ENABLE_METADATA_MANAGER=true

#If you only want to try the streamlined Linkis services without relying on HDFS,
#you can set the following configuration to false, and for the file-directory-related
#configuration use the [file://] path scheme instead of [hdfs://]
export ENABLE_HDFS=true
export ENABLE_HIVE=true
export ENABLE_SPARK=true
export ENABLE_SPARK=true
2 changes: 1 addition & 1 deletion assembly-combined-package/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
<parent>
<groupId>org.apache.linkis</groupId>
<artifactId>linkis</artifactId>
<version>1.1.2</version>
<version>1.1.3</version>
</parent>
<modelVersion>4.0.0</modelVersion>

Expand Down
2 changes: 1 addition & 1 deletion db/linkis_ddl.sql
Original file line number Diff line number Diff line change
Expand Up @@ -413,7 +413,7 @@ CREATE TABLE `linkis_ps_cs_context_history` (
`update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'update unix timestamp',
`create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'create time',
`access_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'last access time',
KEY `keyword` (`keyword`(191)),
KEY `keyword` (`keyword`(191))
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

-- ----------------------------
Expand Down
5 changes: 2 additions & 3 deletions db/linkis_dml.sql
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`,
-- spark
INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', 'spark引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源', 'spark');
INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.executor.instances', '取值范围:1-40,单位:个', 'spark执行器实例最大并发数', '1', 'NumInterval', '[1,40]', '0', '0', '2', 'spark资源设置', 'spark');
INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.executor.cores', '取值范围:1-8,单位:个', 'spark执行器核心个数', '1', 'NumInterval', '[1,2]', '0', '0', '1','spark资源设置', 'spark');
INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.executor.cores', '取值范围:1-8,单位:个', 'spark执行器核心个数', '1', 'NumInterval', '[1,8]', '0', '0', '1','spark资源设置', 'spark');
INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.executor.memory', '取值范围:1-15,单位:G', 'spark执行器内存大小', '1g', 'Regex', '^([1-9]|1[0-5])(G|g)$', '0', '0', '3', 'spark资源设置', 'spark');
INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.driver.cores', '取值范围:只能取1,单位:个', 'spark驱动器核心个数', '1', 'NumInterval', '[1,1]', '0', '1', '1', 'spark资源设置','spark');
INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.driver.memory', '取值范围:1-15,单位:G', 'spark驱动器内存大小','1g', 'Regex', '^([1-9]|1[0-5])(G|g)$', '0', '0', '1', 'spark资源设置', 'spark');
Expand Down Expand Up @@ -276,9 +276,8 @@ insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_val
(select `relation`.`config_key_id` AS `config_key_id`, '' AS `config_value`, `relation`.`engine_type_label_id` AS `config_label_id` FROM linkis_ps_configuration_key_engine_relation relation
INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id AND label.label_value = @OPENLOOKENG_ALL);


insert into `linkis_cg_rm_external_resource_provider`(`id`,`resource_type`,`name`,`labels`,`config`) values
(1,'Yarn','default',NULL,'{\r\n\"rmWebAddress\": \"@YARN_RESTFUL_URL\",\r\n\"hadoopVersion\": \"2.7.2\",\r\n\"authorEnable\":false,\r\n\"user\":\"hadoop\",\r\n\"pwd\":\"123456\",\r\n\"kerberosEnable\":@KERBEROS_ENABLE,\r\n\"principalName\":\"@PRINCIPAL_NAME\",\r\n\"keytabPath\":\"@KEYTAB_PATH\",\r\n\"krb5Path\":\"@KRB5_PATH\"\r\n}');
(1,'Yarn','default',NULL,'{\r\n\"rmWebAddress\": \"@YARN_RESTFUL_URL\",\r\n\"hadoopVersion\": \"2.7.2\",\r\n\"authorEnable\":@YARN_AUTH_ENABLE,\r\n\"user\":\"@YARN_AUTH_USER\",\r\n\"pwd\":\"@YARN_AUTH_PWD\",\r\n\"kerberosEnable\":@YARN_KERBEROS_ENABLE,\r\n\"principalName\":\"@YARN_PRINCIPAL_NAME\",\r\n\"keytabPath\":\"@YARN_KEYTAB_PATH\",\r\n\"krb5Path\":\"@YARN_KRB5_PATH\"\r\n}');

-- errorcode
-- 01 linkis server
Expand Down
Loading

0 comments on commit 6582f49

Please sign in to comment.