Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[Issue-2318] Change ENABLE METADATA MANAGE default from false to true #2319

Merged
merged 2 commits on Jun 24, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
23 changes: 0 additions & 23 deletions assembly-combined-package/assembly-combined/conf/token.properties

This file was deleted.

2 changes: 1 addition & 1 deletion assembly-combined-package/deploy-config/linkis-env.sh
Expand Up @@ -152,7 +152,7 @@ LINKIS_PUBLIC_MODULE=lib/linkis-commons/public-module
export PROMETHEUS_ENABLE=false

#If you want to start the metadata-related microservices, set export ENABLE_METADATA_MANAGER=true
export ENABLE_METADATA_MANAGER=false
export ENABLE_METADATA_MANAGER=true

#If you only want to experience linkis streamlined services, not rely on hdfs
#you can set the following configuration to false and for the configuration related to the file directory,
Expand Down
2 changes: 1 addition & 1 deletion db/linkis_dml.sql
Expand Up @@ -66,7 +66,7 @@ INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`,
-- spark
INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', 'spark引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源', 'spark');
INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.executor.instances', '取值范围:1-40,单位:个', 'spark执行器实例最大并发数', '1', 'NumInterval', '[1,40]', '0', '0', '2', 'spark资源设置', 'spark');
INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.executor.cores', '取值范围:1-8,单位:个', 'spark执行器核心个数', '1', 'NumInterval', '[1,2]', '0', '0', '1','spark资源设置', 'spark');
INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.executor.cores', '取值范围:1-8,单位:个', 'spark执行器核心个数', '1', 'NumInterval', '[1,8]', '0', '0', '1','spark资源设置', 'spark');
INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.executor.memory', '取值范围:1-15,单位:G', 'spark执行器内存大小', '1g', 'Regex', '^([1-9]|1[0-5])(G|g)$', '0', '0', '3', 'spark资源设置', 'spark');
INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.driver.cores', '取值范围:只能取1,单位:个', 'spark驱动器核心个数', '1', 'NumInterval', '[1,1]', '0', '1', '1', 'spark资源设置','spark');
INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.driver.memory', '取值范围:1-15,单位:G', 'spark驱动器内存大小','1g', 'Regex', '^([1-9]|1[0-5])(G|g)$', '0', '0', '1', 'spark资源设置', 'spark');
Expand Down
Expand Up @@ -17,12 +17,16 @@

package org.apache.linkis.manager.common.entity.enumeration;

/**
* Engine remains unreleased type
*
* <pre>
* Default: automatic release;
* day: keep one engine during working hours from 9:00 to 21:00;
* week: weekdays Monday to Friday keep one engine;
* </pre>
*/
public enum MaintainType {

/**
* Engine remains unreleased type Default: automatic release day: keep one engine during working
* hours from 9:00 to 21:00 week: weekdays Monday to Friday keep one engine
*/
Default,
day,
week
Expand Down
Expand Up @@ -28,7 +28,7 @@ object GatewayConfiguration {
val TOKEN_KEY = "Token-Code"
val TOKEN_USER_KEY = "Token-User"
val ENABLE_TOKEN_AUTHENTICATION = CommonVars("wds.linkis.gateway.conf.enable.token.auth", false)
val TOKEN_AUTHENTICATION_CONFIG = CommonVars("wds.linkis.gateway.conf.token.auth.config", "token.properties")

val TOKEN_AUTHENTICATION_SCAN_INTERVAL = CommonVars("wds.linkis.gateway.conf.token.auth.scan.interval", 1000 * 60 * 10)

val PASS_AUTH_REQUEST_URI = CommonVars("wds.linkis.gateway.conf.url.pass.auth", "/dws/").getValue.split(",")
Expand Down
Expand Up @@ -35,37 +35,6 @@ object TokenAuthentication extends Logging {
this.tokenService = tokenService
}

// private val (props, file) = if(ENABLE_TOKEN_AUTHENTICATION.getValue)
// (new Properties, new File(this.getClass.getClassLoader.getResource(TOKEN_AUTHENTICATION_CONFIG.getValue).toURI.getPath))
// else (null, null)
// private var lastModified = 0l
//
// def init(): Unit = {
// if(ENABLE_TOKEN_AUTHENTICATION.getValue) {
// Utils.defaultScheduler.scheduleAtFixedRate(new Runnable {
// override def run(): Unit = Utils.tryAndError(tokenService.init())
// }, TOKEN_AUTHENTICATION_SCAN_INTERVAL.getValue, TOKEN_AUTHENTICATION_SCAN_INTERVAL.getValue, TimeUnit.MILLISECONDS)
// // init()
// tokenService.init()
// }
// }

//
// private def init(): Unit = if(file.lastModified() > lastModified) {
// lastModified = file.lastModified()
// info(s"loading token authentication file $file.")
// val newProps = new Properties
// val input = FileUtils.openInputStream(file)
// Utils.tryFinally(newProps.load(input))(IOUtils.closeQuietly(input))
// props.putAll(newProps)
// }
//
// private def validateTokenUser(token: String, tokenUser: String): Boolean = {
// val tokenUsers = props.getProperty(token)
// if(tokenUsers == "*" || (StringUtils.isNotBlank(tokenUsers) && tokenUsers.contains(tokenUser))) true
// else false
// }

def isTokenRequest(gatewayContext: GatewayContext) : Boolean = {
(gatewayContext.getRequest.getHeaders.containsKey(TOKEN_KEY) &&
gatewayContext.getRequest.getHeaders.containsKey(TOKEN_USER_KEY)) || (
Expand Down