From d46d0a7499038f376de72a0d54a959a5e7f97969 Mon Sep 17 00:00:00 2001 From: Licho Date: Wed, 14 Dec 2022 14:56:47 +0800 Subject: [PATCH 01/47] fix: result not have column. --- .../main/java/com/dlink/result/ResultRunnable.java | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/dlink-core/src/main/java/com/dlink/result/ResultRunnable.java b/dlink-core/src/main/java/com/dlink/result/ResultRunnable.java index 1b1d5b5d8a..c094f4156c 100644 --- a/dlink-core/src/main/java/com/dlink/result/ResultRunnable.java +++ b/dlink-core/src/main/java/com/dlink/result/ResultRunnable.java @@ -56,7 +56,7 @@ public class ResultRunnable implements Runnable { private final String timeZone; public ResultRunnable(TableResult tableResult, Integer maxRowNum, boolean isChangeLog, boolean isAutoCancel, - String timeZone) { + String timeZone) { this.tableResult = tableResult; this.maxRowNum = maxRowNum; this.isChangeLog = isChangeLog; @@ -92,15 +92,14 @@ private void catchChangLog(SelectResult selectResult) { List> rows = selectResult.getRowData(); List columns = FlinkUtil.catchColumn(tableResult); + columns.add(0, FlinkConstant.OP); + selectResult.setColumns(new LinkedHashSet<>(columns)); Streams.stream(tableResult.collect()).limit(maxRowNum).forEach(row -> { - Map map = getFieldMap(columns, row); + Map map = getFieldMap(columns.subList(1, columns.size()), row); map.put(FlinkConstant.OP, row.getKind().shortString()); rows.add(map); }); - columns.add(0, FlinkConstant.OP); - selectResult.setColumns(new LinkedHashSet<>(columns)); - if (isAutoCancel) { tableResult.getJobClient().ifPresent(JobClient::cancel); } @@ -110,6 +109,7 @@ private void catchData(SelectResult selectResult) { List> rows = selectResult.getRowData(); List columns = FlinkUtil.catchColumn(tableResult); + selectResult.setColumns(new LinkedHashSet<>(columns)); Streams.stream(tableResult.collect()).limit(maxRowNum).forEach(row -> { Map map = getFieldMap(columns, row); if (RowKind.UPDATE_BEFORE == row.getKind() || RowKind.DELETE == row.getKind()) { @@ -118,8 +118,6 @@ private void catchData(SelectResult selectResult) { rows.add(map); } }); - - selectResult.setColumns(new LinkedHashSet<>(columns)); } private Map getFieldMap(List columns, Row row) { From 2cd3f75fe3dc5e9491e7d658c31d3d082ce385a4 Mon Sep 17 00:00:00 2001 From: sunlichao11 Date: Sat, 4 May 2024 18:37:27 +0800 Subject: [PATCH 02/47] feat: udf select form --- .../org/dinky/controller/JarController.java | 31 +++++++++++++++ .../main/java/org/dinky/utils/UDFUtils.java | 25 ++++++++++++ .../components/Flink/OptionsSelect/index.tsx | 1 + .../src/components/Flink/UdfSelect/index.tsx | 34 ++++++++++++++++ dinky-web/src/locales/en-US/pages.ts | 7 ++++ dinky-web/src/locales/zh-CN/pages.ts | 7 ++++ .../RightContainer/JobConfig/index.tsx | 39 ++++++++++++++++++- .../RightContainer/JobConfig/service.tsx | 7 +++- dinky-web/src/pages/DataStudio/model.ts | 26 +++++++++++-- .../components/ConfigurationModal/index.tsx | 2 +- dinky-web/src/services/endpoints.tsx | 1 + dinky-web/tsconfig.json | 2 +- 12 files changed, 174 insertions(+), 8 deletions(-) create mode 100644 dinky-web/src/components/Flink/UdfSelect/index.tsx diff --git a/dinky-admin/src/main/java/org/dinky/controller/JarController.java b/dinky-admin/src/main/java/org/dinky/controller/JarController.java index eee5788962..5d85f7168a 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/JarController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/JarController.java @@ -19,6 +19,9 @@ package org.dinky.controller; 
+import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.Getter; import org.dinky.data.model.Task; import org.dinky.data.result.Result; import org.dinky.function.constant.PathConstant; @@ -28,10 +31,14 @@ import org.apache.flink.table.catalog.FunctionLanguage; +import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.stream.Collectors; +import org.dinky.utils.UDFUtils; +import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RestController; @@ -78,4 +85,28 @@ public Result>> generateJar() { resultMap.get("failed")); return Result.succeed(resultMap, msg); } + + @GetMapping("/udf/geUdfs") + @ApiOperation("Get UDFs") + public Result> getUdfs() { + List staticUdfs = UDFUtils.getStaticUdfs(); + List dynamicUdfs = taskService.getAllUDF().stream() + .map(UDFUtils::taskToUDF) + .collect(Collectors.toList()); + List allUdfs = new ArrayList<>(staticUdfs); + allUdfs.addAll(dynamicUdfs); + List result = allUdfs.stream().map(udf -> { + String name = udf.getClassName().substring(udf.getClassName().lastIndexOf(".") + 1); + name = name.substring(0, 1).toLowerCase() + name.substring(1); + return new UdfInfo(name, udf.getClassName()); + }).collect(Collectors.toList()); + return Result.succeed(result); + } + + @Data + @AllArgsConstructor + public static class UdfInfo { + String name; + String className; + } } diff --git a/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java b/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java index 1e771df27e..87dbecdd99 100644 --- a/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java +++ b/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java @@ -19,6 +19,7 @@ package org.dinky.utils; +import org.apache.flink.api.common.functions.Function; import org.dinky.assertion.Asserts; import org.dinky.data.exception.BusException; import org.dinky.data.model.Task; @@ -26,6 +27,12 @@ import org.dinky.function.util.UDFUtil; import org.apache.flink.table.catalog.FunctionLanguage; +import org.reflections.Reflections; +import org.reflections.scanners.Scanners; + +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; public class UDFUtils extends UDFUtil { @@ -41,4 +48,22 @@ public static UDF taskToUDF(Task task) { throw new BusException("udf `class` config is null,please check your udf task config"); } } + + public static List getStaticUdfs() { + Reflections reflections = new Reflections(Function.class.getPackage().getName()); + Set> operations = + reflections.get(Scanners.SubTypes.of(Function.class).asClass()); + + return operations.stream() + .filter(operation -> !operation.isInterface() + && !operation.getName + ().startsWith("org.apache.flink")) + .map(operation -> UDF.builder() + .className(operation.getName()) + .functionLanguage(FunctionLanguage.JAVA) + .build()) + .collect(Collectors.toList()); + } + + } diff --git a/dinky-web/src/components/Flink/OptionsSelect/index.tsx b/dinky-web/src/components/Flink/OptionsSelect/index.tsx index 1dc648e2a5..5e7213d79a 100644 --- a/dinky-web/src/components/Flink/OptionsSelect/index.tsx +++ b/dinky-web/src/components/Flink/OptionsSelect/index.tsx @@ -21,6 +21,7 @@ import { l } from '@/utils/intl'; import { ProFormSelect } from '@ant-design/pro-components'; import { ProFormSelectProps } from '@ant-design/pro-form/es/components/Select'; import { Divider, 
Typography } from 'antd'; +import React from "react"; const { Link } = Typography; diff --git a/dinky-web/src/components/Flink/UdfSelect/index.tsx b/dinky-web/src/components/Flink/UdfSelect/index.tsx new file mode 100644 index 0000000000..6e9db28702 --- /dev/null +++ b/dinky-web/src/components/Flink/UdfSelect/index.tsx @@ -0,0 +1,34 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +import {l} from '@/utils/intl'; +import {ProFormSelect} from '@ant-design/pro-components'; +import {ProFormSelectProps} from '@ant-design/pro-form/es/components/Select'; +import {Divider, Typography} from 'antd'; +import React from "react"; + +const {Link} = Typography; + +export type FlinkUdfOptionsProps = ProFormSelectProps & object; + +const FlinkUdfOptionsSelect = (props: FlinkUdfOptionsProps) => { + return<> +}; + +export default FlinkUdfOptionsSelect; diff --git a/dinky-web/src/locales/en-US/pages.ts b/dinky-web/src/locales/en-US/pages.ts index fbd9956fc2..53820ebd59 100644 --- a/dinky-web/src/locales/en-US/pages.ts +++ b/dinky-web/src/locales/en-US/pages.ts @@ -439,6 +439,11 @@ export default { 'pages.datastudio.label.jobConfig.addConfig': 'Add Config item', 'pages.datastudio.label.jobConfig.addConfig.params': 'parameters', 'pages.datastudio.label.jobConfig.addConfig.value': 'value', + 'pages.datastudio.label.udf': 'Udf Item', + 'pages.datastudio.label.udf.tip': 'Inject UDF item', + 'pages.datastudio.label.udf.injectUdf': 'Inject UDF item', + 'pages.datastudio.label.udf.name': 'function name', + 'pages.datastudio.label.udf.className': 'class name', 'pages.datastudio.label.jobConfig.alertGroup': 'Alarm Group', 'pages.datastudio.label.jobConfig.alertGroup.tip': 'Select alert group', 'pages.datastudio.label.jobConfig.batchmode': 'Batch Mode', @@ -466,6 +471,8 @@ export default { 'pages.datastudio.label.jobConfig.other': 'Other Config', 'pages.datastudio.label.jobConfig.other.tip': 'Other Config items will be applied to the execution environment, such as pipeline.name', + 'pages.datastudio.label.jobConfig.udf': 'UDF injected', + 'pages.datastudio.label.jobConfig.udf.tip': 'Automatically inject UDF', 'pages.datastudio.label.jobConfig.parallelism': 'Parallelism', 'pages.datastudio.label.jobConfig.parallelism.tip': 'Set the parallelism of Flink tasks, the minimum value is 1', diff --git a/dinky-web/src/locales/zh-CN/pages.ts b/dinky-web/src/locales/zh-CN/pages.ts index 35bfcf9b32..f70837e4d8 100644 --- a/dinky-web/src/locales/zh-CN/pages.ts +++ b/dinky-web/src/locales/zh-CN/pages.ts @@ -420,6 +420,11 @@ export default { 'pages.datastudio.label.jobConfig.addConfig': '添加配置项', 'pages.datastudio.label.jobConfig.addConfig.params': '参数', 'pages.datastudio.label.jobConfig.addConfig.value': '值', + 'pages.datastudio.label.udf': '注入UDF算子', + 
'pages.datastudio.label.udf.tip': '自动注入UDF算子', + 'pages.datastudio.label.udf.injectUdf': '注入UDF', + 'pages.datastudio.label.udf.name': '函数名称', + 'pages.datastudio.label.udf.className': '类名', 'pages.datastudio.label.jobConfig.alertGroup': '告警组', 'pages.datastudio.label.jobConfig.alertGroup.tip': '选择告警组', 'pages.datastudio.label.jobConfig.batchmode': '批模式', @@ -443,6 +448,8 @@ export default { '【增强特性】 开启语句集机制,将把多个 Insert 语句合成一个 JobGraph 再进行提交,Select 语句无效', 'pages.datastudio.label.jobConfig.other': '其他配置', 'pages.datastudio.label.jobConfig.other.tip': '其他配置项,将被应用于执行环境,如 pipeline.name', + 'pages.datastudio.label.jobConfig.udf': 'UDF注入', + 'pages.datastudio.label.jobConfig.udf.tip': '自动注入UDF算子', 'pages.datastudio.label.jobConfig.parallelism': '任务并行度', 'pages.datastudio.label.jobConfig.parallelism.tip': '设置Flink任务的并行度,最小为 1', 'pages.datastudio.label.jobConfig.savePointStrategy': 'Savepoint策略', diff --git a/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/index.tsx b/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/index.tsx index af0a1e66d9..4b29a82a83 100644 --- a/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/index.tsx +++ b/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/index.tsx @@ -52,6 +52,7 @@ import { import { Alert, Space } from 'antd'; import { useForm } from 'antd/es/form/Form'; import { debounce } from 'lodash'; +import React from 'react'; import { useEffect, useState } from 'react'; import { connect } from 'umi'; @@ -64,7 +65,8 @@ const JobConfig = (props: any) => { env, group, rightContainer, - flinkConfigOptions + flinkConfigOptions, + flinkUdfOptions } = props; const current = getCurrentData(panes, activeKey); @@ -80,6 +82,9 @@ const JobConfig = (props: any) => { dispatch({ type: ALERT_MODEL_ASYNC.queryAlertGroup }); + dispatch({ + type: STUDIO_MODEL_ASYNC.queryFlinkUdfOptions + }); setSelectRunMode(current?.type); form.setFieldsValue({ ...current, type: current?.type }); }, [current]); @@ -302,6 +307,35 @@ const JobConfig = (props: any) => { + + + + + + + + ); @@ -314,5 +348,6 @@ export default connect(({ Studio, Alert }: { Studio: StateType; Alert: AlertStat tabs: Studio.tabs, env: Studio.env, group: Alert.group, - flinkConfigOptions: Studio.flinkConfigOptions + flinkConfigOptions: Studio.flinkConfigOptions, + flinkUdfOptions: Studio.flinkUdfOptions }))(JobConfig); diff --git a/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/service.tsx b/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/service.tsx index 07d3de4bb4..62512567ac 100644 --- a/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/service.tsx +++ b/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/service.tsx @@ -34,6 +34,11 @@ export function getFlinkConfigs() { return queryDataByParams(API_CONSTANTS.FLINK_CONF_CONFIG_OPTIONS); } -export function querySuggessionData(params: any) { +export function getFlinkUdfOptions() { + return queryDataByParams(API_CONSTANTS.FLINK_UDF_OPTIONS); + +} + +export function querySuggestionData(params: any) { return getDataByParams(API_CONSTANTS.SUGGESTION_QUERY_ALL_SUGGESTIONS, params); } diff --git a/dinky-web/src/pages/DataStudio/model.ts b/dinky-web/src/pages/DataStudio/model.ts index 25ec3ed70f..c56d187bdc 100644 --- a/dinky-web/src/pages/DataStudio/model.ts +++ b/dinky-web/src/pages/DataStudio/model.ts @@ -24,9 +24,9 @@ import { getTaskData } from '@/pages/DataStudio/LeftContainer/Project/service'; import { getClusterConfigurationData, getEnvData, - getFlinkConfigs, + getFlinkConfigs, getFlinkUdfOptions, getSessionData, - 
querySuggessionData + querySuggestionData } from '@/pages/DataStudio/RightContainer/JobConfig/service'; import { QueryParams } from '@/pages/RegCenter/DataSource/components/DataSourceDetail/RightTagsRouter/data'; import { SuggestionInfo } from '@/types/Public/data'; @@ -281,6 +281,7 @@ export type StateType = { sessionCluster: Cluster.Instance[]; clusterConfiguration: Cluster.Config[]; flinkConfigOptions: DefaultOptionType[]; + flinkUdfOptions: DefaultOptionType[]; env: EnvType[]; tabs: TabsType; bottomContainerContent: BottomContainerContent; @@ -294,6 +295,7 @@ export type ModelType = { effects: { queryProject: Effect; queryFlinkConfigOptions: Effect; + queryFlinkUdfOptions: Effect; querySuggestions: Effect; queryEnv: Effect; queryDatabaseList: Effect; @@ -333,6 +335,7 @@ export type ModelType = { saveFooterValue: Reducer; updateJobRunningMsg: Reducer; saveFlinkConfigOptions: Reducer; + saveFlinkUdfOptions: Reducer; updateSuggestions: Reducer; }; }; @@ -383,6 +386,7 @@ const Model: ModelType = { sessionCluster: [], clusterConfiguration: [], flinkConfigOptions: [], + flinkUdfOptions: [], env: [], footContainer: { codePosition: [1, 1], @@ -422,8 +426,15 @@ const Model: ModelType = { payload: response }); }, + *queryFlinkUdfOptions({ payload }, { call, put }) { + const response: [] = yield call(getFlinkUdfOptions, payload); + yield put({ + type: 'saveUdfOptions', + payload: response + }); + }, *querySuggestions({ payload }, { call, put }) { - const response: SuggestionInfo[] = yield call(querySuggessionData, payload); + const response: SuggestionInfo[] = yield call(querySuggestionData, payload); yield put({ type: 'updateSuggestions', payload: response @@ -624,6 +635,15 @@ const Model: ModelType = { flinkConfigOptions: payload }; }, + /** + * udf options + */ + saveFlinkUdfOptions(state, { payload }) { + return { + ...state, + udfOptions: payload + }; + }, /** * 更新tabs activeKey */ diff --git a/dinky-web/src/pages/RegCenter/Cluster/Configuration/components/ConfigurationModal/index.tsx b/dinky-web/src/pages/RegCenter/Cluster/Configuration/components/ConfigurationModal/index.tsx index 5769c5ebb7..05766009de 100644 --- a/dinky-web/src/pages/RegCenter/Cluster/Configuration/components/ConfigurationModal/index.tsx +++ b/dinky-web/src/pages/RegCenter/Cluster/Configuration/components/ConfigurationModal/index.tsx @@ -60,7 +60,7 @@ const ConfigurationModal: React.FC = (props) useEffect(() => { if (visible) { dispatch({ - type: STUDIO_MODEL_ASYNC.queryFlinkConfigOptions + type:STUDIO_MODEL_ASYNC.queryFlinkConfigOptions }); } form.setFieldsValue(value); diff --git a/dinky-web/src/services/endpoints.tsx b/dinky-web/src/services/endpoints.tsx index 65d5458bf5..8e9ad52a11 100644 --- a/dinky-web/src/services/endpoints.tsx +++ b/dinky-web/src/services/endpoints.tsx @@ -285,6 +285,7 @@ export enum API_CONSTANTS { // ------------------------------------ flink conf about ------------------------------------ READ_CHECKPOINT = '/api/flinkConf/readCheckPoint', FLINK_CONF_CONFIG_OPTIONS = '/api/flinkConf/configOptions', + FLINK_UDF_OPTIONS = '/api/jar/udf/geUdfs', // ------------------------------------ suggestion ------------------------------------ SUGGESTION_QUERY_ALL_SUGGESTIONS = '/api/suggestion/queryAllSuggestions' diff --git a/dinky-web/tsconfig.json b/dinky-web/tsconfig.json index 85733f682c..4776a77a2e 100644 --- a/dinky-web/tsconfig.json +++ b/dinky-web/tsconfig.json @@ -4,7 +4,7 @@ "module": "esnext", "moduleResolution": "node", "importHelpers": true, - "jsx": "preserve", + "jsx": "react", 
"esModuleInterop": true, "sourceMap": true, "baseUrl": "./", From f87e778cca66f4ff419404934d8dae4d00b941fd Mon Sep 17 00:00:00 2001 From: sunlichao11 Date: Sat, 4 May 2024 18:43:12 +0800 Subject: [PATCH 03/47] chore: reset --- dinky-web/tsconfig.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dinky-web/tsconfig.json b/dinky-web/tsconfig.json index 4776a77a2e..85733f682c 100644 --- a/dinky-web/tsconfig.json +++ b/dinky-web/tsconfig.json @@ -4,7 +4,7 @@ "module": "esnext", "moduleResolution": "node", "importHelpers": true, - "jsx": "react", + "jsx": "preserve", "esModuleInterop": true, "sourceMap": true, "baseUrl": "./", From 85f938b4a20147d4cb3bc3d33e708a3e79d31120 Mon Sep 17 00:00:00 2001 From: sunlichao11 Date: Sat, 4 May 2024 21:03:07 +0800 Subject: [PATCH 04/47] feat: select udf and change name --- .../org/dinky/controller/JarController.java | 22 ++---- .../java/org/dinky/service/TaskService.java | 6 +- .../dinky/service/impl/TaskServiceImpl.java | 9 ++- .../src/components/Flink/UdfSelect/index.tsx | 24 ++++-- .../RightContainer/JobConfig/index.tsx | 73 ++++++++++++------- dinky-web/src/pages/DataStudio/model.ts | 4 +- dinky-web/src/types/Studio/data.d.ts | 1 - 7 files changed, 83 insertions(+), 56 deletions(-) diff --git a/dinky-admin/src/main/java/org/dinky/controller/JarController.java b/dinky-admin/src/main/java/org/dinky/controller/JarController.java index 5d85f7168a..42e9cadab9 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/JarController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/JarController.java @@ -21,9 +21,9 @@ import lombok.AllArgsConstructor; import lombok.Data; -import lombok.Getter; import org.dinky.data.model.Task; import org.dinky.data.result.Result; +import org.dinky.data.vo.CascaderVO; import org.dinky.function.constant.PathConstant; import org.dinky.function.data.model.UDF; import org.dinky.function.util.UDFUtil; @@ -32,7 +32,6 @@ import org.apache.flink.table.catalog.FunctionLanguage; import java.util.ArrayList; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.stream.Collectors; @@ -68,7 +67,7 @@ public class JarController { @PostMapping("/udf/generateJar") @ApiOperation("Generate jar") public Result>> generateJar() { - List allUDF = taskService.getAllUDF(); + List allUDF = taskService.getAllUDFWithSavePoint(); List udfCodes = allUDF.stream() .map(task -> UDF.builder() .code(task.getStatement()) @@ -88,25 +87,16 @@ public Result>> generateJar() { @GetMapping("/udf/geUdfs") @ApiOperation("Get UDFs") - public Result> getUdfs() { + public Result> getUdfs() { List staticUdfs = UDFUtils.getStaticUdfs(); - List dynamicUdfs = taskService.getAllUDF().stream() + List dynamicUdfs = taskService.getAllUdfEnabled().stream() .map(UDFUtils::taskToUDF) .collect(Collectors.toList()); List allUdfs = new ArrayList<>(staticUdfs); allUdfs.addAll(dynamicUdfs); - List result = allUdfs.stream().map(udf -> { - String name = udf.getClassName().substring(udf.getClassName().lastIndexOf(".") + 1); - name = name.substring(0, 1).toLowerCase() + name.substring(1); - return new UdfInfo(name, udf.getClassName()); - }).collect(Collectors.toList()); + List result = allUdfs.stream().map(udf -> new CascaderVO(udf.getClassName())) + .collect(Collectors.toList()); return Result.succeed(result); } - @Data - @AllArgsConstructor - public static class UdfInfo { - String name; - String className; - } } diff --git a/dinky-admin/src/main/java/org/dinky/service/TaskService.java 
b/dinky-admin/src/main/java/org/dinky/service/TaskService.java index 3974ef9897..a42cdba1be 100644 --- a/dinky-admin/src/main/java/org/dinky/service/TaskService.java +++ b/dinky-admin/src/main/java/org/dinky/service/TaskService.java @@ -190,12 +190,14 @@ public interface TaskService extends ISuperService { */ Task initDefaultFlinkSQLEnv(Integer tenantId); + List getAllUdfEnabled(); + /** - * Get a list of all user-defined functions (UDFs) in the system. + * Get a list of user-defined functions (UDFs) that have savepoint path in the system. * * @return A list of {@link Task} objects representing the UDFs. */ - List getAllUDF(); + List getAllUDFWithSavePoint(); /** * Get a list of all release user-defined functions (UDFs) in the system. diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java index 441a11cd8f..043a972466 100644 --- a/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java +++ b/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java @@ -705,7 +705,14 @@ public JobModelOverview getJobStreamingOrBatchModelOverview() { } @Override - public List getAllUDF() { + public List getAllUdfEnabled(){ + return list(new QueryWrapper() + .in("dialect", Dialect.JAVA.getValue(), Dialect.SCALA.getValue(), Dialect.PYTHON.getValue()) + .eq("enabled", 1)); + } + + @Override + public List getAllUDFWithSavePoint() { return list(new QueryWrapper() .in("dialect", Dialect.JAVA.getValue(), Dialect.SCALA.getValue(), Dialect.PYTHON.getValue()) .eq("enabled", 1) diff --git a/dinky-web/src/components/Flink/UdfSelect/index.tsx b/dinky-web/src/components/Flink/UdfSelect/index.tsx index 6e9db28702..e7cf6263b9 100644 --- a/dinky-web/src/components/Flink/UdfSelect/index.tsx +++ b/dinky-web/src/components/Flink/UdfSelect/index.tsx @@ -17,18 +17,30 @@ * */ -import {l} from '@/utils/intl'; import {ProFormSelect} from '@ant-design/pro-components'; import {ProFormSelectProps} from '@ant-design/pro-form/es/components/Select'; -import {Divider, Typography} from 'antd'; +import {Divider} from 'antd'; import React from "react"; -const {Link} = Typography; - -export type FlinkUdfOptionsProps = ProFormSelectProps & object; +export type FlinkUdfOptionsProps = ProFormSelectProps & {}; const FlinkUdfOptionsSelect = (props: FlinkUdfOptionsProps) => { - return<> + + const renderTemplateDropDown = (item: any) => { + return ( + <> + + {item} + + ); + }; + + return ( + renderTemplateDropDown(item), ...props.fieldProps}} + /> + ); }; export default FlinkUdfOptionsSelect; diff --git a/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/index.tsx b/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/index.tsx index 4b29a82a83..6741e7a1d5 100644 --- a/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/index.tsx +++ b/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/index.tsx @@ -18,13 +18,13 @@ */ import FlinkOptionsSelect from '@/components/Flink/OptionsSelect'; -import { SAVE_POINT_TYPE } from '@/pages/DataStudio/constants'; +import {SAVE_POINT_TYPE} from '@/pages/DataStudio/constants'; import { getCurrentData, getCurrentTab, isDataStudioTabsItemType } from '@/pages/DataStudio/function'; -import { StateType, STUDIO_MODEL, STUDIO_MODEL_ASYNC } from '@/pages/DataStudio/model'; +import {StateType, STUDIO_MODEL, STUDIO_MODEL_ASYNC} from '@/pages/DataStudio/model'; import { buildAlertGroupOptions, buildClusterConfigOptions, @@ -35,11 +35,11 @@ import { isCanRenderClusterConfiguration, 
isCanRenderClusterInstance } from '@/pages/DataStudio/RightContainer/JobConfig/function'; -import { JOB_LIFE_CYCLE } from '@/pages/DevOps/constants'; -import { AlertStateType, ALERT_MODEL_ASYNC } from '@/pages/RegCenter/Alert/AlertInstance/model'; -import { DIALECT, RUN_MODE, SWITCH_OPTIONS } from '@/services/constants'; -import { l } from '@/utils/intl'; -import { InfoCircleOutlined } from '@ant-design/icons'; +import {JOB_LIFE_CYCLE} from '@/pages/DevOps/constants'; +import {AlertStateType, ALERT_MODEL_ASYNC} from '@/pages/RegCenter/Alert/AlertInstance/model'; +import {DIALECT, RUN_MODE, SWITCH_OPTIONS} from '@/services/constants'; +import {l} from '@/utils/intl'; +import {InfoCircleOutlined} from '@ant-design/icons'; import { ProForm, ProFormDigit, @@ -49,19 +49,20 @@ import { ProFormSwitch, ProFormText } from '@ant-design/pro-components'; -import { Alert, Space } from 'antd'; -import { useForm } from 'antd/es/form/Form'; -import { debounce } from 'lodash'; +import {Alert, Space} from 'antd'; +import {useForm} from 'antd/es/form/Form'; +import {debounce} from 'lodash'; import React from 'react'; -import { useEffect, useState } from 'react'; -import { connect } from 'umi'; +import {useEffect, useState} from 'react'; +import {connect} from 'umi'; +import FlinkUdfOptionsSelect from "@/components/Flink/UdfSelect"; const JobConfig = (props: any) => { const { sessionCluster, clusterConfiguration, dispatch, - tabs: { panes, activeKey }, + tabs: {panes, activeKey}, env, group, rightContainer, @@ -86,7 +87,7 @@ const JobConfig = (props: any) => { type: STUDIO_MODEL_ASYNC.queryFlinkUdfOptions }); setSelectRunMode(current?.type); - form.setFieldsValue({ ...current, type: current?.type }); + form.setFieldsValue({...current, type: current?.type}); }, [current]); const onValuesChange = (change: { [key in string]: any }, all: any) => { @@ -111,15 +112,15 @@ const JobConfig = (props: any) => { pane.isModified = true; dispatch({ type: STUDIO_MODEL.saveTabs, - payload: { ...props.tabs } + payload: {...props.tabs} }); }; return ( -
+
{current?.step === JOB_LIFE_CYCLE.PUBLISH && ( <> - + )} { alertGroupId: -1 }} className={'data-studio-form'} - style={{ paddingInline: '15px', overflow: 'scroll', marginTop: 5 }} + style={{paddingInline: '15px', overflow: 'scroll', marginTop: 5}} form={form} submitter={false} layout='vertical' @@ -144,7 +145,7 @@ const JobConfig = (props: any) => { name='type' label={l('global.table.execmode')} tooltip={l('pages.datastudio.label.jobConfig.execmode.tip')} - rules={[{ required: true, message: l('pages.datastudio.label.jobConfig.execmode.tip') }]} + rules={[{required: true, message: l('pages.datastudio.label.jobConfig.execmode.tip')}]} options={buildRunModelOptions()} fieldProps={{ onChange: (value: string) => { @@ -160,7 +161,7 @@ const JobConfig = (props: any) => { {isCanRenderClusterInstance(selectRunMode) && ( <> { tooltip={l('pages.datastudio.label.jobConfig.flinksql.env.tip1')} options={buildEnvOptions(env)} rules={[ - { required: true, message: l('pages.datastudio.label.jobConfig.flinksql.env.tip1') } + {required: true, message: l('pages.datastudio.label.jobConfig.flinksql.env.tip1')} ]} showSearch allowClear={false} @@ -237,7 +238,7 @@ const JobConfig = (props: any) => { valuePropName='checked' tooltip={{ title: l('pages.datastudio.label.jobConfig.fragment.tip'), - icon: + icon: }} {...SWITCH_OPTIONS()} /> @@ -247,7 +248,7 @@ const JobConfig = (props: any) => { valuePropName='checked' tooltip={{ title: l('pages.datastudio.label.jobConfig.batchmode.tip'), - icon: + icon: }} {...SWITCH_OPTIONS()} /> @@ -284,7 +285,7 @@ const JobConfig = (props: any) => { name={['configJson', 'customConfig']} copyIconProps={false} creatorButtonProps={{ - style: { width: '100%' }, + style: {width: '100%'}, creatorButtonText: l('pages.datastudio.label.jobConfig.addConfig') }} > @@ -310,16 +311,16 @@ const JobConfig = (props: any) => { - { showSearch placeholder={l('pages.datastudio.label.udf.className')} options={flinkUdfOptions} + fieldProps={{ + onChange: (value: string) => { + const simpleClassName = value?.split('.')?.pop() ?? 
''; + const lowerName = simpleClassName.charAt(0).toLowerCase() + simpleClassName.slice(1) + + form.setFieldsValue({ + 'configJson': { + 'udfConfig': [ + { + 'name': lowerName + } + ] + } + }); + } + }} /> { ); }; -export default connect(({ Studio, Alert }: { Studio: StateType; Alert: AlertStateType }) => ({ +export default connect(({Studio, Alert}: { Studio: StateType; Alert: AlertStateType }) => ({ sessionCluster: Studio.sessionCluster, clusterConfiguration: Studio.clusterConfiguration, rightContainer: Studio.rightContainer, diff --git a/dinky-web/src/pages/DataStudio/model.ts b/dinky-web/src/pages/DataStudio/model.ts index c56d187bdc..5457fb805b 100644 --- a/dinky-web/src/pages/DataStudio/model.ts +++ b/dinky-web/src/pages/DataStudio/model.ts @@ -429,7 +429,7 @@ const Model: ModelType = { *queryFlinkUdfOptions({ payload }, { call, put }) { const response: [] = yield call(getFlinkUdfOptions, payload); yield put({ - type: 'saveUdfOptions', + type: 'saveFlinkUdfOptions', payload: response }); }, @@ -641,7 +641,7 @@ const Model: ModelType = { saveFlinkUdfOptions(state, { payload }) { return { ...state, - udfOptions: payload + flinkUdfOptions: payload }; }, /** diff --git a/dinky-web/src/types/Studio/data.d.ts b/dinky-web/src/types/Studio/data.d.ts index d950bc1507..c3fac55225 100644 --- a/dinky-web/src/types/Studio/data.d.ts +++ b/dinky-web/src/types/Studio/data.d.ts @@ -37,7 +37,6 @@ export type Catalogue = { }; export type TaskUdfConfig = { - templateId: number; selectKeys: List; className: string; }; From 8d3be486ccfcf68d2cae69dd4670e74af419bdc5 Mon Sep 17 00:00:00 2001 From: sunlichao11 Date: Sat, 4 May 2024 21:55:50 +0800 Subject: [PATCH 05/47] feat: revert --- dinky-web/src/types/Studio/data.d.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/dinky-web/src/types/Studio/data.d.ts b/dinky-web/src/types/Studio/data.d.ts index c3fac55225..0bc2104aa1 100644 --- a/dinky-web/src/types/Studio/data.d.ts +++ b/dinky-web/src/types/Studio/data.d.ts @@ -37,7 +37,9 @@ export type Catalogue = { }; export type TaskUdfConfig = { + templateId: number; selectKeys: List; + name: string; className: string; }; From 736c3c5c4b617c8959c6c0f19ea58989ada8ab84 Mon Sep 17 00:00:00 2001 From: sunlichao11 Date: Sun, 5 May 2024 00:08:09 +0800 Subject: [PATCH 06/47] feat: name assign from className --- .../dinky/data/model/ext/TaskExtConfig.java | 6 +++ .../dinky/data/model/ext/TaskUdfRefer.java | 22 +++++++++++ .../RightContainer/JobConfig/index.tsx | 38 ++++++++----------- dinky-web/src/types/Studio/data.d.ts | 7 +++- 4 files changed, 50 insertions(+), 23 deletions(-) create mode 100644 dinky-admin/src/main/java/org/dinky/data/model/ext/TaskUdfRefer.java diff --git a/dinky-admin/src/main/java/org/dinky/data/model/ext/TaskExtConfig.java b/dinky-admin/src/main/java/org/dinky/data/model/ext/TaskExtConfig.java index 2c713bd4fd..283155d17d 100644 --- a/dinky-admin/src/main/java/org/dinky/data/model/ext/TaskExtConfig.java +++ b/dinky-admin/src/main/java/org/dinky/data/model/ext/TaskExtConfig.java @@ -49,6 +49,12 @@ public class TaskExtConfig implements Serializable { notes = "UDF (User-Defined Function) configuration for the task") private TaskUdfConfig udfConfig; + @ApiModelProperty( + value = "UDF Refer", + dataType = "TaskUdfRefer", + notes = "UDF (User-Defined Function) reference for the task") + private List udfRefer; + @ApiModelProperty( value = "Custom Config", dataType = "List", diff --git a/dinky-admin/src/main/java/org/dinky/data/model/ext/TaskUdfRefer.java 
b/dinky-admin/src/main/java/org/dinky/data/model/ext/TaskUdfRefer.java new file mode 100644 index 0000000000..aa70dcbd6a --- /dev/null +++ b/dinky-admin/src/main/java/org/dinky/data/model/ext/TaskUdfRefer.java @@ -0,0 +1,22 @@ +package org.dinky.data.model.ext; + +import io.swagger.annotations.ApiModel; +import io.swagger.annotations.ApiModelProperty; +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.NoArgsConstructor; + +import java.io.Serializable; + +@Data +@ApiModel(value = "TaskUdfRefer", description = "UDF (User-Defined Function) refer for Task") +@AllArgsConstructor +@NoArgsConstructor +public class TaskUdfRefer implements Serializable { + + @ApiModelProperty(value = "function name", dataType = "String", example = "add", notes = "Nmae of the UDF function") + private String name; + + @ApiModelProperty(value = "Class Name", dataType = "String", notes = "Name of the UDF class") + private String className; +} diff --git a/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/index.tsx b/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/index.tsx index 6741e7a1d5..384f82d69e 100644 --- a/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/index.tsx +++ b/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/index.tsx @@ -56,6 +56,7 @@ import React from 'react'; import {useEffect, useState} from 'react'; import {connect} from 'umi'; import FlinkUdfOptionsSelect from "@/components/Flink/UdfSelect"; +import {ProFormDependency} from "@ant-design/pro-form"; const JobConfig = (props: any) => { const { @@ -311,7 +312,7 @@ const JobConfig = (props: any) => { { showSearch placeholder={l('pages.datastudio.label.udf.className')} options={flinkUdfOptions} - fieldProps={{ - onChange: (value: string) => { - const simpleClassName = value?.split('.')?.pop() ?? ''; - const lowerName = simpleClassName.charAt(0).toLowerCase() + simpleClassName.slice(1) - - form.setFieldsValue({ - 'configJson': { - 'udfConfig': [ - { - 'name': lowerName - } - ] - } - }); - } - }} - /> - + + {({className}) => { + const simpleClassName = className?.split('.')?.pop() ?? 
''; + const lowerName = simpleClassName.charAt(0).toLowerCase() + simpleClassName.slice(1) + return ( + + ); + }} + diff --git a/dinky-web/src/types/Studio/data.d.ts b/dinky-web/src/types/Studio/data.d.ts index 0bc2104aa1..f58ff58392 100644 --- a/dinky-web/src/types/Studio/data.d.ts +++ b/dinky-web/src/types/Studio/data.d.ts @@ -39,16 +39,21 @@ export type Catalogue = { export type TaskUdfConfig = { templateId: number; selectKeys: List; - name: string; className: string; }; +export type TaskUdfRefer = { + name: string; + className: string; +} + export type ConfigItem = { key: string; value: string; }; export type TaskExtConfig = { + udfRefer: List; udfConfig: TaskUdfConfig; customConfig: List>; }; From 82be2b0b26b9d868b6c17314d24abf52154eef29 Mon Sep 17 00:00:00 2001 From: sunlichao11 Date: Sun, 5 May 2024 12:11:47 +0800 Subject: [PATCH 07/47] feat: pass to back --- .../main/java/org/dinky/data/dto/TaskDTO.java | 5 ++-- .../dinky/data/model/ext/TaskExtConfig.java | 14 ++++++++-- .../main/java/org/dinky/job/JobConfig.java | 7 +++++ .../RightContainer/JobConfig/index.tsx | 26 +++++++++---------- 4 files changed, 34 insertions(+), 18 deletions(-) diff --git a/dinky-admin/src/main/java/org/dinky/data/dto/TaskDTO.java b/dinky-admin/src/main/java/org/dinky/data/dto/TaskDTO.java index 192a077840..6b47bb8ce8 100644 --- a/dinky-admin/src/main/java/org/dinky/data/dto/TaskDTO.java +++ b/dinky-admin/src/main/java/org/dinky/data/dto/TaskDTO.java @@ -37,7 +37,6 @@ /** * StudioExecuteDTO - * */ @Getter @Setter @@ -219,10 +218,12 @@ public JobConfig getJobConfig() { Map parsedConfig = this.configJson == null ? new HashMap<>(0) : this.configJson.getCustomConfigMaps(); - + Map udfRefers = + this.configJson == null ? new HashMap<>(0) : this.configJson.getUdfReferMaps(); JobConfig jobConfig = new JobConfig(); BeanUtil.copyProperties(this, jobConfig); jobConfig.setConfigJson(parsedConfig); + jobConfig.setUdfRefer(udfRefers); jobConfig.setTaskId(id); jobConfig.setJobName(name); diff --git a/dinky-admin/src/main/java/org/dinky/data/model/ext/TaskExtConfig.java b/dinky-admin/src/main/java/org/dinky/data/model/ext/TaskExtConfig.java index 283155d17d..1a069e14f8 100644 --- a/dinky-admin/src/main/java/org/dinky/data/model/ext/TaskExtConfig.java +++ b/dinky-admin/src/main/java/org/dinky/data/model/ext/TaskExtConfig.java @@ -81,8 +81,18 @@ public List getCustomConfigKeys() { public Map getCustomConfigMaps() { return Asserts.isNotNullCollection(customConfig) ? customConfig.stream() - .filter(item -> item.getKey() != null && item.getValue() != null) - .collect(Collectors.toMap(ConfigItem::getKey, ConfigItem::getValue)) + .filter(item -> item.getKey() != null && item.getValue() != null) + .collect(Collectors.toMap(ConfigItem::getKey, ConfigItem::getValue)) + : new HashMap<>(); + } + + // udfRefer-value的所有key-value + @JsonIgnore + public Map getUdfReferMaps() { + return Asserts.isNotNullCollection(udfRefer) + ? 
udfRefer.stream() + .filter(item -> item.getClassName() != null) + .collect(Collectors.toMap(TaskUdfRefer::getClassName, TaskUdfRefer::getName)) : new HashMap<>(); } diff --git a/dinky-core/src/main/java/org/dinky/job/JobConfig.java b/dinky-core/src/main/java/org/dinky/job/JobConfig.java index f5fd35a499..da480bb125 100644 --- a/dinky-core/src/main/java/org/dinky/job/JobConfig.java +++ b/dinky-core/src/main/java/org/dinky/job/JobConfig.java @@ -91,6 +91,13 @@ public class JobConfig { notes = "JSON configuration") private Map configJson; + @ApiModelProperty( + value = "UDF configuration", + dataType = "Map", + example = "{\"udf1\": \"value1\", \"udf2\": \"value2\"}", + notes = "UDF (User-Defined Function) configuration") + private Map udfRefer; + @ApiModelProperty( value = "Flag indicating whether to use the result", dataType = "boolean", diff --git a/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/index.tsx b/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/index.tsx index 384f82d69e..c74e284516 100644 --- a/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/index.tsx +++ b/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/index.tsx @@ -329,21 +329,19 @@ const JobConfig = (props: any) => { showSearch placeholder={l('pages.datastudio.label.udf.className')} options={flinkUdfOptions} - /> - - {({className}) => { - const simpleClassName = className?.split('.')?.pop() ?? ''; - const lowerName = simpleClassName.charAt(0).toLowerCase() + simpleClassName.slice(1) - return ( - - ); + fieldProps={{ + onChange: (value: string) => { + const simpleClassName = value?.split('.')?.pop() ?? ''; + const lowerName = simpleClassName.charAt(0).toLowerCase() + simpleClassName.slice(1) + // 这个应该联动 + } }} - + /> + From 19ca45aef4903f3da6ca54cff5f0db23ecfd9250 Mon Sep 17 00:00:00 2001 From: sunlichao11 Date: Sun, 5 May 2024 14:29:59 +0800 Subject: [PATCH 08/47] feat: insert udfRefer to statement Signed-off-by: sunlichao11 --- .../java/org/dinky/data/model/ext/TaskExtConfig.java | 9 +++++++++ .../src/main/java/org/dinky/explainer/Explainer.java | 12 +++++++++++- 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/dinky-admin/src/main/java/org/dinky/data/model/ext/TaskExtConfig.java b/dinky-admin/src/main/java/org/dinky/data/model/ext/TaskExtConfig.java index 1a069e14f8..193a0543af 100644 --- a/dinky-admin/src/main/java/org/dinky/data/model/ext/TaskExtConfig.java +++ b/dinky-admin/src/main/java/org/dinky/data/model/ext/TaskExtConfig.java @@ -19,6 +19,7 @@ package org.dinky.data.model.ext; +import org.apache.commons.lang3.StringUtils; import org.dinky.assertion.Asserts; import org.dinky.data.ext.ConfigItem; @@ -92,6 +93,14 @@ public Map getUdfReferMaps() { return Asserts.isNotNullCollection(udfRefer) ? 
udfRefer.stream() .filter(item -> item.getClassName() != null) + .map(t -> { + if (StringUtils.isEmpty(t.getName())) { + String name = t.getClassName().substring(t.getClassName().lastIndexOf(".") + 1); + name = name.substring(0, 1).toLowerCase() + name.substring(1); + t.setName(name); + } + return t; + }) .collect(Collectors.toMap(TaskUdfRefer::getClassName, TaskUdfRefer::getName)) : new HashMap<>(); } diff --git a/dinky-core/src/main/java/org/dinky/explainer/Explainer.java b/dinky-core/src/main/java/org/dinky/explainer/Explainer.java index e0f9f9fd95..5053c61a15 100644 --- a/dinky-core/src/main/java/org/dinky/explainer/Explainer.java +++ b/dinky-core/src/main/java/org/dinky/explainer/Explainer.java @@ -58,8 +58,10 @@ import java.net.URL; import java.time.LocalDateTime; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.stream.Collectors; import com.fasterxml.jackson.databind.ObjectMapper; @@ -70,6 +72,7 @@ import cn.hutool.core.text.StrFormatter; import cn.hutool.core.util.StrUtil; import lombok.extern.slf4j.Slf4j; +import org.springframework.util.CollectionUtils; /** * Explainer @@ -114,7 +117,14 @@ public JobParam pretreatStatements(String[] statements) { List statementList = new ArrayList<>(); List udfList = new ArrayList<>(); StrBuilder parsedSql = new StrBuilder(); - for (String item : statements) { + + List statementsWithUdf = Arrays.stream(statements).collect(Collectors.toList()); + Optional.ofNullable(jobManager.getConfig().getUdfRefer()).ifPresent( t-> t.forEach((key, value) -> { + String sql = String.format("create temporary function %s as '%s'", value, key); + statementsWithUdf.add(0, sql); + })); + + for (String item : statementsWithUdf) { String statement = executor.pretreatStatement(item); parsedSql.append(statement).append(";\n"); if (statement.isEmpty()) { From be1675e27199e78c9440e4067dd63aef6e8614fa Mon Sep 17 00:00:00 2001 From: sunlichao11 Date: Sun, 5 May 2024 16:17:25 +0800 Subject: [PATCH 09/47] style: spotless apply Signed-off-by: sunlichao11 --- .../org/dinky/controller/JarController.java | 14 ++++------ .../main/java/org/dinky/data/dto/TaskDTO.java | 3 +- .../dinky/data/model/ext/TaskExtConfig.java | 28 ++++++++++--------- .../dinky/data/model/ext/TaskUdfRefer.java | 23 +++++++++++++-- .../dinky/service/impl/TaskServiceImpl.java | 2 +- .../main/java/org/dinky/utils/UDFUtils.java | 14 ++++------ .../org/dinky/cdc/doris/DorisSinkBuilder.java | 13 +++++---- .../org/dinky/cdc/doris/DorisSinkOptions.java | 6 ++-- .../java/org/dinky/explainer/Explainer.java | 10 +++---- 9 files changed, 64 insertions(+), 49 deletions(-) diff --git a/dinky-admin/src/main/java/org/dinky/controller/JarController.java b/dinky-admin/src/main/java/org/dinky/controller/JarController.java index 42e9cadab9..b854b1bde7 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/JarController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/JarController.java @@ -19,8 +19,6 @@ package org.dinky.controller; -import lombok.AllArgsConstructor; -import lombok.Data; import org.dinky.data.model.Task; import org.dinky.data.result.Result; import org.dinky.data.vo.CascaderVO; @@ -28,6 +26,7 @@ import org.dinky.function.data.model.UDF; import org.dinky.function.util.UDFUtil; import org.dinky.service.TaskService; +import org.dinky.utils.UDFUtils; import org.apache.flink.table.catalog.FunctionLanguage; @@ -36,7 +35,6 @@ import java.util.Map; import java.util.stream.Collectors; -import 
org.dinky.utils.UDFUtils; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestMapping; @@ -89,14 +87,12 @@ public Result>> generateJar() { @ApiOperation("Get UDFs") public Result> getUdfs() { List staticUdfs = UDFUtils.getStaticUdfs(); - List dynamicUdfs = taskService.getAllUdfEnabled().stream() - .map(UDFUtils::taskToUDF) - .collect(Collectors.toList()); + List dynamicUdfs = + taskService.getAllUdfEnabled().stream().map(UDFUtils::taskToUDF).collect(Collectors.toList()); List allUdfs = new ArrayList<>(staticUdfs); allUdfs.addAll(dynamicUdfs); - List result = allUdfs.stream().map(udf -> new CascaderVO(udf.getClassName())) - .collect(Collectors.toList()); + List result = + allUdfs.stream().map(udf -> new CascaderVO(udf.getClassName())).collect(Collectors.toList()); return Result.succeed(result); } - } diff --git a/dinky-admin/src/main/java/org/dinky/data/dto/TaskDTO.java b/dinky-admin/src/main/java/org/dinky/data/dto/TaskDTO.java index 6b47bb8ce8..60a01f05ec 100644 --- a/dinky-admin/src/main/java/org/dinky/data/dto/TaskDTO.java +++ b/dinky-admin/src/main/java/org/dinky/data/dto/TaskDTO.java @@ -218,8 +218,7 @@ public JobConfig getJobConfig() { Map parsedConfig = this.configJson == null ? new HashMap<>(0) : this.configJson.getCustomConfigMaps(); - Map udfRefers = - this.configJson == null ? new HashMap<>(0) : this.configJson.getUdfReferMaps(); + Map udfRefers = this.configJson == null ? new HashMap<>(0) : this.configJson.getUdfReferMaps(); JobConfig jobConfig = new JobConfig(); BeanUtil.copyProperties(this, jobConfig); jobConfig.setConfigJson(parsedConfig); diff --git a/dinky-admin/src/main/java/org/dinky/data/model/ext/TaskExtConfig.java b/dinky-admin/src/main/java/org/dinky/data/model/ext/TaskExtConfig.java index 193a0543af..2915f8a460 100644 --- a/dinky-admin/src/main/java/org/dinky/data/model/ext/TaskExtConfig.java +++ b/dinky-admin/src/main/java/org/dinky/data/model/ext/TaskExtConfig.java @@ -19,10 +19,11 @@ package org.dinky.data.model.ext; -import org.apache.commons.lang3.StringUtils; import org.dinky.assertion.Asserts; import org.dinky.data.ext.ConfigItem; +import org.apache.commons.lang3.StringUtils; + import java.io.Serializable; import java.util.ArrayList; import java.util.HashMap; @@ -82,8 +83,8 @@ public List getCustomConfigKeys() { public Map getCustomConfigMaps() { return Asserts.isNotNullCollection(customConfig) ? customConfig.stream() - .filter(item -> item.getKey() != null && item.getValue() != null) - .collect(Collectors.toMap(ConfigItem::getKey, ConfigItem::getValue)) + .filter(item -> item.getKey() != null && item.getValue() != null) + .collect(Collectors.toMap(ConfigItem::getKey, ConfigItem::getValue)) : new HashMap<>(); } @@ -92,16 +93,17 @@ public Map getCustomConfigMaps() { public Map getUdfReferMaps() { return Asserts.isNotNullCollection(udfRefer) ? 
udfRefer.stream() - .filter(item -> item.getClassName() != null) - .map(t -> { - if (StringUtils.isEmpty(t.getName())) { - String name = t.getClassName().substring(t.getClassName().lastIndexOf(".") + 1); - name = name.substring(0, 1).toLowerCase() + name.substring(1); - t.setName(name); - } - return t; - }) - .collect(Collectors.toMap(TaskUdfRefer::getClassName, TaskUdfRefer::getName)) + .filter(item -> item.getClassName() != null) + .map(t -> { + if (StringUtils.isEmpty(t.getName())) { + String name = t.getClassName() + .substring(t.getClassName().lastIndexOf(".") + 1); + name = name.substring(0, 1).toLowerCase() + name.substring(1); + t.setName(name); + } + return t; + }) + .collect(Collectors.toMap(TaskUdfRefer::getClassName, TaskUdfRefer::getName)) : new HashMap<>(); } diff --git a/dinky-admin/src/main/java/org/dinky/data/model/ext/TaskUdfRefer.java b/dinky-admin/src/main/java/org/dinky/data/model/ext/TaskUdfRefer.java index aa70dcbd6a..1817cfdcda 100644 --- a/dinky-admin/src/main/java/org/dinky/data/model/ext/TaskUdfRefer.java +++ b/dinky-admin/src/main/java/org/dinky/data/model/ext/TaskUdfRefer.java @@ -1,13 +1,32 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + package org.dinky.data.model.ext; +import java.io.Serializable; + import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModelProperty; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; -import java.io.Serializable; - @Data @ApiModel(value = "TaskUdfRefer", description = "UDF (User-Defined Function) refer for Task") @AllArgsConstructor diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java index 043a972466..a1c49545e2 100644 --- a/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java +++ b/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java @@ -705,7 +705,7 @@ public JobModelOverview getJobStreamingOrBatchModelOverview() { } @Override - public List getAllUdfEnabled(){ + public List getAllUdfEnabled() { return list(new QueryWrapper() .in("dialect", Dialect.JAVA.getValue(), Dialect.SCALA.getValue(), Dialect.PYTHON.getValue()) .eq("enabled", 1)); diff --git a/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java b/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java index 87dbecdd99..95824e82e7 100644 --- a/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java +++ b/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java @@ -19,21 +19,22 @@ package org.dinky.utils; -import org.apache.flink.api.common.functions.Function; import org.dinky.assertion.Asserts; import org.dinky.data.exception.BusException; import org.dinky.data.model.Task; import org.dinky.function.data.model.UDF; import org.dinky.function.util.UDFUtil; +import org.apache.flink.api.common.functions.Function; import org.apache.flink.table.catalog.FunctionLanguage; -import org.reflections.Reflections; -import org.reflections.scanners.Scanners; import java.util.List; import java.util.Set; import java.util.stream.Collectors; +import org.reflections.Reflections; +import org.reflections.scanners.Scanners; + public class UDFUtils extends UDFUtil { public static UDF taskToUDF(Task task) { @@ -55,15 +56,12 @@ public static List getStaticUdfs() { reflections.get(Scanners.SubTypes.of(Function.class).asClass()); return operations.stream() - .filter(operation -> !operation.isInterface() - && !operation.getName - ().startsWith("org.apache.flink")) + .filter(operation -> + !operation.isInterface() && !operation.getName().startsWith("org.apache.flink")) .map(operation -> UDF.builder() .className(operation.getName()) .functionLanguage(FunctionLanguage.JAVA) .build()) .collect(Collectors.toList()); } - - } diff --git a/dinky-cdc/dinky-cdc-plus/src/main/java/org/dinky/cdc/doris/DorisSinkBuilder.java b/dinky-cdc/dinky-cdc-plus/src/main/java/org/dinky/cdc/doris/DorisSinkBuilder.java index 93bbd89d6f..d384e1f69d 100644 --- a/dinky-cdc/dinky-cdc-plus/src/main/java/org/dinky/cdc/doris/DorisSinkBuilder.java +++ b/dinky-cdc/dinky-cdc-plus/src/main/java/org/dinky/cdc/doris/DorisSinkBuilder.java @@ -19,6 +19,12 @@ package org.dinky.cdc.doris; +import org.dinky.assertion.Asserts; +import org.dinky.cdc.AbstractSinkBuilder; +import org.dinky.cdc.SinkBuilder; +import org.dinky.data.model.FlinkCDCConfig; +import org.dinky.data.model.Table; + import org.apache.doris.flink.cfg.DorisExecutionOptions; import org.apache.doris.flink.cfg.DorisOptions; import org.apache.doris.flink.cfg.DorisReadOptions; @@ -30,11 +36,6 @@ import org.apache.flink.table.types.DataType; import org.apache.flink.table.types.logical.LogicalType; import 
org.apache.flink.table.types.utils.TypeConversions; -import org.dinky.assertion.Asserts; -import org.dinky.cdc.AbstractSinkBuilder; -import org.dinky.cdc.SinkBuilder; -import org.dinky.data.model.FlinkCDCConfig; -import org.dinky.data.model.Table; import java.io.Serializable; import java.util.ArrayList; @@ -183,7 +184,7 @@ public void addSink( DorisSink.Builder builder = DorisSink.builder(); builder.setDorisReadOptions(readOptionBuilder.build()) .setDorisExecutionOptions(executionBuilder.build()) - .setSerializer( RowDataSerializer.builder() + .setSerializer(RowDataSerializer.builder() .setFieldNames(columnNames) .setType("json") .enableDelete(true) diff --git a/dinky-cdc/dinky-cdc-plus/src/main/java/org/dinky/cdc/doris/DorisSinkOptions.java b/dinky-cdc/dinky-cdc-plus/src/main/java/org/dinky/cdc/doris/DorisSinkOptions.java index 383e1d3b65..dd6cb61553 100644 --- a/dinky-cdc/dinky-cdc-plus/src/main/java/org/dinky/cdc/doris/DorisSinkOptions.java +++ b/dinky-cdc/dinky-cdc-plus/src/main/java/org/dinky/cdc/doris/DorisSinkOptions.java @@ -119,11 +119,11 @@ public class DorisSinkOptions { .defaultValue(1) .withDescription("In the 2pc scenario, the number of retries after the commit phase fails."); - public static final ConfigOption SINK_USE_NEW_SCHEMA_CHANGE = ConfigOptions.key("sink.use-new-schema-change") + public static final ConfigOption SINK_USE_NEW_SCHEMA_CHANGE = ConfigOptions.key( + "sink.use-new-schema-change") .booleanType() .defaultValue(false) .withDescription( "supports table column name, column type, default, comment synchronization, supports multi-column changes, " - +"and supports column name rename. Need to be enabled by configuring use-new-schema-change."); - + + "and supports column name rename. Need to be enabled by configuring use-new-schema-change."); } diff --git a/dinky-core/src/main/java/org/dinky/explainer/Explainer.java b/dinky-core/src/main/java/org/dinky/explainer/Explainer.java index 5053c61a15..4880289439 100644 --- a/dinky-core/src/main/java/org/dinky/explainer/Explainer.java +++ b/dinky-core/src/main/java/org/dinky/explainer/Explainer.java @@ -72,7 +72,6 @@ import cn.hutool.core.text.StrFormatter; import cn.hutool.core.util.StrUtil; import lombok.extern.slf4j.Slf4j; -import org.springframework.util.CollectionUtils; /** * Explainer @@ -119,10 +118,11 @@ public JobParam pretreatStatements(String[] statements) { StrBuilder parsedSql = new StrBuilder(); List statementsWithUdf = Arrays.stream(statements).collect(Collectors.toList()); - Optional.ofNullable(jobManager.getConfig().getUdfRefer()).ifPresent( t-> t.forEach((key, value) -> { - String sql = String.format("create temporary function %s as '%s'", value, key); - statementsWithUdf.add(0, sql); - })); + Optional.ofNullable(jobManager.getConfig().getUdfRefer()) + .ifPresent(t -> t.forEach((key, value) -> { + String sql = String.format("create temporary function %s as '%s'", value, key); + statementsWithUdf.add(0, sql); + })); for (String item : statementsWithUdf) { String statement = executor.pretreatStatement(item); From 517671decfbdb2a29ae41686444a23de3ecd9683 Mon Sep 17 00:00:00 2001 From: sunlichao11 Date: Sun, 5 May 2024 17:23:48 +0800 Subject: [PATCH 10/47] refactor: move staticUdf function --- .../org/dinky/controller/JarController.java | 2 +- .../main/java/org/dinky/utils/UDFUtils.java | 23 ------------------- .../java/org/dinky/function/util/UDFUtil.java | 16 +++++++++++++ 3 files changed, 17 insertions(+), 24 deletions(-) diff --git a/dinky-admin/src/main/java/org/dinky/controller/JarController.java 
b/dinky-admin/src/main/java/org/dinky/controller/JarController.java index b854b1bde7..7b28d9ca23 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/JarController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/JarController.java @@ -86,7 +86,7 @@ public Result>> generateJar() { @GetMapping("/udf/geUdfs") @ApiOperation("Get UDFs") public Result> getUdfs() { - List staticUdfs = UDFUtils.getStaticUdfs(); + List staticUdfs = UDFUtil.getStaticUdfs(); List dynamicUdfs = taskService.getAllUdfEnabled().stream().map(UDFUtils::taskToUDF).collect(Collectors.toList()); List allUdfs = new ArrayList<>(staticUdfs); diff --git a/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java b/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java index 95824e82e7..1e771df27e 100644 --- a/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java +++ b/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java @@ -25,16 +25,8 @@ import org.dinky.function.data.model.UDF; import org.dinky.function.util.UDFUtil; -import org.apache.flink.api.common.functions.Function; import org.apache.flink.table.catalog.FunctionLanguage; -import java.util.List; -import java.util.Set; -import java.util.stream.Collectors; - -import org.reflections.Reflections; -import org.reflections.scanners.Scanners; - public class UDFUtils extends UDFUtil { public static UDF taskToUDF(Task task) { @@ -49,19 +41,4 @@ public static UDF taskToUDF(Task task) { throw new BusException("udf `class` config is null,please check your udf task config"); } } - - public static List getStaticUdfs() { - Reflections reflections = new Reflections(Function.class.getPackage().getName()); - Set> operations = - reflections.get(Scanners.SubTypes.of(Function.class).asClass()); - - return operations.stream() - .filter(operation -> - !operation.isInterface() && !operation.getName().startsWith("org.apache.flink")) - .map(operation -> UDF.builder() - .className(operation.getName()) - .functionLanguage(FunctionLanguage.JAVA) - .build()) - .collect(Collectors.toList()); - } } diff --git a/dinky-function/src/main/java/org/dinky/function/util/UDFUtil.java b/dinky-function/src/main/java/org/dinky/function/util/UDFUtil.java index 427b1dec2f..696b5ed287 100644 --- a/dinky-function/src/main/java/org/dinky/function/util/UDFUtil.java +++ b/dinky-function/src/main/java/org/dinky/function/util/UDFUtil.java @@ -38,6 +38,7 @@ import org.dinky.pool.ClassPool; import org.dinky.utils.URLUtils; +import org.apache.flink.api.common.functions.Function; import org.apache.flink.client.python.PythonFunctionFactory; import org.apache.flink.configuration.Configuration; import org.apache.flink.configuration.PipelineOptions; @@ -473,4 +474,19 @@ public static void writeManifest( JSONUtil.toJsonStr(flinkUdfManifest), PathConstant.getUdfPackagePath(taskId) + PathConstant.DEP_MANIFEST); } + + public static List getStaticUdfs() { + Reflections reflections = new Reflections(Function.class.getPackage().getName()); + Set> operations = + reflections.get(Scanners.SubTypes.of(Function.class).asClass()); + + return operations.stream() + .filter(operation -> + !operation.isInterface() && !operation.getName().startsWith("org.apache.flink")) + .map(operation -> UDF.builder() + .className(operation.getName()) + .functionLanguage(FunctionLanguage.JAVA) + .build()) + .collect(Collectors.toList()); + } } From eb9cbf8d582d8d5a5c0c15bb1e965ad1d1222c4a Mon Sep 17 00:00:00 2001 From: sunlichao11 Date: Sun, 5 May 2024 18:12:20 +0800 Subject: [PATCH 11/47] refactor: move staticUdf function --- 
.../org/dinky/controller/JarController.java | 3 ++- .../main/java/org/dinky/trans/Operations.java | 21 +++++++++++++++++++ .../java/org/dinky/function/util/UDFUtil.java | 16 -------------- 3 files changed, 23 insertions(+), 17 deletions(-) diff --git a/dinky-admin/src/main/java/org/dinky/controller/JarController.java b/dinky-admin/src/main/java/org/dinky/controller/JarController.java index 7b28d9ca23..08e01a92eb 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/JarController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/JarController.java @@ -26,6 +26,7 @@ import org.dinky.function.data.model.UDF; import org.dinky.function.util.UDFUtil; import org.dinky.service.TaskService; +import org.dinky.trans.Operations; import org.dinky.utils.UDFUtils; import org.apache.flink.table.catalog.FunctionLanguage; @@ -86,7 +87,7 @@ public Result>> generateJar() { @GetMapping("/udf/geUdfs") @ApiOperation("Get UDFs") public Result> getUdfs() { - List staticUdfs = UDFUtil.getStaticUdfs(); + List staticUdfs = Operations.getStaticUdfs(); List dynamicUdfs = taskService.getAllUdfEnabled().stream().map(UDFUtils::taskToUDF).collect(Collectors.toList()); List allUdfs = new ArrayList<>(staticUdfs); diff --git a/dinky-core/src/main/java/org/dinky/trans/Operations.java b/dinky-core/src/main/java/org/dinky/trans/Operations.java index a65a344520..97758dbf1d 100644 --- a/dinky-core/src/main/java/org/dinky/trans/Operations.java +++ b/dinky-core/src/main/java/org/dinky/trans/Operations.java @@ -19,12 +19,18 @@ package org.dinky.trans; +import org.dinky.function.data.model.UDF; import org.dinky.parser.SqlType; +import org.apache.flink.api.common.functions.Function; +import org.apache.flink.table.catalog.FunctionLanguage; + import java.lang.reflect.InvocationTargetException; import java.util.Arrays; +import java.util.List; import java.util.Objects; import java.util.Set; +import java.util.stream.Collectors; import org.reflections.Reflections; import org.reflections.scanners.Scanners; @@ -95,4 +101,19 @@ public static Operation buildOperation(String statement) { .map(p -> p.create(statement)) .orElse(null); } + + public static List getStaticUdfs() { + Reflections reflections = new Reflections(Function.class.getPackage().getName()); + Set> operations = + reflections.get(Scanners.SubTypes.of(Function.class).asClass()); + + return operations.stream() + .filter(operation -> + !operation.isInterface() && !operation.getName().startsWith("org.apache.flink")) + .map(operation -> UDF.builder() + .className(operation.getName()) + .functionLanguage(FunctionLanguage.JAVA) + .build()) + .collect(Collectors.toList()); + } } diff --git a/dinky-function/src/main/java/org/dinky/function/util/UDFUtil.java b/dinky-function/src/main/java/org/dinky/function/util/UDFUtil.java index 696b5ed287..427b1dec2f 100644 --- a/dinky-function/src/main/java/org/dinky/function/util/UDFUtil.java +++ b/dinky-function/src/main/java/org/dinky/function/util/UDFUtil.java @@ -38,7 +38,6 @@ import org.dinky.pool.ClassPool; import org.dinky.utils.URLUtils; -import org.apache.flink.api.common.functions.Function; import org.apache.flink.client.python.PythonFunctionFactory; import org.apache.flink.configuration.Configuration; import org.apache.flink.configuration.PipelineOptions; @@ -474,19 +473,4 @@ public static void writeManifest( JSONUtil.toJsonStr(flinkUdfManifest), PathConstant.getUdfPackagePath(taskId) + PathConstant.DEP_MANIFEST); } - - public static List getStaticUdfs() { - Reflections reflections = new 
Reflections(Function.class.getPackage().getName()); - Set> operations = - reflections.get(Scanners.SubTypes.of(Function.class).asClass()); - - return operations.stream() - .filter(operation -> - !operation.isInterface() && !operation.getName().startsWith("org.apache.flink")) - .map(operation -> UDF.builder() - .className(operation.getName()) - .functionLanguage(FunctionLanguage.JAVA) - .build()) - .collect(Collectors.toList()); - } } From 59bc0dd8a1224cdd98e75c7fa836d6dccb8931f2 Mon Sep 17 00:00:00 2001 From: sunlichao11 Date: Sun, 5 May 2024 22:56:33 +0800 Subject: [PATCH 12/47] feat: add java custom udf cache --- .../src/main/java/org/dinky/controller/JarController.java | 2 +- dinky-core/src/main/java/org/dinky/trans/Operations.java | 8 +++++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/dinky-admin/src/main/java/org/dinky/controller/JarController.java b/dinky-admin/src/main/java/org/dinky/controller/JarController.java index 08e01a92eb..8e7a3469e9 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/JarController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/JarController.java @@ -87,7 +87,7 @@ public Result>> generateJar() { @GetMapping("/udf/geUdfs") @ApiOperation("Get UDFs") public Result> getUdfs() { - List staticUdfs = Operations.getStaticUdfs(); + List staticUdfs = Operations.getCustomStaticUdfs(); List dynamicUdfs = taskService.getAllUdfEnabled().stream().map(UDFUtils::taskToUDF).collect(Collectors.toList()); List allUdfs = new ArrayList<>(staticUdfs); diff --git a/dinky-core/src/main/java/org/dinky/trans/Operations.java b/dinky-core/src/main/java/org/dinky/trans/Operations.java index 97758dbf1d..c8c7a9d7c9 100644 --- a/dinky-core/src/main/java/org/dinky/trans/Operations.java +++ b/dinky-core/src/main/java/org/dinky/trans/Operations.java @@ -19,6 +19,7 @@ package org.dinky.trans; +import org.apache.commons.collections.CollectionUtils; import org.dinky.function.data.model.UDF; import org.dinky.parser.SqlType; @@ -51,6 +52,7 @@ private Operations() {} private static final Operation[] ALL_OPERATIONS = getAllOperations(); + private static final List JAVA_STATIC_UDF_LIST = getCustomStaticUdfs(); /** * get all {@link Operation} children ordinary class, * @@ -102,7 +104,11 @@ public static Operation buildOperation(String statement) { .orElse(null); } - public static List getStaticUdfs() { + public static List getCustomStaticUdfs() { + if (CollectionUtils.isNotEmpty(JAVA_STATIC_UDF_LIST )) { + return JAVA_STATIC_UDF_LIST; + } + Reflections reflections = new Reflections(Function.class.getPackage().getName()); Set> operations = reflections.get(Scanners.SubTypes.of(Function.class).asClass()); From aeb10d360fd2176d696ff40eeb20c04c3ff94f1a Mon Sep 17 00:00:00 2001 From: sunlichao11 Date: Mon, 6 May 2024 21:56:57 +0800 Subject: [PATCH 13/47] feat: change udf combine Signed-off-by: sunlichao11 --- .../RightContainer/JobConfig/index.tsx | 66 ++++++++++++------- 1 file changed, 41 insertions(+), 25 deletions(-) diff --git a/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/index.tsx b/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/index.tsx index c74e284516..ca5a8280cc 100644 --- a/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/index.tsx +++ b/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/index.tsx @@ -319,31 +319,47 @@ const JobConfig = (props: any) => { creatorButtonText: l('pages.datastudio.label.udf.injectUdf') }} > - - - { - const simpleClassName = value?.split('.')?.pop() ?? 
''; - const lowerName = simpleClassName.charAt(0).toLowerCase() + simpleClassName.slice(1) - // 这个应该联动 - } - }} - /> - - - + {( + _, index + ) => { + return ( + + + { + const simpleClassName = value?.split('.')?.pop() ?? ''; + const lowerName = simpleClassName.charAt(0).toLowerCase() + simpleClassName.slice(1) + + form.setFieldsValue({ + 'configJson': { + 'udfRefer': { + [index]: { + name: lowerName + } + } + } + }); + } + }} + /> + + + + ); + } + }
From ebf9d5b7359a54a3450d0fa9a97e2b9a8e65f727 Mon Sep 17 00:00:00 2001 From: sunlichao11 Date: Tue, 7 May 2024 07:35:25 +0800 Subject: [PATCH 14/47] feat: add scan all UserDefinedFunction sub class --- .../src/main/java/org/dinky/trans/Operations.java | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/dinky-core/src/main/java/org/dinky/trans/Operations.java b/dinky-core/src/main/java/org/dinky/trans/Operations.java index c8c7a9d7c9..95d9748209 100644 --- a/dinky-core/src/main/java/org/dinky/trans/Operations.java +++ b/dinky-core/src/main/java/org/dinky/trans/Operations.java @@ -20,7 +20,9 @@ package org.dinky.trans; import org.apache.commons.collections.CollectionUtils; +import org.apache.flink.table.functions.UserDefinedFunction; import org.dinky.function.data.model.UDF; +import org.dinky.function.udtaf.Top2; import org.dinky.parser.SqlType; import org.apache.flink.api.common.functions.Function; @@ -28,6 +30,7 @@ import java.lang.reflect.InvocationTargetException; import java.util.Arrays; +import java.util.HashSet; import java.util.List; import java.util.Objects; import java.util.Set; @@ -37,6 +40,8 @@ import org.reflections.scanners.Scanners; import lombok.extern.slf4j.Slf4j; +import org.reflections.util.ClasspathHelper; +import org.reflections.util.ConfigurationBuilder; /** * Operations @@ -109,13 +114,12 @@ public static List getCustomStaticUdfs() { return JAVA_STATIC_UDF_LIST; } - Reflections reflections = new Reflections(Function.class.getPackage().getName()); + Reflections reflections = new Reflections(new ConfigurationBuilder().setUrls(ClasspathHelper.forJavaClassPath())); Set> operations = - reflections.get(Scanners.SubTypes.of(Function.class).asClass()); - + reflections.get(Scanners.SubTypes.of(UserDefinedFunction.class).asClass()); return operations.stream() .filter(operation -> - !operation.isInterface() && !operation.getName().startsWith("org.apache.flink")) + !operation.isInterface() && !operation.getName().startsWith("org.apache")) .map(operation -> UDF.builder() .className(operation.getName()) .functionLanguage(FunctionLanguage.JAVA) From 6507ddee79d44e239d4289b588e95dd6aa98e9bc Mon Sep 17 00:00:00 2001 From: sunlichao11 Date: Tue, 7 May 2024 07:41:50 +0800 Subject: [PATCH 15/47] feat: switch server and cache --- .../DataStudio/MiddleContainer/StudioEditor/constants.tsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dinky-web/src/pages/DataStudio/MiddleContainer/StudioEditor/constants.tsx b/dinky-web/src/pages/DataStudio/MiddleContainer/StudioEditor/constants.tsx index f4a693d8d8..2f4ae64542 100644 --- a/dinky-web/src/pages/DataStudio/MiddleContainer/StudioEditor/constants.tsx +++ b/dinky-web/src/pages/DataStudio/MiddleContainer/StudioEditor/constants.tsx @@ -21,8 +21,8 @@ import { l } from '@/utils/intl'; export const PARAM_DIFF_TABLE_COL = [ { title: l('pages.datastudio.sql.configItem'), key: 'key', dataIndex: 'key' }, - { title: l('pages.datastudio.sql.cacheConfigItem'), key: 'cache', dataIndex: 'cache' }, - { title: l('pages.datastudio.sql.serverConfigItem'), key: 'server', dataIndex: 'server' } + { title: l('pages.datastudio.sql.serverConfigItem'), key: 'server', dataIndex: 'server' }, + { title: l('pages.datastudio.sql.cacheConfigItem'), key: 'cache', dataIndex: 'cache' } ]; export const DIFF_EDITOR_PARAMS = { From d6101b9eb7d3dacf4135f083a791ddb274c9a206 Mon Sep 17 00:00:00 2001 From: sunlichao11 Date: Tue, 7 May 2024 09:05:16 +0800 Subject: [PATCH 16/47] feat: switch server and cache --- 
.../src/pages/DataStudio/RightContainer/JobConfig/index.tsx | 1 - 1 file changed, 1 deletion(-) diff --git a/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/index.tsx b/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/index.tsx index ca5a8280cc..b3bdb9f0d2 100644 --- a/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/index.tsx +++ b/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/index.tsx @@ -56,7 +56,6 @@ import React from 'react'; import {useEffect, useState} from 'react'; import {connect} from 'umi'; import FlinkUdfOptionsSelect from "@/components/Flink/UdfSelect"; -import {ProFormDependency} from "@ant-design/pro-form"; const JobConfig = (props: any) => { const { From e1f13653ca62b4fb3b5291d4194f59442fa2f592 Mon Sep 17 00:00:00 2001 From: sunlichao11 Date: Tue, 7 May 2024 09:17:03 +0800 Subject: [PATCH 17/47] style: spotless apply --- .../org/dinky/data/result/ResultRunnable.java | 2 +- .../main/java/org/dinky/trans/Operations.java | 16 +++++++--------- 2 files changed, 8 insertions(+), 10 deletions(-) diff --git a/dinky-core/src/main/java/org/dinky/data/result/ResultRunnable.java b/dinky-core/src/main/java/org/dinky/data/result/ResultRunnable.java index ef62116069..7b2b2c2417 100644 --- a/dinky-core/src/main/java/org/dinky/data/result/ResultRunnable.java +++ b/dinky-core/src/main/java/org/dinky/data/result/ResultRunnable.java @@ -61,7 +61,7 @@ public ResultRunnable( Integer maxRowNum, boolean isChangeLog, boolean isAutoCancel, - String timeZone) { + String timeZone) { this.tableResult = tableResult; this.id = id; this.maxRowNum = maxRowNum; diff --git a/dinky-core/src/main/java/org/dinky/trans/Operations.java b/dinky-core/src/main/java/org/dinky/trans/Operations.java index 95d9748209..f4984f416e 100644 --- a/dinky-core/src/main/java/org/dinky/trans/Operations.java +++ b/dinky-core/src/main/java/org/dinky/trans/Operations.java @@ -19,18 +19,15 @@ package org.dinky.trans; -import org.apache.commons.collections.CollectionUtils; -import org.apache.flink.table.functions.UserDefinedFunction; import org.dinky.function.data.model.UDF; -import org.dinky.function.udtaf.Top2; import org.dinky.parser.SqlType; -import org.apache.flink.api.common.functions.Function; +import org.apache.commons.collections.CollectionUtils; import org.apache.flink.table.catalog.FunctionLanguage; +import org.apache.flink.table.functions.UserDefinedFunction; import java.lang.reflect.InvocationTargetException; import java.util.Arrays; -import java.util.HashSet; import java.util.List; import java.util.Objects; import java.util.Set; @@ -38,11 +35,11 @@ import org.reflections.Reflections; import org.reflections.scanners.Scanners; - -import lombok.extern.slf4j.Slf4j; import org.reflections.util.ClasspathHelper; import org.reflections.util.ConfigurationBuilder; +import lombok.extern.slf4j.Slf4j; + /** * Operations * @@ -110,11 +107,12 @@ public static Operation buildOperation(String statement) { } public static List getCustomStaticUdfs() { - if (CollectionUtils.isNotEmpty(JAVA_STATIC_UDF_LIST )) { + if (CollectionUtils.isNotEmpty(JAVA_STATIC_UDF_LIST)) { return JAVA_STATIC_UDF_LIST; } - Reflections reflections = new Reflections(new ConfigurationBuilder().setUrls(ClasspathHelper.forJavaClassPath())); + Reflections reflections = + new Reflections(new ConfigurationBuilder().setUrls(ClasspathHelper.forJavaClassPath())); Set> operations = reflections.get(Scanners.SubTypes.of(UserDefinedFunction.class).asClass()); return operations.stream() From 7fbbe1c96d5bf96b1b4616fbb791169f5d75ac0b Mon Sep 17 
00:00:00 2001 From: sunlichao11 Date: Tue, 7 May 2024 09:48:06 +0800 Subject: [PATCH 18/47] chore: change pages.datastudio.label.udf.tip --- dinky-web/src/locales/en-US/pages.ts | 2 +- dinky-web/src/locales/zh-CN/pages.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/dinky-web/src/locales/en-US/pages.ts b/dinky-web/src/locales/en-US/pages.ts index 53820ebd59..5032eacb2d 100644 --- a/dinky-web/src/locales/en-US/pages.ts +++ b/dinky-web/src/locales/en-US/pages.ts @@ -440,7 +440,7 @@ export default { 'pages.datastudio.label.jobConfig.addConfig.params': 'parameters', 'pages.datastudio.label.jobConfig.addConfig.value': 'value', 'pages.datastudio.label.udf': 'Udf Item', - 'pages.datastudio.label.udf.tip': 'Inject UDF item', + 'pages.datastudio.label.udf.tip': 'Inject UDF item, Automatically add statement `create temporary function {functionName} as {className}` at the beginning of the SQL statement', 'pages.datastudio.label.udf.injectUdf': 'Inject UDF item', 'pages.datastudio.label.udf.name': 'function name', 'pages.datastudio.label.udf.className': 'class name', diff --git a/dinky-web/src/locales/zh-CN/pages.ts b/dinky-web/src/locales/zh-CN/pages.ts index f70837e4d8..8925cceaa6 100644 --- a/dinky-web/src/locales/zh-CN/pages.ts +++ b/dinky-web/src/locales/zh-CN/pages.ts @@ -421,7 +421,7 @@ export default { 'pages.datastudio.label.jobConfig.addConfig.params': '参数', 'pages.datastudio.label.jobConfig.addConfig.value': '值', 'pages.datastudio.label.udf': '注入UDF算子', - 'pages.datastudio.label.udf.tip': '自动注入UDF算子', + 'pages.datastudio.label.udf.tip': '注入UDF算子, 自动在所有语句前注入`create temporary function {functionName} as {className}` 语句', 'pages.datastudio.label.udf.injectUdf': '注入UDF', 'pages.datastudio.label.udf.name': '函数名称', 'pages.datastudio.label.udf.className': '类名', From 66157376321074008759b6790264c484f777593d Mon Sep 17 00:00:00 2001 From: Zzm0809 <934230207@qq.com> Date: Tue, 7 May 2024 12:41:27 +0800 Subject: [PATCH 19/47] Implement automatic injection and detection of duplicate injections Signed-off-by: Zzm0809 <934230207@qq.com> --- .../dinky/data/model/ext/TaskExtConfig.java | 2 +- dinky-web/src/locales/en-US/pages.ts | 1 + dinky-web/src/locales/zh-CN/pages.ts | 1 + .../RightContainer/JobConfig/index.tsx | 134 ++++++++++++++---- 4 files changed, 106 insertions(+), 32 deletions(-) diff --git a/dinky-admin/src/main/java/org/dinky/data/model/ext/TaskExtConfig.java b/dinky-admin/src/main/java/org/dinky/data/model/ext/TaskExtConfig.java index 2915f8a460..df437da715 100644 --- a/dinky-admin/src/main/java/org/dinky/data/model/ext/TaskExtConfig.java +++ b/dinky-admin/src/main/java/org/dinky/data/model/ext/TaskExtConfig.java @@ -103,7 +103,7 @@ public Map getUdfReferMaps() { } return t; }) - .collect(Collectors.toMap(TaskUdfRefer::getClassName, TaskUdfRefer::getName)) + .collect(Collectors.toConcurrentMap(TaskUdfRefer::getClassName, TaskUdfRefer::getName)) : new HashMap<>(); } diff --git a/dinky-web/src/locales/en-US/pages.ts b/dinky-web/src/locales/en-US/pages.ts index 5032eacb2d..332eed8079 100644 --- a/dinky-web/src/locales/en-US/pages.ts +++ b/dinky-web/src/locales/en-US/pages.ts @@ -441,6 +441,7 @@ export default { 'pages.datastudio.label.jobConfig.addConfig.value': 'value', 'pages.datastudio.label.udf': 'Udf Item', 'pages.datastudio.label.udf.tip': 'Inject UDF item, Automatically add statement `create temporary function {functionName} as {className}` at the beginning of the SQL statement', + 'pages.datastudio.label.udf.duplicate.tip': 'The class [{className}] selected this 
time already exists and duplicate injection is not allowed. Please reselect or cancel injection (delete and change line).', 'pages.datastudio.label.udf.injectUdf': 'Inject UDF item', 'pages.datastudio.label.udf.name': 'function name', 'pages.datastudio.label.udf.className': 'class name', diff --git a/dinky-web/src/locales/zh-CN/pages.ts b/dinky-web/src/locales/zh-CN/pages.ts index 8925cceaa6..a9aac009ac 100644 --- a/dinky-web/src/locales/zh-CN/pages.ts +++ b/dinky-web/src/locales/zh-CN/pages.ts @@ -422,6 +422,7 @@ export default { 'pages.datastudio.label.jobConfig.addConfig.value': '值', 'pages.datastudio.label.udf': '注入UDF算子', 'pages.datastudio.label.udf.tip': '注入UDF算子, 自动在所有语句前注入`create temporary function {functionName} as {className}` 语句', + 'pages.datastudio.label.udf.duplicate.tip': '此次选择的类[{className}]已经存在,不允许重复注入,请重新选择,或者取消注入(删除改行即可)。', 'pages.datastudio.label.udf.injectUdf': '注入UDF', 'pages.datastudio.label.udf.name': '函数名称', 'pages.datastudio.label.udf.className': '类名', diff --git a/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/index.tsx b/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/index.tsx index b3bdb9f0d2..7fd16c6436 100644 --- a/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/index.tsx +++ b/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/index.tsx @@ -19,11 +19,7 @@ import FlinkOptionsSelect from '@/components/Flink/OptionsSelect'; import {SAVE_POINT_TYPE} from '@/pages/DataStudio/constants'; -import { - getCurrentData, - getCurrentTab, - isDataStudioTabsItemType -} from '@/pages/DataStudio/function'; +import {getCurrentData, getCurrentTab, isDataStudioTabsItemType} from '@/pages/DataStudio/function'; import {StateType, STUDIO_MODEL, STUDIO_MODEL_ASYNC} from '@/pages/DataStudio/model'; import { buildAlertGroupOptions, @@ -36,7 +32,7 @@ import { isCanRenderClusterInstance } from '@/pages/DataStudio/RightContainer/JobConfig/function'; import {JOB_LIFE_CYCLE} from '@/pages/DevOps/constants'; -import {AlertStateType, ALERT_MODEL_ASYNC} from '@/pages/RegCenter/Alert/AlertInstance/model'; +import {ALERT_MODEL_ASYNC, AlertStateType} from '@/pages/RegCenter/Alert/AlertInstance/model'; import {DIALECT, RUN_MODE, SWITCH_OPTIONS} from '@/services/constants'; import {l} from '@/utils/intl'; import {InfoCircleOutlined} from '@ant-design/icons'; @@ -49,13 +45,14 @@ import { ProFormSwitch, ProFormText } from '@ant-design/pro-components'; -import {Alert, Space} from 'antd'; +import {Alert, Input, Space} from 'antd'; import {useForm} from 'antd/es/form/Form'; import {debounce} from 'lodash'; -import React from 'react'; -import {useEffect, useState} from 'react'; +import React, {useEffect, useState} from 'react'; import {connect} from 'umi'; import FlinkUdfOptionsSelect from "@/components/Flink/UdfSelect"; +import {TaskUdfRefer} from "@/types/Studio/data"; +import {ErrorMessageAsync} from "@/utils/messages"; const JobConfig = (props: any) => { const { @@ -76,6 +73,14 @@ const JobConfig = (props: any) => { const [selectRunMode, setSelectRunMode] = useState(current?.type); + const [currentSelectUdfIndexMap, setCurrentSelectUdfIndexMap] = useState>( + new Map(current?.configJson?.udfRefer?.map((item: TaskUdfRefer, index: number) => [index, item]) ?? 
[]) + ); + + useEffect(() => { + + }, [currentSelectUdfIndexMap]); + useEffect(() => { dispatch({ type: STUDIO_MODEL_ASYNC.queryFlinkConfigOptions @@ -116,6 +121,83 @@ const JobConfig = (props: any) => { }); }; + + /** + * 处理 selectUdfIndexMap 的状态 | process the state of selectUdfIndexMap + * @param index + * @param className + * @param name + */ + function processSelectUdfMapState(index: number, className: string = '', name: string = '') { + setCurrentSelectUdfIndexMap(prevState => { + const newState = new Map(prevState); + newState.set(index, { + className: className, + name: name + }); + return newState; + }); + } + + + const handleClassChange = async (value: string, index: number) => { + // 检测 这个值是否已经存在 currentSelectUdfIndexMap 的 map 中 || check if the value already exists in the map of currentSelectUdfIndexMap + const values = currentSelectUdfIndexMap.values(); + for (const taskUdfRefer of values) { + if (taskUdfRefer?.className === value) { + await ErrorMessageAsync(l('pages.datastudio.label.udf.duplicate.tip', '', {className: value}), 3); + // clear the value of the form + form.setFieldsValue({ + 'configJson': { + 'udfRefer': { + [index]: { + className: '', + name: '' + } + } + } + }); + return; + } + } + const simpleClassName = value?.split('.')?.pop() ?? ''; + const lowerName = simpleClassName.charAt(0).toLowerCase() + simpleClassName.slice(1); + processSelectUdfMapState(index, value, lowerName); + form.setFieldsValue({ + 'configJson': { + 'udfRefer': { + [index]: { + className: value, + name: lowerName + } + } + } + }); + }; + + + function handleNameChange(name: string, index: number) { + // 拿到 currentSelectUdfIndexMap[index].get(index) 的值 || get the value of currentSelectUdfIndexMap[index].get(index) + const currentSelectUdfIndexMapValue = currentSelectUdfIndexMap.get(index); + + + // 如果 name 和 currentSelectUdfIndexMapValue?.name 相等 则不做任何操作 || if name and currentSelectUdfIndexMapValue?.name are equal, do nothing + if (currentSelectUdfIndexMapValue?.name && name !== currentSelectUdfIndexMapValue?.name) { + // 更新 currentSelectUdfIndexMap 的值 + processSelectUdfMapState(index, currentSelectUdfIndexMapValue?.className, name); + } + form.setFieldsValue({ + 'configJson': { + 'udfRefer': { + [index]: { + className: currentSelectUdfIndexMapValue?.className ?? '', + name: name + } + } + } + }); + } + return (
{current?.step === JOB_LIFE_CYCLE.PUBLISH && ( @@ -323,37 +405,27 @@ const JobConfig = (props: any) => { ) => { return ( - + { - const simpleClassName = value?.split('.')?.pop() ?? ''; - const lowerName = simpleClassName.charAt(0).toLowerCase() + simpleClassName.slice(1) - - form.setFieldsValue({ - 'configJson': { - 'udfRefer': { - [index]: { - name: lowerName - } - } - } - }); - } - }} + onChange={(value: string) => handleClassChange(value, index)} /> - + > + handleNameChange(e.target.value, index)} + placeholder={l('pages.datastudio.label.udf.name')} + style={{width: calculatorWidth(rightContainer.width) - 80}} + /> + ); From e6d52611230e8ccc1c312c353ae7acb88bc52ea9 Mon Sep 17 00:00:00 2001 From: Zzm0809 <934230207@qq.com> Date: Tue, 7 May 2024 12:46:38 +0800 Subject: [PATCH 20/47] =?UTF-8?q?=E4=BC=98=E5=8C=96=E4=B8=8B=E6=8B=89?= =?UTF-8?q?=E6=A1=86=E7=A7=BB=E9=99=A4udf=E9=80=BB=E8=BE=91?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Zzm0809 <934230207@qq.com> --- .../pages/DataStudio/RightContainer/JobConfig/index.tsx | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/index.tsx b/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/index.tsx index 7fd16c6436..a93158dde5 100644 --- a/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/index.tsx +++ b/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/index.tsx @@ -395,6 +395,14 @@ const JobConfig = (props: any) => { tooltip={l('pages.datastudio.label.udf.tip')} name={['configJson', 'udfRefer']} copyIconProps={false} + onAfterRemove={(_, index) => { + // 删除一项之后拿到 index 从 currentSelectUdfIndexMap 中删除对应的值 || get the value from currentSelectUdfIndexMap and delete it + setCurrentSelectUdfIndexMap(prevState => { + const newState = new Map(prevState); + newState.delete(index); + return newState; + }); + }} creatorButtonProps={{ style: {width: '100%'}, creatorButtonText: l('pages.datastudio.label.udf.injectUdf') From 1bb6e2fbd26a86939983171a2866f5f3a637af9b Mon Sep 17 00:00:00 2001 From: Zzm0809 <934230207@qq.com> Date: Wed, 8 May 2024 16:16:55 +0800 Subject: [PATCH 21/47] Optimize UDF display logic and add list display in UDF management Signed-off-by: Zzm0809 <934230207@qq.com> --- .../org/dinky/controller/JarController.java | 15 ++------- .../org/dinky/controller/UDFController.java | 33 +++++++++++++++++++ .../main/java/org/dinky/data/model/Task.java | 3 ++ .../java/org/dinky/data/vo/CascaderVO.java | 12 ++++++- .../java/org/dinky/service/TaskService.java | 2 ++ .../java/org/dinky/service/UDFService.java | 7 ++++ .../dinky/service/impl/FlinkServiceImpl.java | 4 +-- .../dinky/service/impl/TaskServiceImpl.java | 25 ++++++++++---- .../dinky/service/impl/UDFServiceImpl.java | 12 +++++++ .../main/java/org/dinky/utils/UDFUtils.java | 15 +++++++++ .../RightContainer/JobConfig/service.tsx | 2 +- dinky-web/src/services/endpoints.tsx | 2 +- 12 files changed, 107 insertions(+), 25 deletions(-) diff --git a/dinky-admin/src/main/java/org/dinky/controller/JarController.java b/dinky-admin/src/main/java/org/dinky/controller/JarController.java index 8e7a3469e9..5085233691 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/JarController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/JarController.java @@ -70,7 +70,7 @@ public Result>> generateJar() { List udfCodes = allUDF.stream() .map(task -> UDF.builder() .code(task.getStatement()) - .className(task.getSavePointPath()) + 
.className(StrUtil.isEmpty(task.getSavePointPath()) ? task.getConfigJson().getUdfConfig().getClassName(): task.getSavePointPath()) .functionLanguage( FunctionLanguage.valueOf(task.getDialect().toUpperCase())) .build()) @@ -84,16 +84,5 @@ public Result>> generateJar() { return Result.succeed(resultMap, msg); } - @GetMapping("/udf/geUdfs") - @ApiOperation("Get UDFs") - public Result> getUdfs() { - List staticUdfs = Operations.getCustomStaticUdfs(); - List dynamicUdfs = - taskService.getAllUdfEnabled().stream().map(UDFUtils::taskToUDF).collect(Collectors.toList()); - List allUdfs = new ArrayList<>(staticUdfs); - allUdfs.addAll(dynamicUdfs); - List result = - allUdfs.stream().map(udf -> new CascaderVO(udf.getClassName())).collect(Collectors.toList()); - return Result.succeed(result); - } + } diff --git a/dinky-admin/src/main/java/org/dinky/controller/UDFController.java b/dinky-admin/src/main/java/org/dinky/controller/UDFController.java index 6925a897a4..e83ea68d18 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/UDFController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/UDFController.java @@ -19,15 +19,23 @@ package org.dinky.controller; +import io.swagger.annotations.ApiOperation; +import java.util.ArrayList; +import java.util.stream.Collectors; import org.dinky.data.dto.CommonDTO; import org.dinky.data.model.Resources; import org.dinky.data.model.udf.UDFManage; import org.dinky.data.result.Result; +import org.dinky.data.vo.CascaderVO; import org.dinky.data.vo.UDFManageVO; +import org.dinky.function.data.model.UDF; +import org.dinky.service.TaskService; import org.dinky.service.UDFService; import java.util.List; +import org.dinky.trans.Operations; +import org.dinky.utils.UDFUtils; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestBody; @@ -50,6 +58,7 @@ @RequiredArgsConstructor public class UDFController { private final UDFService udfService; + private final TaskService taskService; /** * update udf name by id @@ -94,4 +103,28 @@ public Result addOrUpdateByResourceId(@RequestBody CommonDTO udfService.addOrUpdateByResourceId(dto.getData()); return Result.succeed(); } + + @GetMapping("/getAllUdfs") + @ApiOperation("Get UDFs") + public Result> getAllUdfs() { + // Get all UDFs of static UDFs and dynamic UDFs + List staticUdfs = Operations.getCustomStaticUdfs(); + // get all UDFs of dynamic UDFs(user defined UDFs in the task) + List userDefinedUdfs = + taskService.getAllUdfEnabled().stream().map(UDFUtils::taskToUDF).collect(Collectors.toList()); + // get all UDFs of UDFManage table + List udfManageDynamic = udfService.getUDFFromUdfManage().stream().map(UDFUtils::resourceUdfManageToUDF).collect(Collectors.toList()); + + CascaderVO staticUdfCascaderVO = new CascaderVO("Flink Static UDF", staticUdfs.stream().map(udf -> new CascaderVO(udf.getClassName(),udf.getClassName())).collect(Collectors.toList())); + CascaderVO userDefinedUdfCascaderVO = new CascaderVO("User Defined UDF", userDefinedUdfs.stream().map(udf -> new CascaderVO(udf.getClassName(),udf.getClassName())).collect(Collectors.toList())); + CascaderVO udfManageDynamicCascaderVO = new CascaderVO("From UDF Manage", udfManageDynamic.stream().map(udf -> new CascaderVO(udf.getClassName(),udf.getClassName())).collect(Collectors.toList())); + + List result = new ArrayList<>(); + result.add(staticUdfCascaderVO); + result.add(udfManageDynamicCascaderVO); + result.add(userDefinedUdfCascaderVO); + return 
Result.succeed(result); + } + + } diff --git a/dinky-admin/src/main/java/org/dinky/data/model/Task.java b/dinky-admin/src/main/java/org/dinky/data/model/Task.java index b5d7d3f144..109980bada 100644 --- a/dinky-admin/src/main/java/org/dinky/data/model/Task.java +++ b/dinky-admin/src/main/java/org/dinky/data/model/Task.java @@ -157,6 +157,9 @@ public class Task extends SuperEntity { notes = "ID of the version associated with the task") private Integer versionId; + @ApiModelProperty(value = "Enabled", dataType = "Boolean", example = "true", notes = "Whether the task is enabled") + private Boolean enabled; + @ApiModelProperty(value = "Statement", dataType = "String", notes = "SQL statement for the task") private String statement; diff --git a/dinky-admin/src/main/java/org/dinky/data/vo/CascaderVO.java b/dinky-admin/src/main/java/org/dinky/data/vo/CascaderVO.java index 8298ab1f2b..8944316beb 100644 --- a/dinky-admin/src/main/java/org/dinky/data/vo/CascaderVO.java +++ b/dinky-admin/src/main/java/org/dinky/data/vo/CascaderVO.java @@ -55,9 +55,19 @@ public CascaderVO(String label) { this.value = label; } + public CascaderVO(String label , String value) { + this.label = label; + this.value = value; + } + public CascaderVO(String label, List children) { this.label = label; - this.value = label; + this.children = children; + } + + public CascaderVO(String label, String value, List children) { + this.label = label; + this.value = value; this.children = children; } } diff --git a/dinky-admin/src/main/java/org/dinky/service/TaskService.java b/dinky-admin/src/main/java/org/dinky/service/TaskService.java index a42cdba1be..785bb4ff0e 100644 --- a/dinky-admin/src/main/java/org/dinky/service/TaskService.java +++ b/dinky-admin/src/main/java/org/dinky/service/TaskService.java @@ -30,9 +30,11 @@ import org.dinky.data.model.Task; import org.dinky.data.model.home.JobModelOverview; import org.dinky.data.model.home.JobTypeOverView; +import org.dinky.data.model.udf.UDFManage; import org.dinky.data.result.Result; import org.dinky.data.result.SqlExplainResult; import org.dinky.explainer.lineage.LineageResult; +import org.dinky.function.data.model.UDF; import org.dinky.gateway.enums.SavePointType; import org.dinky.gateway.result.SavePointResult; import org.dinky.job.JobConfig; diff --git a/dinky-admin/src/main/java/org/dinky/service/UDFService.java b/dinky-admin/src/main/java/org/dinky/service/UDFService.java index 7054d8e57c..e4d005a880 100644 --- a/dinky-admin/src/main/java/org/dinky/service/UDFService.java +++ b/dinky-admin/src/main/java/org/dinky/service/UDFService.java @@ -55,4 +55,11 @@ public interface UDFService extends IService { */ @Transactional(rollbackFor = Exception.class) void addOrUpdateByResourceId(List resourceIds); + + /** + * get udf from udfManage + * @return List + */ + List getUDFFromUdfManage(); + } diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/FlinkServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/FlinkServiceImpl.java index 8a84f0be2c..2261997371 100644 --- a/dinky-admin/src/main/java/org/dinky/service/impl/FlinkServiceImpl.java +++ b/dinky-admin/src/main/java/org/dinky/service/impl/FlinkServiceImpl.java @@ -49,14 +49,14 @@ public List loadConfigOptions() { List flinkConfigOptions = FlinkConfigOptionsUtils.loadOptionsByClassName(name); String binlogGroup = FlinkConfigOptionsUtils.parsedBinlogGroup(name); List child = flinkConfigOptions.stream() - .map(conf -> new CascaderVO(conf.getKey())) + .map(conf -> new CascaderVO(conf.getKey(),conf.getKey())) 
.collect(Collectors.toList()); CascaderVO cascaderVO = new CascaderVO(binlogGroup, child); dataList.add(cascaderVO); } List voList = documentService.lambdaQuery().eq(Document::getType, "FLINK_OPTIONS").list().stream() - .map(d -> new CascaderVO(d.getName().replace("set ", ""))) + .map(d -> new CascaderVO(d.getName().replace("set ", ""),d.getName().replace("set ", ""))) .collect(Collectors.toList()); CascaderVO cascaderVO = new CascaderVO(); diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java index a1c49545e2..824e7df657 100644 --- a/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java +++ b/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java @@ -19,6 +19,10 @@ package org.dinky.service.impl; +import java.util.Collections; +import java.util.LinkedList; +import java.util.stream.Collectors; +import mssql.googlecode.concurrentlinkedhashmap.ConcurrentLinkedHashMap; import org.dinky.assertion.Asserts; import org.dinky.assertion.DinkyAssert; import org.dinky.config.Dialect; @@ -43,6 +47,7 @@ import org.dinky.data.model.ClusterConfiguration; import org.dinky.data.model.ClusterInstance; import org.dinky.data.model.DataBase; +import org.dinky.data.model.Resources; import org.dinky.data.model.Savepoints; import org.dinky.data.model.SystemConfiguration; import org.dinky.data.model.Task; @@ -53,6 +58,7 @@ import org.dinky.data.model.home.JobModelOverview; import org.dinky.data.model.home.JobTypeOverView; import org.dinky.data.model.job.JobInstance; +import org.dinky.data.model.udf.UDFManage; import org.dinky.data.model.udf.UDFTemplate; import org.dinky.data.result.Result; import org.dinky.data.result.SqlExplainResult; @@ -60,6 +66,7 @@ import org.dinky.explainer.lineage.LineageResult; import org.dinky.explainer.sqllineage.SQLLineageBuilder; import org.dinky.function.compiler.CustomStringJavaCompiler; +import org.dinky.function.data.model.UDF; import org.dinky.function.pool.UdfCodePool; import org.dinky.function.util.UDFUtil; import org.dinky.gateway.enums.SavePointStrategy; @@ -83,8 +90,10 @@ import org.dinky.service.SavepointsService; import org.dinky.service.TaskService; import org.dinky.service.TaskVersionService; +import org.dinky.service.UDFService; import org.dinky.service.UDFTemplateService; import org.dinky.service.UserService; +import org.dinky.service.resource.ResourcesService; import org.dinky.service.task.BaseTask; import org.dinky.utils.FragmentVariableUtils; import org.dinky.utils.JsonUtils; @@ -153,6 +162,8 @@ public class TaskServiceImpl extends SuperServiceImpl implemen private final DataSourceProperties dsProperties; private final UserService userService; private final ApplicationContext applicationContext; + private final UDFService udfService; + private final ResourcesService resourcesService; @Resource @Lazy @@ -706,17 +717,17 @@ public JobModelOverview getJobStreamingOrBatchModelOverview() { @Override public List getAllUdfEnabled() { - return list(new QueryWrapper() - .in("dialect", Dialect.JAVA.getValue(), Dialect.SCALA.getValue(), Dialect.PYTHON.getValue()) - .eq("enabled", 1)); + return list(new LambdaQueryWrapper<>(Task.class ) + .in(Task::getDialect, Dialect.JAVA.getValue(), Dialect.SCALA.getValue(), Dialect.PYTHON.getValue()) + .eq(Task::getEnabled, 1)); } @Override public List getAllUDFWithSavePoint() { - return list(new QueryWrapper() - .in("dialect", Dialect.JAVA.getValue(), Dialect.SCALA.getValue(), Dialect.PYTHON.getValue()) - 
.eq("enabled", 1) - .isNotNull("save_point_path")); + return list(new LambdaQueryWrapper() + .in(Task::getDialect, Dialect.JAVA.getValue(), Dialect.SCALA.getValue(), Dialect.PYTHON.getValue()) + .eq(Task::getEnabled, 1) + .isNotNull(Task::getSavePointPath)); } @Override diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/UDFServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/UDFServiceImpl.java index 6b3d6ad7e4..58393cc114 100644 --- a/dinky-admin/src/main/java/org/dinky/service/impl/UDFServiceImpl.java +++ b/dinky-admin/src/main/java/org/dinky/service/impl/UDFServiceImpl.java @@ -19,6 +19,7 @@ package org.dinky.service.impl; +import java.util.Collections; import org.dinky.config.Dialect; import org.dinky.data.model.Resources; import org.dinky.data.model.udf.UDFManage; @@ -144,6 +145,17 @@ public void addOrUpdateByResourceId(List resourceIds) { } } + /** + * @return + */ + @Override + public List getUDFFromUdfManage() { + // 1. get all resources + List resourcesList = resourcesService.list(); + // 2. get all udf from udf manage and then filter the udf by resources id in resources list + return this.list().stream().filter(udf-> resourcesList.stream().anyMatch(resources -> resources.getId().equals(udf.getResourcesId()))) + .collect(Collectors.toList()); + } private static String getSimpleClassName(String className) { final List packages = StrUtil.split(className, CharUtil.DOT); if (null == packages || packages.size() < 2) { diff --git a/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java b/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java index 1e771df27e..8af7718674 100644 --- a/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java +++ b/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java @@ -22,6 +22,7 @@ import org.dinky.assertion.Asserts; import org.dinky.data.exception.BusException; import org.dinky.data.model.Task; +import org.dinky.data.model.udf.UDFManage; import org.dinky.function.data.model.UDF; import org.dinky.function.util.UDFUtil; @@ -41,4 +42,18 @@ public static UDF taskToUDF(Task task) { throw new BusException("udf `class` config is null,please check your udf task config"); } } + + public static UDF resourceUdfManageToUDF(UDFManage udfManage) { + if (Asserts.isNotNull(udfManage)) { + return UDF.builder() + .className(udfManage.getClassName()) + .code(udfManage.getName()) + // todo: I don't know how to specify the language +// .functionLanguage(FunctionLanguage.valueOf(task.getDialect().toUpperCase())) + .build(); + } else { + throw new BusException("udf `class` config is null,please check your udf task config"); + } + } + } diff --git a/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/service.tsx b/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/service.tsx index 62512567ac..b004ea8092 100644 --- a/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/service.tsx +++ b/dinky-web/src/pages/DataStudio/RightContainer/JobConfig/service.tsx @@ -35,7 +35,7 @@ export function getFlinkConfigs() { } export function getFlinkUdfOptions() { - return queryDataByParams(API_CONSTANTS.FLINK_UDF_OPTIONS); + return queryDataByParams(API_CONSTANTS.ALL_UDF_LIST); } diff --git a/dinky-web/src/services/endpoints.tsx b/dinky-web/src/services/endpoints.tsx index 8e9ad52a11..d21edb0d1e 100644 --- a/dinky-web/src/services/endpoints.tsx +++ b/dinky-web/src/services/endpoints.tsx @@ -164,6 +164,7 @@ export enum API_CONSTANTS { UDF_RESOURCES_LIST = '/api/udf/udfResourcesList', UDF_ADD = '/api/udf/addOrUpdateByResourceId', UDF_UPDATE = 
'/api/udf/update', + ALL_UDF_LIST = '/api/udf/getAllUdfs', // ------------------------------------ udf template ------------------------------------ UDF_TEMPLATE = '/api/udf/template/list', @@ -285,7 +286,6 @@ export enum API_CONSTANTS { // ------------------------------------ flink conf about ------------------------------------ READ_CHECKPOINT = '/api/flinkConf/readCheckPoint', FLINK_CONF_CONFIG_OPTIONS = '/api/flinkConf/configOptions', - FLINK_UDF_OPTIONS = '/api/jar/udf/geUdfs', // ------------------------------------ suggestion ------------------------------------ SUGGESTION_QUERY_ALL_SUGGESTIONS = '/api/suggestion/queryAllSuggestions' From d6e19a419efa928f56044e49fbc5d5412cba4f21 Mon Sep 17 00:00:00 2001 From: Zzm0809 <934230207@qq.com> Date: Wed, 8 May 2024 17:03:17 +0800 Subject: [PATCH 22/47] added udf manage table column Signed-off-by: Zzm0809 <934230207@qq.com> --- .../src/main/java/org/dinky/data/model/udf/UDFManage.java | 3 +++ .../src/main/java/org/dinky/service/impl/UDFServiceImpl.java | 3 +++ dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java | 3 +-- 3 files changed, 7 insertions(+), 2 deletions(-) diff --git a/dinky-admin/src/main/java/org/dinky/data/model/udf/UDFManage.java b/dinky-admin/src/main/java/org/dinky/data/model/udf/UDFManage.java index 92d9b38ea9..9935725551 100644 --- a/dinky-admin/src/main/java/org/dinky/data/model/udf/UDFManage.java +++ b/dinky-admin/src/main/java/org/dinky/data/model/udf/UDFManage.java @@ -43,6 +43,9 @@ public class UDFManage extends SuperEntity { @ApiModelProperty(value = "Class Name", dataType = "String", notes = "Class Name") private String className; + @ApiModelProperty(value = "Language", dataType = "String", notes = "Language") + private String language; + @ApiModelProperty(value = "Task Id", dataType = "Integer", notes = "Task Id") private Integer taskId; diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/UDFServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/UDFServiceImpl.java index 58393cc114..3740cb649b 100644 --- a/dinky-admin/src/main/java/org/dinky/service/impl/UDFServiceImpl.java +++ b/dinky-admin/src/main/java/org/dinky/service/impl/UDFServiceImpl.java @@ -20,6 +20,7 @@ package org.dinky.service.impl; import java.util.Collections; +import org.apache.flink.table.catalog.FunctionLanguage; import org.dinky.config.Dialect; import org.dinky.data.model.Resources; import org.dinky.data.model.udf.UDFManage; @@ -119,6 +120,7 @@ public void addOrUpdateByResourceId(List resourceIds) { return classes.stream().map(clazz -> { UDFManage udfManage = UDFManage.builder() .className(clazz.getName()) + .language(FunctionLanguage.JAVA.name()) .resourcesId(x.getId()) .build(); udfManage.setName(StrUtil.toUnderlineCase(getSimpleClassName(clazz.getName()))); @@ -131,6 +133,7 @@ public void addOrUpdateByResourceId(List resourceIds) { UDFManage udfManage = UDFManage.builder() .className(className) .resourcesId(x.getId()) + .language(FunctionLanguage.PYTHON.name()) .build(); udfManage.setName(StrUtil.toUnderlineCase(getSimpleClassName(className))); return udfManage; diff --git a/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java b/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java index 8af7718674..a35ad64f9c 100644 --- a/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java +++ b/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java @@ -48,8 +48,7 @@ public static UDF resourceUdfManageToUDF(UDFManage udfManage) { return UDF.builder() .className(udfManage.getClassName()) .code(udfManage.getName()) - // 
todo: I don't know how to specify the language -// .functionLanguage(FunctionLanguage.valueOf(task.getDialect().toUpperCase())) + .functionLanguage(FunctionLanguage.valueOf(udfManage.getLanguage().toUpperCase())) .build(); } else { throw new BusException("udf `class` config is null,please check your udf task config"); From de3601b34fbc914dd519fe4e93e32519c8c792cd Mon Sep 17 00:00:00 2001 From: Zzm0809 <934230207@qq.com> Date: Wed, 8 May 2024 17:12:36 +0800 Subject: [PATCH 23/47] added change sql file Signed-off-by: Zzm0809 <934230207@qq.com> --- .../src/main/resources/db/db-h2-ddl.sql | 1 + script/sql/dinky-mysql.sql | 1 + script/sql/dinky-pg.sql | 2 + .../upgrade/1.1.0_schema/mysql/dinky_ddl.sql | 37 +++++++++++++++++++ .../upgrade/1.1.0_schema/mysql/dinky_dml.sql | 35 ++++++++++++++++++ .../1.1.0_schema/postgre/dinky_ddl.sql | 22 +++++++++++ .../1.1.0_schema/postgre/dinky_dml.sql | 31 ++++++++++++++++ 7 files changed, 129 insertions(+) create mode 100644 script/sql/upgrade/1.1.0_schema/mysql/dinky_ddl.sql create mode 100644 script/sql/upgrade/1.1.0_schema/mysql/dinky_dml.sql create mode 100644 script/sql/upgrade/1.1.0_schema/postgre/dinky_ddl.sql create mode 100644 script/sql/upgrade/1.1.0_schema/postgre/dinky_dml.sql diff --git a/dinky-admin/src/main/resources/db/db-h2-ddl.sql b/dinky-admin/src/main/resources/db/db-h2-ddl.sql index 684283d7b3..c40a168c97 100644 --- a/dinky-admin/src/main/resources/db/db-h2-ddl.sql +++ b/dinky-admin/src/main/resources/db/db-h2-ddl.sql @@ -625,6 +625,7 @@ CREATE TABLE IF NOT EXISTS `dinky_udf_manage` ( `id` int(11) NOT NULL AUTO_INCREMENT, `name` varchar(50) DEFAULT NULL COMMENT 'udf name', `class_name` varchar(50) DEFAULT NULL COMMENT 'Complete class name', + `language` varchar(10) DEFAULT NULL COMMENT 'language', `task_id` int(11) DEFAULT NULL COMMENT 'task id', `resources_id` int(11) DEFAULT NULL COMMENT 'resources id', `enabled` tinyint(1) DEFAULT 1 COMMENT 'is enable', diff --git a/script/sql/dinky-mysql.sql b/script/sql/dinky-mysql.sql index c2cc5d0785..376b250456 100644 --- a/script/sql/dinky-mysql.sql +++ b/script/sql/dinky-mysql.sql @@ -2013,6 +2013,7 @@ CREATE TABLE `dinky_udf_manage` ( `id` int(11) NOT NULL AUTO_INCREMENT, `name` varchar(50) DEFAULT NULL COMMENT 'udf name', `class_name` varchar(50) DEFAULT NULL COMMENT 'Complete class name', + `language` varchar(10) DEFAULT NULL COMMENT 'language', `task_id` int(11) DEFAULT NULL COMMENT 'task id', `resources_id` int(11) DEFAULT NULL COMMENT 'resources id', `enabled` tinyint(1) DEFAULT 1 COMMENT 'is enable', diff --git a/script/sql/dinky-pg.sql b/script/sql/dinky-pg.sql index eae8ddcebc..7e86342847 100644 --- a/script/sql/dinky-pg.sql +++ b/script/sql/dinky-pg.sql @@ -4060,6 +4060,7 @@ CREATE TABLE dinky_udf_manage id SERIAL PRIMARY KEY NOT NULL, name VARCHAR(50), class_name VARCHAR(50), + language varchar(10) , task_id INT, resources_id INT, enabled BOOLEAN DEFAULT TRUE, @@ -4074,6 +4075,7 @@ CREATE INDEX name_resources_id_idx ON dinky_udf_manage (name, resources_id); COMMENT ON COLUMN dinky_udf_manage.id IS 'id'; COMMENT ON COLUMN dinky_udf_manage.name IS 'udf name'; COMMENT ON COLUMN dinky_udf_manage.class_name IS 'Complete class name'; +COMMENT ON COLUMN dinky_udf_manage.language IS 'udf language'; COMMENT ON COLUMN dinky_udf_manage.task_id IS 'task_id'; COMMENT ON COLUMN dinky_udf_manage.resources_id IS 'resources_id'; diff --git a/script/sql/upgrade/1.1.0_schema/mysql/dinky_ddl.sql b/script/sql/upgrade/1.1.0_schema/mysql/dinky_ddl.sql new file mode 100644 index 0000000000..b7d292f089 --- 
/dev/null +++ b/script/sql/upgrade/1.1.0_schema/mysql/dinky_ddl.sql @@ -0,0 +1,37 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + + SET NAMES utf8mb4; + SET FOREIGN_KEY_CHECKS = 0; + +alter table dinky_udf_manage add column `language` VARCHAR(10) DEFAULT null comment 'udf language' after class_name; + +UPDATE + dinky_udf_manage duml + JOIN + dinky_resources r ON duml.resources_id = r.id +SET + duml.`language` = + CASE + WHEN r.file_name LIKE '%.zip' THEN 'python' + WHEN r.file_name LIKE '%.jar' THEN 'java' + ELSE 'unknown' + END; + +SET FOREIGN_KEY_CHECKS = 1; diff --git a/script/sql/upgrade/1.1.0_schema/mysql/dinky_dml.sql b/script/sql/upgrade/1.1.0_schema/mysql/dinky_dml.sql new file mode 100644 index 0000000000..d0b5bb1677 --- /dev/null +++ b/script/sql/upgrade/1.1.0_schema/mysql/dinky_dml.sql @@ -0,0 +1,35 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +SET NAMES utf8mb4; +SET FOREIGN_KEY_CHECKS = 0; + +UPDATE + dinky_udf_manage duml + JOIN + dinky_resources r ON duml.resources_id = r.id +SET + duml.`language` = + CASE + WHEN r.file_name LIKE '%.zip' OR r.file_name LIKE '%.py' THEN 'python' + WHEN r.file_name LIKE '%.jar' THEN 'java' + ELSE 'unknown' + END; + +SET FOREIGN_KEY_CHECKS = 1; diff --git a/script/sql/upgrade/1.1.0_schema/postgre/dinky_ddl.sql b/script/sql/upgrade/1.1.0_schema/postgre/dinky_ddl.sql new file mode 100644 index 0000000000..8b8cadd960 --- /dev/null +++ b/script/sql/upgrade/1.1.0_schema/postgre/dinky_ddl.sql @@ -0,0 +1,22 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + + +ALTER TABLE dinky_udf_manage ADD COLUMN "language" VARCHAR(10) DEFAULT NULL; +COMMENT ON COLUMN dinky_udf_manage.language IS 'udf language'; diff --git a/script/sql/upgrade/1.1.0_schema/postgre/dinky_dml.sql b/script/sql/upgrade/1.1.0_schema/postgre/dinky_dml.sql new file mode 100644 index 0000000000..3c1d9a3df7 --- /dev/null +++ b/script/sql/upgrade/1.1.0_schema/postgre/dinky_dml.sql @@ -0,0 +1,31 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +UPDATE + dinky_udf_manage duml +SET + "language" = + CASE + WHEN r.file_name LIKE '%.zip' OR r.file_name LIKE '%.py' THEN 'python' + WHEN r.file_name LIKE '%.jar' THEN 'java' + ELSE 'unknown' + END + FROM dinky_resources r +WHERE + duml.resources_id = r.id; From e2fd68c2e9583fe3b634cdaac71dfff9ea15fc5a Mon Sep 17 00:00:00 2001 From: Zzm0809 <934230207@qq.com> Date: Wed, 8 May 2024 18:16:35 +0800 Subject: [PATCH 24/47] distinct udf Signed-off-by: Zzm0809 <934230207@qq.com> --- .../org/dinky/controller/UDFController.java | 3 ++ .../java/org/dinky/data/vo/UDFManageVO.java | 1 + .../dinky/service/impl/UDFServiceImpl.java | 40 +++++++++---------- .../UDF/components/UDFRegister/index.tsx | 6 +++ dinky-web/src/types/RegCenter/data.d.ts | 1 + 5 files changed, 31 insertions(+), 20 deletions(-) diff --git a/dinky-admin/src/main/java/org/dinky/controller/UDFController.java b/dinky-admin/src/main/java/org/dinky/controller/UDFController.java index e83ea68d18..61be2d5c87 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/UDFController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/UDFController.java @@ -19,8 +19,11 @@ package org.dinky.controller; +import cn.hutool.core.collection.CollUtil; import io.swagger.annotations.ApiOperation; import java.util.ArrayList; +import java.util.Collection; +import java.util.HashSet; import java.util.stream.Collectors; import org.dinky.data.dto.CommonDTO; import org.dinky.data.model.Resources; diff --git a/dinky-admin/src/main/java/org/dinky/data/vo/UDFManageVO.java b/dinky-admin/src/main/java/org/dinky/data/vo/UDFManageVO.java index 4ef22a55a5..17e7f63352 100644 --- a/dinky-admin/src/main/java/org/dinky/data/vo/UDFManageVO.java +++ b/dinky-admin/src/main/java/org/dinky/data/vo/UDFManageVO.java @@ -30,6 +30,7 @@ public class UDFManageVO implements Serializable { private String name; private Boolean enabled; private 
String className; + private String language; private Integer taskId; private Integer resourcesId; /** diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/UDFServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/UDFServiceImpl.java index 3740cb649b..9e254e38bf 100644 --- a/dinky-admin/src/main/java/org/dinky/service/impl/UDFServiceImpl.java +++ b/dinky-admin/src/main/java/org/dinky/service/impl/UDFServiceImpl.java @@ -19,7 +19,20 @@ package org.dinky.service.impl; -import java.util.Collections; +import cn.hutool.core.collection.CollUtil; +import cn.hutool.core.io.FileUtil; +import cn.hutool.core.lang.Assert; +import cn.hutool.core.util.CharUtil; +import cn.hutool.core.util.StrUtil; +import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper; +import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; +import java.io.File; +import java.util.Collection; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; import org.apache.flink.table.catalog.FunctionLanguage; import org.dinky.config.Dialect; import org.dinky.data.model.Resources; @@ -29,27 +42,9 @@ import org.dinky.service.UDFService; import org.dinky.service.resource.ResourcesService; import org.dinky.utils.UDFUtils; - -import java.io.File; -import java.util.Collection; -import java.util.List; -import java.util.stream.Collectors; -import java.util.stream.Stream; - import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; -import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper; -import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; - -import cn.hutool.core.collection.CollUtil; -import cn.hutool.core.io.FileUtil; -import cn.hutool.core.lang.Assert; -import cn.hutool.core.util.CharUtil; -import cn.hutool.core.util.StrUtil; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - /** * @since 0.6.8 */ @@ -78,8 +73,10 @@ public List selectAll() { String fileName = x.getFileName(); if ("jar".equals(FileUtil.getSuffix(fileName))) { x.setDialect(Dialect.JAVA.getValue()); + x.setLanguage(Dialect.JAVA.getValue()); } else { x.setDialect(Dialect.PYTHON.getValue()); + x.setLanguage(Dialect.JAVA.getValue()); } }) .collect(Collectors.toList()); @@ -156,9 +153,12 @@ public List getUDFFromUdfManage() { // 1. get all resources List resourcesList = resourcesService.list(); // 2. 
get all udf from udf manage and then filter the udf by resources id in resources list - return this.list().stream().filter(udf-> resourcesList.stream().anyMatch(resources -> resources.getId().equals(udf.getResourcesId()))) + List collect = this.list().stream().filter(udf -> resourcesList.stream().anyMatch(resources -> resources.getId().equals(udf.getResourcesId()))) .collect(Collectors.toList()); + // 去重 根据 className 去重 || distinct by className + return collect.stream().collect(Collectors.toMap(UDFManage::getClassName, udf -> udf, (a, b) -> a)).values().stream().collect(Collectors.toList()); } + private static String getSimpleClassName(String className) { final List packages = StrUtil.split(className, CharUtil.DOT); if (null == packages || packages.size() < 2) { diff --git a/dinky-web/src/pages/RegCenter/UDF/components/UDFRegister/index.tsx b/dinky-web/src/pages/RegCenter/UDF/components/UDFRegister/index.tsx index 9c5f5ca2d6..7f795ec303 100644 --- a/dinky-web/src/pages/RegCenter/UDF/components/UDFRegister/index.tsx +++ b/dinky-web/src/pages/RegCenter/UDF/components/UDFRegister/index.tsx @@ -131,6 +131,12 @@ const UDFRegister: React.FC = (props) => { readonly: true, width: '15%' }, + { + title: l('rc.udf.register.language'), + dataIndex: 'language', + readonly: true, + width: '15%' + }, { title: l('global.table.updateTime'), dataIndex: 'updateTime', diff --git a/dinky-web/src/types/RegCenter/data.d.ts b/dinky-web/src/types/RegCenter/data.d.ts index 1816fa3bee..7a2d305208 100644 --- a/dinky-web/src/types/RegCenter/data.d.ts +++ b/dinky-web/src/types/RegCenter/data.d.ts @@ -341,6 +341,7 @@ export interface UDFRegisterInfo { resourcesId: number; name: string; className: string; + language: string; enable: boolean; dialect: string; source: string; From 1ff6bc0483aec799862a4f9aec56d32b9e629ac7 Mon Sep 17 00:00:00 2001 From: Zzm0809 <934230207@qq.com> Date: Fri, 10 May 2024 13:57:02 +0800 Subject: [PATCH 25/47] Update JarController.java --- .../main/java/org/dinky/controller/JarController.java | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/dinky-admin/src/main/java/org/dinky/controller/JarController.java b/dinky-admin/src/main/java/org/dinky/controller/JarController.java index 5085233691..dbf21e85aa 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/JarController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/JarController.java @@ -68,12 +68,12 @@ public class JarController { public Result>> generateJar() { List allUDF = taskService.getAllUDFWithSavePoint(); List udfCodes = allUDF.stream() - .map(task -> UDF.builder() - .code(task.getStatement()) - .className(StrUtil.isEmpty(task.getSavePointPath()) ? 
task.getConfigJson().getUdfConfig().getClassName(): task.getSavePointPath()) - .functionLanguage( + 。map(task -> UDF.builder() + 。code(task.getStatement()) + 。className(task.getConfigJson()。getUdfConfig().getClassName()) + 。functionLanguage( FunctionLanguage.valueOf(task.getDialect().toUpperCase())) - .build()) + 。build()) .collect(Collectors.toList()); Map> resultMap = UDFUtil.buildJar(udfCodes); String msg = StrUtil.format( From 5e08fbcd047b8152e7e5bf7c69a40956e729d66a Mon Sep 17 00:00:00 2001 From: Zzm0809 <934230207@qq.com> Date: Fri, 10 May 2024 14:03:22 +0800 Subject: [PATCH 26/47] remove some method Signed-off-by: Zzm0809 <934230207@qq.com> --- .../org/dinky/controller/JarController.java | 12 +++++------ .../org/dinky/controller/UDFController.java | 6 +++--- .../java/org/dinky/service/TaskService.java | 8 ------- .../dinky/service/impl/TaskServiceImpl.java | 21 ++++--------------- 4 files changed, 13 insertions(+), 34 deletions(-) diff --git a/dinky-admin/src/main/java/org/dinky/controller/JarController.java b/dinky-admin/src/main/java/org/dinky/controller/JarController.java index dbf21e85aa..1c6d865a42 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/JarController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/JarController.java @@ -66,14 +66,14 @@ public class JarController { @PostMapping("/udf/generateJar") @ApiOperation("Generate jar") public Result>> generateJar() { - List allUDF = taskService.getAllUDFWithSavePoint(); + List allUDF = taskService.getReleaseUDF(); List udfCodes = allUDF.stream() - 。map(task -> UDF.builder() - 。code(task.getStatement()) - 。className(task.getConfigJson()。getUdfConfig().getClassName()) - 。functionLanguage( + .map(task -> UDF.builder() + .code(task.getStatement()) + .className(task.getConfigJson().getUdfConfig().getClassName()) + .functionLanguage( FunctionLanguage.valueOf(task.getDialect().toUpperCase())) - 。build()) + .build()) .collect(Collectors.toList()); Map> resultMap = UDFUtil.buildJar(udfCodes); String msg = StrUtil.format( diff --git a/dinky-admin/src/main/java/org/dinky/controller/UDFController.java b/dinky-admin/src/main/java/org/dinky/controller/UDFController.java index 61be2d5c87..fff8d4f69b 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/UDFController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/UDFController.java @@ -113,13 +113,13 @@ public Result> getAllUdfs() { // Get all UDFs of static UDFs and dynamic UDFs List staticUdfs = Operations.getCustomStaticUdfs(); // get all UDFs of dynamic UDFs(user defined UDFs in the task) - List userDefinedUdfs = - taskService.getAllUdfEnabled().stream().map(UDFUtils::taskToUDF).collect(Collectors.toList()); + List userDefinedReleaseUdfs = + taskService.getReleaseUDF().stream().map(UDFUtils::taskToUDF).collect(Collectors.toList()); // get all UDFs of UDFManage table List udfManageDynamic = udfService.getUDFFromUdfManage().stream().map(UDFUtils::resourceUdfManageToUDF).collect(Collectors.toList()); CascaderVO staticUdfCascaderVO = new CascaderVO("Flink Static UDF", staticUdfs.stream().map(udf -> new CascaderVO(udf.getClassName(),udf.getClassName())).collect(Collectors.toList())); - CascaderVO userDefinedUdfCascaderVO = new CascaderVO("User Defined UDF", userDefinedUdfs.stream().map(udf -> new CascaderVO(udf.getClassName(),udf.getClassName())).collect(Collectors.toList())); + CascaderVO userDefinedUdfCascaderVO = new CascaderVO("User Defined Release UDF", userDefinedReleaseUdfs.stream().map(udf -> new 
CascaderVO(udf.getClassName(),udf.getClassName())).collect(Collectors.toList())); CascaderVO udfManageDynamicCascaderVO = new CascaderVO("From UDF Manage", udfManageDynamic.stream().map(udf -> new CascaderVO(udf.getClassName(),udf.getClassName())).collect(Collectors.toList())); List result = new ArrayList<>(); diff --git a/dinky-admin/src/main/java/org/dinky/service/TaskService.java b/dinky-admin/src/main/java/org/dinky/service/TaskService.java index 785bb4ff0e..ab8adbe31b 100644 --- a/dinky-admin/src/main/java/org/dinky/service/TaskService.java +++ b/dinky-admin/src/main/java/org/dinky/service/TaskService.java @@ -192,14 +192,6 @@ public interface TaskService extends ISuperService { */ Task initDefaultFlinkSQLEnv(Integer tenantId); - List getAllUdfEnabled(); - - /** - * Get a list of user-defined functions (UDFs) that have savepoint path in the system. - * - * @return A list of {@link Task} objects representing the UDFs. - */ - List getAllUDFWithSavePoint(); /** * Get a list of all release user-defined functions (UDFs) in the system. diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java index 824e7df657..ceaa3da065 100644 --- a/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java +++ b/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java @@ -715,28 +715,15 @@ public JobModelOverview getJobStreamingOrBatchModelOverview() { return baseMapper.getJobStreamingOrBatchModelOverview(); } - @Override - public List getAllUdfEnabled() { - return list(new LambdaQueryWrapper<>(Task.class ) - .in(Task::getDialect, Dialect.JAVA.getValue(), Dialect.SCALA.getValue(), Dialect.PYTHON.getValue()) - .eq(Task::getEnabled, 1)); - } - - @Override - public List getAllUDFWithSavePoint() { - return list(new LambdaQueryWrapper() - .in(Task::getDialect, Dialect.JAVA.getValue(), Dialect.SCALA.getValue(), Dialect.PYTHON.getValue()) - .eq(Task::getEnabled, 1) - .isNotNull(Task::getSavePointPath)); - } - @Override public List getReleaseUDF() { return list(new LambdaQueryWrapper() .in(Task::getDialect, Dialect.JAVA.getValue(), Dialect.SCALA.getValue(), Dialect.PYTHON.getValue()) .eq(Task::getEnabled, 1) - .eq(Task::getStep, JobLifeCycle.PUBLISH.getValue()) - .isNotNull(Task::getSavePointPath)); + .eq(Task::getStep, JobLifeCycle.PUBLISH.getValue())) + .stream() + .filter(task -> Asserts.isNotNullString(task.getConfigJson().getUdfConfig().getClassName())) + .collect(Collectors.toList()); } @Override From a7fcb1fc6f21b1a5f6f387093450a5768fec39f7 Mon Sep 17 00:00:00 2001 From: Zzm0809 <934230207@qq.com> Date: Fri, 10 May 2024 14:04:05 +0800 Subject: [PATCH 27/47] modify code Signed-off-by: Zzm0809 <934230207@qq.com> --- dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java b/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java index a35ad64f9c..2c24df1d04 100644 --- a/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java +++ b/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java @@ -48,7 +48,7 @@ public static UDF resourceUdfManageToUDF(UDFManage udfManage) { return UDF.builder() .className(udfManage.getClassName()) .code(udfManage.getName()) - .functionLanguage(FunctionLanguage.valueOf(udfManage.getLanguage().toUpperCase())) + .functionLanguage(FunctionLanguage.valueOf(udfManage.getLanguage().toLowerCase())) .build(); } else { throw new BusException("udf `class` 
config is null,please check your udf task config"); From db1b46456850854ffd6bbe22d6bbea5981eb99e4 Mon Sep 17 00:00:00 2001 From: Zzm0809 <934230207@qq.com> Date: Fri, 10 May 2024 14:04:21 +0800 Subject: [PATCH 28/47] modify code Signed-off-by: Zzm0809 <934230207@qq.com> --- dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java b/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java index 2c24df1d04..a35ad64f9c 100644 --- a/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java +++ b/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java @@ -48,7 +48,7 @@ public static UDF resourceUdfManageToUDF(UDFManage udfManage) { return UDF.builder() .className(udfManage.getClassName()) .code(udfManage.getName()) - .functionLanguage(FunctionLanguage.valueOf(udfManage.getLanguage().toLowerCase())) + .functionLanguage(FunctionLanguage.valueOf(udfManage.getLanguage().toUpperCase())) .build(); } else { throw new BusException("udf `class` config is null,please check your udf task config"); From 79ac1cb8d5592efee62adeb8edb24c5ce430893e Mon Sep 17 00:00:00 2001 From: Zzm0809 <934230207@qq.com> Date: Fri, 10 May 2024 14:09:55 +0800 Subject: [PATCH 29/47] Optimize code Signed-off-by: Zzm0809 <934230207@qq.com> --- .../org/dinky/controller/UDFController.java | 28 +++++---------- .../java/org/dinky/service/UDFService.java | 8 +++++ .../dinky/service/impl/UDFServiceImpl.java | 35 +++++++++++++++++++ 3 files changed, 51 insertions(+), 20 deletions(-) diff --git a/dinky-admin/src/main/java/org/dinky/controller/UDFController.java b/dinky-admin/src/main/java/org/dinky/controller/UDFController.java index fff8d4f69b..40c52d5e32 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/UDFController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/UDFController.java @@ -61,7 +61,6 @@ @RequiredArgsConstructor public class UDFController { private final UDFService udfService; - private final TaskService taskService; /** * update udf name by id @@ -107,26 +106,15 @@ public Result addOrUpdateByResourceId(@RequestBody CommonDTO return Result.succeed(); } - @GetMapping("/getAllUdfs") - @ApiOperation("Get UDFs") - public Result> getAllUdfs() { - // Get all UDFs of static UDFs and dynamic UDFs - List staticUdfs = Operations.getCustomStaticUdfs(); - // get all UDFs of dynamic UDFs(user defined UDFs in the task) - List userDefinedReleaseUdfs = - taskService.getReleaseUDF().stream().map(UDFUtils::taskToUDF).collect(Collectors.toList()); - // get all UDFs of UDFManage table - List udfManageDynamic = udfService.getUDFFromUdfManage().stream().map(UDFUtils::resourceUdfManageToUDF).collect(Collectors.toList()); - - CascaderVO staticUdfCascaderVO = new CascaderVO("Flink Static UDF", staticUdfs.stream().map(udf -> new CascaderVO(udf.getClassName(),udf.getClassName())).collect(Collectors.toList())); - CascaderVO userDefinedUdfCascaderVO = new CascaderVO("User Defined Release UDF", userDefinedReleaseUdfs.stream().map(udf -> new CascaderVO(udf.getClassName(),udf.getClassName())).collect(Collectors.toList())); - CascaderVO udfManageDynamicCascaderVO = new CascaderVO("From UDF Manage", udfManageDynamic.stream().map(udf -> new CascaderVO(udf.getClassName(),udf.getClassName())).collect(Collectors.toList())); + /** + * get all udf and convert its to cascader + * @return {@link Result} of {@link List} of {@link CascaderVO} + */ - List result = new ArrayList<>(); - result.add(staticUdfCascaderVO); - 
result.add(udfManageDynamicCascaderVO); - result.add(userDefinedUdfCascaderVO); - return Result.succeed(result); + @GetMapping("/getAllUdfs") + @ApiOperation("Get All UDFs") + public Result> getAllUdfsToCascader() { + return Result.succeed(udfService.getAllUdfsToCascader()); } diff --git a/dinky-admin/src/main/java/org/dinky/service/UDFService.java b/dinky-admin/src/main/java/org/dinky/service/UDFService.java index e4d005a880..5578da3092 100644 --- a/dinky-admin/src/main/java/org/dinky/service/UDFService.java +++ b/dinky-admin/src/main/java/org/dinky/service/UDFService.java @@ -21,6 +21,7 @@ import org.dinky.data.model.Resources; import org.dinky.data.model.udf.UDFManage; +import org.dinky.data.vo.CascaderVO; import org.dinky.data.vo.UDFManageVO; import java.util.List; @@ -62,4 +63,11 @@ public interface UDFService extends IService { */ List getUDFFromUdfManage(); + + /** + * get all udf to cascader list + * @return List + */ + List getAllUdfsToCascader(); + } diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/UDFServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/UDFServiceImpl.java index 9e254e38bf..2bb008c7ce 100644 --- a/dinky-admin/src/main/java/org/dinky/service/impl/UDFServiceImpl.java +++ b/dinky-admin/src/main/java/org/dinky/service/impl/UDFServiceImpl.java @@ -27,7 +27,10 @@ import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper; import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; import java.io.File; +import java.util.ArrayList; import java.util.Collection; +import java.util.Collections; +import java.util.LinkedList; import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -37,10 +40,14 @@ import org.dinky.config.Dialect; import org.dinky.data.model.Resources; import org.dinky.data.model.udf.UDFManage; +import org.dinky.data.vo.CascaderVO; import org.dinky.data.vo.UDFManageVO; +import org.dinky.function.data.model.UDF; import org.dinky.mapper.UDFManageMapper; +import org.dinky.service.TaskService; import org.dinky.service.UDFService; import org.dinky.service.resource.ResourcesService; +import org.dinky.trans.Operations; import org.dinky.utils.UDFUtils; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; @@ -53,6 +60,8 @@ @Slf4j public class UDFServiceImpl extends ServiceImpl implements UDFService { private final ResourcesService resourcesService; + private final TaskService taskService; + @Override public boolean update(UDFManage entity) { @@ -159,6 +168,32 @@ public List getUDFFromUdfManage() { return collect.stream().collect(Collectors.toMap(UDFManage::getClassName, udf -> udf, (a, b) -> a)).values().stream().collect(Collectors.toList()); } + /** + * get all udf to cascader list + * + * @return List + */ + @Override + public List getAllUdfsToCascader() { + // Get all UDFs of static UDFs and dynamic UDFs + List staticUdfs = Operations.getCustomStaticUdfs(); + // get all UDFs of dynamic UDFs(user defined UDFs in the task) + List userDefinedReleaseUdfs = + taskService.getReleaseUDF().stream().map(UDFUtils::taskToUDF).collect(Collectors.toList()); + // get all UDFs of UDFManage table + List udfManageDynamic = getUDFFromUdfManage().stream().map(UDFUtils::resourceUdfManageToUDF).collect(Collectors.toList()); + + CascaderVO staticUdfCascaderVO = new CascaderVO("Flink Static UDF", staticUdfs.stream().map(udf -> new CascaderVO(udf.getClassName(),udf.getClassName())).collect(Collectors.toList())); + CascaderVO 
userDefinedUdfCascaderVO = new CascaderVO("User Defined Release UDF", userDefinedReleaseUdfs.stream().map(udf -> new CascaderVO(udf.getClassName(),udf.getClassName())).collect(Collectors.toList())); + CascaderVO udfManageDynamicCascaderVO = new CascaderVO("From UDF Manage", udfManageDynamic.stream().map(udf -> new CascaderVO(udf.getClassName(),udf.getClassName())).collect(Collectors.toList())); + + List result = new LinkedList<>(); + result.add(staticUdfCascaderVO); + result.add(udfManageDynamicCascaderVO); + result.add(userDefinedUdfCascaderVO); + return result; + } + private static String getSimpleClassName(String className) { final List packages = StrUtil.split(className, CharUtil.DOT); if (null == packages || packages.size() < 2) { From 07be1317a2a3651daf9a014e217a4248d86e978f Mon Sep 17 00:00:00 2001 From: Zzm0809 <934230207@qq.com> Date: Fri, 10 May 2024 14:22:21 +0800 Subject: [PATCH 30/47] Optimize code Signed-off-by: Zzm0809 <934230207@qq.com> --- .../src/main/java/org/dinky/controller/UDFController.java | 6 +++++- .../src/main/java/org/dinky/service/UDFService.java | 3 ++- .../main/java/org/dinky/service/impl/UDFServiceImpl.java | 7 ++----- 3 files changed, 9 insertions(+), 7 deletions(-) diff --git a/dinky-admin/src/main/java/org/dinky/controller/UDFController.java b/dinky-admin/src/main/java/org/dinky/controller/UDFController.java index 40c52d5e32..afd7889c2c 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/UDFController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/UDFController.java @@ -61,6 +61,7 @@ @RequiredArgsConstructor public class UDFController { private final UDFService udfService; + private final TaskService taskService; /** * update udf name by id @@ -114,7 +115,10 @@ public Result addOrUpdateByResourceId(@RequestBody CommonDTO @GetMapping("/getAllUdfs") @ApiOperation("Get All UDFs") public Result> getAllUdfsToCascader() { - return Result.succeed(udfService.getAllUdfsToCascader()); + // get all UDFs of dynamic UDFs(user defined UDFs in the task) + List userDefinedReleaseUdfs = + taskService.getReleaseUDF().stream().map(UDFUtils::taskToUDF).collect(Collectors.toList()); + return Result.succeed(udfService.getAllUdfsToCascader(userDefinedReleaseUdfs)); } diff --git a/dinky-admin/src/main/java/org/dinky/service/UDFService.java b/dinky-admin/src/main/java/org/dinky/service/UDFService.java index 5578da3092..cc5d1b5534 100644 --- a/dinky-admin/src/main/java/org/dinky/service/UDFService.java +++ b/dinky-admin/src/main/java/org/dinky/service/UDFService.java @@ -26,6 +26,7 @@ import java.util.List; +import org.dinky.function.data.model.UDF; import org.springframework.transaction.annotation.Transactional; import com.baomidou.mybatisplus.extension.service.IService; @@ -68,6 +69,6 @@ public interface UDFService extends IService { * get all udf to cascader list * @return List */ - List getAllUdfsToCascader(); + List getAllUdfsToCascader(List userDefinedReleaseUdfs); } diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/UDFServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/UDFServiceImpl.java index 2bb008c7ce..8ba1a4469e 100644 --- a/dinky-admin/src/main/java/org/dinky/service/impl/UDFServiceImpl.java +++ b/dinky-admin/src/main/java/org/dinky/service/impl/UDFServiceImpl.java @@ -60,7 +60,6 @@ @Slf4j public class UDFServiceImpl extends ServiceImpl implements UDFService { private final ResourcesService resourcesService; - private final TaskService taskService; @Override @@ -174,12 +173,10 @@ public List getUDFFromUdfManage() { * 
@return List */ @Override - public List getAllUdfsToCascader() { + public List getAllUdfsToCascader(List userDefinedReleaseUdfs) { // Get all UDFs of static UDFs and dynamic UDFs List staticUdfs = Operations.getCustomStaticUdfs(); - // get all UDFs of dynamic UDFs(user defined UDFs in the task) - List userDefinedReleaseUdfs = - taskService.getReleaseUDF().stream().map(UDFUtils::taskToUDF).collect(Collectors.toList()); + // get all UDFs of UDFManage table List udfManageDynamic = getUDFFromUdfManage().stream().map(UDFUtils::resourceUdfManageToUDF).collect(Collectors.toList()); From 83f6b1e2b76b317f53832de0488e791687f3f3dc Mon Sep 17 00:00:00 2001 From: Zzm0809 <934230207@qq.com> Date: Fri, 10 May 2024 14:26:45 +0800 Subject: [PATCH 31/47] Optimize code Signed-off-by: Zzm0809 <934230207@qq.com> --- dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java b/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java index a35ad64f9c..2543bbd376 100644 --- a/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java +++ b/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java @@ -51,7 +51,7 @@ public static UDF resourceUdfManageToUDF(UDFManage udfManage) { .functionLanguage(FunctionLanguage.valueOf(udfManage.getLanguage().toUpperCase())) .build(); } else { - throw new BusException("udf `class` config is null,please check your udf task config"); + throw new BusException("udf `class` config is null, Please check if the resource file to which this udf belongs exists"); } } From ac4970e3596e26800b014df15ff9d5a85b302a73 Mon Sep 17 00:00:00 2001 From: Zzm0809 <934230207@qq.com> Date: Fri, 10 May 2024 14:30:07 +0800 Subject: [PATCH 32/47] Optimize code Signed-off-by: Zzm0809 <934230207@qq.com> --- dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java b/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java index 2543bbd376..5c666a85ce 100644 --- a/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java +++ b/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java @@ -46,8 +46,8 @@ public static UDF taskToUDF(Task task) { public static UDF resourceUdfManageToUDF(UDFManage udfManage) { if (Asserts.isNotNull(udfManage)) { return UDF.builder() + .name(udfManage.getName()) .className(udfManage.getClassName()) - .code(udfManage.getName()) .functionLanguage(FunctionLanguage.valueOf(udfManage.getLanguage().toUpperCase())) .build(); } else { From 40532175c56ef99bcaa3d03c017f1ecc5e76e95e Mon Sep 17 00:00:00 2001 From: Zzm0809 <934230207@qq.com> Date: Fri, 10 May 2024 14:45:42 +0800 Subject: [PATCH 33/47] Optimize code Signed-off-by: Zzm0809 <934230207@qq.com> --- .../src/main/java/org/dinky/data/model/ext/TaskUdfConfig.java | 2 +- .../src/main/java/org/dinky/service/impl/UDFServiceImpl.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/dinky-admin/src/main/java/org/dinky/data/model/ext/TaskUdfConfig.java b/dinky-admin/src/main/java/org/dinky/data/model/ext/TaskUdfConfig.java index 99958c79ef..cda3a22ddb 100644 --- a/dinky-admin/src/main/java/org/dinky/data/model/ext/TaskUdfConfig.java +++ b/dinky-admin/src/main/java/org/dinky/data/model/ext/TaskUdfConfig.java @@ -33,7 +33,7 @@ @AllArgsConstructor @NoArgsConstructor public class TaskUdfConfig implements Serializable { - private static final long serialVersionUID = 1L; + private static final long 
serialVersionUID = -5981544561742928810L; @ApiModelProperty(value = "Template ID", dataType = "Integer", example = "1", notes = "ID of the UDF template") private Integer templateId; diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/UDFServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/UDFServiceImpl.java index 8ba1a4469e..b4f8f6b573 100644 --- a/dinky-admin/src/main/java/org/dinky/service/impl/UDFServiceImpl.java +++ b/dinky-admin/src/main/java/org/dinky/service/impl/UDFServiceImpl.java @@ -144,7 +144,7 @@ public void addOrUpdateByResourceId(List resourceIds) { return udfManage; }); } else { - log.error("Unsupported file type: {}", suffix); + log.error("Unsupported file type to add UDFManage, extension: {}", suffix); } return Stream.of(); }) From 8af65053f777091d3f22170ee3d937960764b03b Mon Sep 17 00:00:00 2001 From: Zzm0809 <934230207@qq.com> Date: Tue, 14 May 2024 09:20:58 +0800 Subject: [PATCH 34/47] added confirm tips Signed-off-by: Zzm0809 <934230207@qq.com> --- dinky-web/src/locales/en-US/pages.ts | 1 + dinky-web/src/locales/zh-CN/pages.ts | 1 + .../Resource/components/ResourceOverView/index.tsx | 13 ++++++++++--- 3 files changed, 12 insertions(+), 3 deletions(-) diff --git a/dinky-web/src/locales/en-US/pages.ts b/dinky-web/src/locales/en-US/pages.ts index a1bdde41d3..3bcc72ee77 100644 --- a/dinky-web/src/locales/en-US/pages.ts +++ b/dinky-web/src/locales/en-US/pages.ts @@ -1009,6 +1009,7 @@ export default { 'Support for a single or bulk upload. Strictly prohibited from uploading company data or\n other banned files.', 'rc.resource.filelist': 'File list', 'rc.resource.sync': 'Sync remote files', + 'rc.resource.sync.confirm': 'Please note that this operation will delete all records in the database and will affect running jobs as well as corresponding resource files referenced in UDF management, resulting in job failure. And UDF cannot be used in UDF management Please operate with caution!! Please confirm if you want to continue?', 'rc.resource.copy_to_add_custom_jar': 'Copy as ADD CUSTOMJAR syntax', 'rc.resource.copy_to_add_jar': 'Copy as ADD JAR syntax', 'rc.resource.copy_to_add_file': 'Copy as ADD FILE syntax', diff --git a/dinky-web/src/locales/zh-CN/pages.ts b/dinky-web/src/locales/zh-CN/pages.ts index 67fe8f0005..21db4bbd39 100644 --- a/dinky-web/src/locales/zh-CN/pages.ts +++ b/dinky-web/src/locales/zh-CN/pages.ts @@ -965,6 +965,7 @@ export default { 'rc.resource.upload.tip2': '支持单个或批量上传。严禁上传公司数据或其他禁止上传的文件。', 'rc.resource.filelist': '文件列表', 'rc.resource.sync': '同步目录结构', + 'rc.resource.sync.confirm': '请注意: 该操作会删除数据库内的所有记录,且会关系到运行中的作业,以及UDF管理中引用的对应资源文件.从而导致作业运行失败。以及在 UDF管理中的 UDF 无法被使用. 请谨慎操作!! 请确认是否继续? 
', 'rc.resource.copy_to_add_custom_jar': '复制为 ADD CUSTOMJAR 语法', 'rc.resource.copy_to_add_jar': '复制为 ADD JAR 语法', 'rc.resource.copy_to_add_file': '复制为 ADD FILE 语法', diff --git a/dinky-web/src/pages/RegCenter/Resource/components/ResourceOverView/index.tsx b/dinky-web/src/pages/RegCenter/Resource/components/ResourceOverView/index.tsx index d565d9137e..4194e0cb80 100644 --- a/dinky-web/src/pages/RegCenter/Resource/components/ResourceOverView/index.tsx +++ b/dinky-web/src/pages/RegCenter/Resource/components/ResourceOverView/index.tsx @@ -49,7 +49,7 @@ import { WarningOutlined } from '@ant-design/icons'; import { ProCard } from '@ant-design/pro-components'; import { history } from '@umijs/max'; import { useAsyncEffect } from 'ahooks'; -import { Button, Result } from 'antd'; +import {Button, Modal, Result} from 'antd'; import { MenuInfo } from 'rc-menu/es/interface'; import React, { useCallback, useEffect, useRef, useState } from 'react'; import { connect } from 'umi'; @@ -244,8 +244,15 @@ const ResourceOverView: React.FC = (props) => { }; const handleSync = async () => { - await handleGetOption(API_CONSTANTS.RESOURCE_SYNC_DATA, l('rc.resource.sync'), {}); - await refreshTree(); + Modal.confirm({ + title: l('rc.resource.sync'), + content: l('rc.resource.sync.confirm'), + onOk: async () => { + await handleGetOption(API_CONSTANTS.RESOURCE_SYNC_DATA, l('rc.resource.sync'), {}); + await refreshTree(); + } + }); + }; /** From 7c778b53493c56b03d367faea43ae65ae412c8cd Mon Sep 17 00:00:00 2001 From: Zzm0809 <934230207@qq.com> Date: Tue, 14 May 2024 16:40:17 +0800 Subject: [PATCH 35/47] format Signed-off-by: Zzm0809 <934230207@qq.com> --- .../org/dinky/controller/JarController.java | 13 +--- .../org/dinky/controller/UDFController.java | 14 +--- .../java/org/dinky/data/vo/CascaderVO.java | 2 +- .../main/java/org/dinky/init/SystemInit.java | 1 - .../java/org/dinky/service/TaskService.java | 3 - .../java/org/dinky/service/UDFService.java | 4 +- .../dinky/service/impl/FlinkServiceImpl.java | 5 +- .../dinky/service/impl/TaskServiceImpl.java | 23 +++--- .../dinky/service/impl/UDFServiceImpl.java | 73 ++++++++++++------- .../main/java/org/dinky/utils/UDFUtils.java | 7 +- 10 files changed, 73 insertions(+), 72 deletions(-) diff --git a/dinky-admin/src/main/java/org/dinky/controller/JarController.java b/dinky-admin/src/main/java/org/dinky/controller/JarController.java index 1c6d865a42..0a41af36d7 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/JarController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/JarController.java @@ -21,22 +21,17 @@ import org.dinky.data.model.Task; import org.dinky.data.result.Result; -import org.dinky.data.vo.CascaderVO; import org.dinky.function.constant.PathConstant; import org.dinky.function.data.model.UDF; import org.dinky.function.util.UDFUtil; import org.dinky.service.TaskService; -import org.dinky.trans.Operations; -import org.dinky.utils.UDFUtils; import org.apache.flink.table.catalog.FunctionLanguage; -import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.stream.Collectors; -import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RestController; @@ -69,9 +64,9 @@ public Result>> generateJar() { List allUDF = taskService.getReleaseUDF(); List udfCodes = allUDF.stream() .map(task -> UDF.builder() - .code(task.getStatement()) - 
.className(task.getConfigJson().getUdfConfig().getClassName()) - .functionLanguage( + .code(task.getStatement()) + .className(task.getConfigJson().getUdfConfig().getClassName()) + .functionLanguage( FunctionLanguage.valueOf(task.getDialect().toUpperCase())) .build()) .collect(Collectors.toList()); @@ -83,6 +78,4 @@ public Result>> generateJar() { resultMap.get("failed")); return Result.succeed(resultMap, msg); } - - } diff --git a/dinky-admin/src/main/java/org/dinky/controller/UDFController.java b/dinky-admin/src/main/java/org/dinky/controller/UDFController.java index afd7889c2c..452c90f6ba 100644 --- a/dinky-admin/src/main/java/org/dinky/controller/UDFController.java +++ b/dinky-admin/src/main/java/org/dinky/controller/UDFController.java @@ -19,12 +19,6 @@ package org.dinky.controller; -import cn.hutool.core.collection.CollUtil; -import io.swagger.annotations.ApiOperation; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashSet; -import java.util.stream.Collectors; import org.dinky.data.dto.CommonDTO; import org.dinky.data.model.Resources; import org.dinky.data.model.udf.UDFManage; @@ -34,11 +28,11 @@ import org.dinky.function.data.model.UDF; import org.dinky.service.TaskService; import org.dinky.service.UDFService; +import org.dinky.utils.UDFUtils; import java.util.List; +import java.util.stream.Collectors; -import org.dinky.trans.Operations; -import org.dinky.utils.UDFUtils; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestBody; @@ -47,6 +41,7 @@ import cn.dev33.satoken.annotation.SaCheckLogin; import io.swagger.annotations.Api; +import io.swagger.annotations.ApiOperation; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -111,7 +106,6 @@ public Result addOrUpdateByResourceId(@RequestBody CommonDTO * get all udf and convert its to cascader * @return {@link Result} of {@link List} of {@link CascaderVO} */ - @GetMapping("/getAllUdfs") @ApiOperation("Get All UDFs") public Result> getAllUdfsToCascader() { @@ -120,6 +114,4 @@ public Result> getAllUdfsToCascader() { taskService.getReleaseUDF().stream().map(UDFUtils::taskToUDF).collect(Collectors.toList()); return Result.succeed(udfService.getAllUdfsToCascader(userDefinedReleaseUdfs)); } - - } diff --git a/dinky-admin/src/main/java/org/dinky/data/vo/CascaderVO.java b/dinky-admin/src/main/java/org/dinky/data/vo/CascaderVO.java index 8944316beb..e7a7c36704 100644 --- a/dinky-admin/src/main/java/org/dinky/data/vo/CascaderVO.java +++ b/dinky-admin/src/main/java/org/dinky/data/vo/CascaderVO.java @@ -55,7 +55,7 @@ public CascaderVO(String label) { this.value = label; } - public CascaderVO(String label , String value) { + public CascaderVO(String label, String value) { this.label = label; this.value = value; } diff --git a/dinky-admin/src/main/java/org/dinky/init/SystemInit.java b/dinky-admin/src/main/java/org/dinky/init/SystemInit.java index e525cebf21..02464ea435 100644 --- a/dinky-admin/src/main/java/org/dinky/init/SystemInit.java +++ b/dinky-admin/src/main/java/org/dinky/init/SystemInit.java @@ -93,7 +93,6 @@ public class SystemInit implements ApplicationRunner { private final TenantService tenantService; private final GitProjectService gitProjectService; private final ScheduleThreadPool schedule; - private static Project project; @Override diff --git a/dinky-admin/src/main/java/org/dinky/service/TaskService.java 
b/dinky-admin/src/main/java/org/dinky/service/TaskService.java index ab8adbe31b..81e52200d8 100644 --- a/dinky-admin/src/main/java/org/dinky/service/TaskService.java +++ b/dinky-admin/src/main/java/org/dinky/service/TaskService.java @@ -30,11 +30,9 @@ import org.dinky.data.model.Task; import org.dinky.data.model.home.JobModelOverview; import org.dinky.data.model.home.JobTypeOverView; -import org.dinky.data.model.udf.UDFManage; import org.dinky.data.result.Result; import org.dinky.data.result.SqlExplainResult; import org.dinky.explainer.lineage.LineageResult; -import org.dinky.function.data.model.UDF; import org.dinky.gateway.enums.SavePointType; import org.dinky.gateway.result.SavePointResult; import org.dinky.job.JobConfig; @@ -192,7 +190,6 @@ public interface TaskService extends ISuperService { */ Task initDefaultFlinkSQLEnv(Integer tenantId); - /** * Get a list of all release user-defined functions (UDFs) in the system. * @return A list of {@link Task} objects representing the release UDFs. diff --git a/dinky-admin/src/main/java/org/dinky/service/UDFService.java b/dinky-admin/src/main/java/org/dinky/service/UDFService.java index cc5d1b5534..34e1bf91f8 100644 --- a/dinky-admin/src/main/java/org/dinky/service/UDFService.java +++ b/dinky-admin/src/main/java/org/dinky/service/UDFService.java @@ -23,10 +23,10 @@ import org.dinky.data.model.udf.UDFManage; import org.dinky.data.vo.CascaderVO; import org.dinky.data.vo.UDFManageVO; +import org.dinky.function.data.model.UDF; import java.util.List; -import org.dinky.function.data.model.UDF; import org.springframework.transaction.annotation.Transactional; import com.baomidou.mybatisplus.extension.service.IService; @@ -64,11 +64,9 @@ public interface UDFService extends IService { */ List getUDFFromUdfManage(); - /** * get all udf to cascader list * @return List */ List getAllUdfsToCascader(List userDefinedReleaseUdfs); - } diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/FlinkServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/FlinkServiceImpl.java index 2261997371..54bc6e1c69 100644 --- a/dinky-admin/src/main/java/org/dinky/service/impl/FlinkServiceImpl.java +++ b/dinky-admin/src/main/java/org/dinky/service/impl/FlinkServiceImpl.java @@ -49,14 +49,15 @@ public List loadConfigOptions() { List flinkConfigOptions = FlinkConfigOptionsUtils.loadOptionsByClassName(name); String binlogGroup = FlinkConfigOptionsUtils.parsedBinlogGroup(name); List child = flinkConfigOptions.stream() - .map(conf -> new CascaderVO(conf.getKey(),conf.getKey())) + .map(conf -> new CascaderVO(conf.getKey(), conf.getKey())) .collect(Collectors.toList()); CascaderVO cascaderVO = new CascaderVO(binlogGroup, child); dataList.add(cascaderVO); } List voList = documentService.lambdaQuery().eq(Document::getType, "FLINK_OPTIONS").list().stream() - .map(d -> new CascaderVO(d.getName().replace("set ", ""),d.getName().replace("set ", ""))) + .map(d -> new CascaderVO( + d.getName().replace("set ", ""), d.getName().replace("set ", ""))) .collect(Collectors.toList()); CascaderVO cascaderVO = new CascaderVO(); diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java index b5042b9145..ddf6625f2e 100644 --- a/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java +++ b/dinky-admin/src/main/java/org/dinky/service/impl/TaskServiceImpl.java @@ -19,10 +19,6 @@ package org.dinky.service.impl; -import java.util.Collections; -import java.util.LinkedList; -import 
java.util.stream.Collectors; -import mssql.googlecode.concurrentlinkedhashmap.ConcurrentLinkedHashMap; import org.dinky.assertion.Asserts; import org.dinky.assertion.DinkyAssert; import org.dinky.config.Dialect; @@ -47,7 +43,6 @@ import org.dinky.data.model.ClusterConfiguration; import org.dinky.data.model.ClusterInstance; import org.dinky.data.model.DataBase; -import org.dinky.data.model.Resources; import org.dinky.data.model.Savepoints; import org.dinky.data.model.SystemConfiguration; import org.dinky.data.model.Task; @@ -58,7 +53,6 @@ import org.dinky.data.model.home.JobModelOverview; import org.dinky.data.model.home.JobTypeOverView; import org.dinky.data.model.job.JobInstance; -import org.dinky.data.model.udf.UDFManage; import org.dinky.data.model.udf.UDFTemplate; import org.dinky.data.result.Result; import org.dinky.data.result.SqlExplainResult; @@ -66,7 +60,6 @@ import org.dinky.explainer.lineage.LineageResult; import org.dinky.explainer.sqllineage.SQLLineageBuilder; import org.dinky.function.compiler.CustomStringJavaCompiler; -import org.dinky.function.data.model.UDF; import org.dinky.function.pool.UdfCodePool; import org.dinky.function.util.UDFUtil; import org.dinky.gateway.enums.SavePointStrategy; @@ -92,8 +85,8 @@ import org.dinky.service.UDFService; import org.dinky.service.UDFTemplateService; import org.dinky.service.UserService; -import org.dinky.service.resource.ResourcesService; import org.dinky.service.catalogue.CatalogueService; +import org.dinky.service.resource.ResourcesService; import org.dinky.service.task.BaseTask; import org.dinky.utils.FragmentVariableUtils; import org.dinky.utils.JsonUtils; @@ -115,6 +108,7 @@ import java.util.Map; import java.util.Objects; import java.util.Optional; +import java.util.stream.Collectors; import javax.annotation.Resource; @@ -718,11 +712,16 @@ public JobModelOverview getJobStreamingOrBatchModelOverview() { @Override public List getReleaseUDF() { return list(new LambdaQueryWrapper() - .in(Task::getDialect, Dialect.JAVA.getValue(), Dialect.SCALA.getValue(), Dialect.PYTHON.getValue()) - .eq(Task::getEnabled, 1) - .eq(Task::getStep, JobLifeCycle.PUBLISH.getValue())) + .in( + Task::getDialect, + Dialect.JAVA.getValue(), + Dialect.SCALA.getValue(), + Dialect.PYTHON.getValue()) + .eq(Task::getEnabled, 1) + .eq(Task::getStep, JobLifeCycle.PUBLISH.getValue())) .stream() - .filter(task -> Asserts.isNotNullString(task.getConfigJson().getUdfConfig().getClassName())) + .filter(task -> Asserts.isNotNullString( + task.getConfigJson().getUdfConfig().getClassName())) .collect(Collectors.toList()); } diff --git a/dinky-admin/src/main/java/org/dinky/service/impl/UDFServiceImpl.java b/dinky-admin/src/main/java/org/dinky/service/impl/UDFServiceImpl.java index b4f8f6b573..4819893d68 100644 --- a/dinky-admin/src/main/java/org/dinky/service/impl/UDFServiceImpl.java +++ b/dinky-admin/src/main/java/org/dinky/service/impl/UDFServiceImpl.java @@ -19,24 +19,6 @@ package org.dinky.service.impl; -import cn.hutool.core.collection.CollUtil; -import cn.hutool.core.io.FileUtil; -import cn.hutool.core.lang.Assert; -import cn.hutool.core.util.CharUtil; -import cn.hutool.core.util.StrUtil; -import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper; -import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; -import java.io.File; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.LinkedList; -import java.util.List; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import 
lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; -import org.apache.flink.table.catalog.FunctionLanguage; import org.dinky.config.Dialect; import org.dinky.data.model.Resources; import org.dinky.data.model.udf.UDFManage; @@ -44,14 +26,34 @@ import org.dinky.data.vo.UDFManageVO; import org.dinky.function.data.model.UDF; import org.dinky.mapper.UDFManageMapper; -import org.dinky.service.TaskService; import org.dinky.service.UDFService; import org.dinky.service.resource.ResourcesService; import org.dinky.trans.Operations; import org.dinky.utils.UDFUtils; + +import org.apache.flink.table.catalog.FunctionLanguage; + +import java.io.File; +import java.util.Collection; +import java.util.LinkedList; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; + import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; +import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper; +import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; + +import cn.hutool.core.collection.CollUtil; +import cn.hutool.core.io.FileUtil; +import cn.hutool.core.lang.Assert; +import cn.hutool.core.util.CharUtil; +import cn.hutool.core.util.StrUtil; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + /** * @since 0.6.8 */ @@ -61,7 +63,6 @@ public class UDFServiceImpl extends ServiceImpl implements UDFService { private final ResourcesService resourcesService; - @Override public boolean update(UDFManage entity) { Assert.notNull(entity, "Entity must be not null"); @@ -161,10 +162,16 @@ public List getUDFFromUdfManage() { // 1. get all resources List resourcesList = resourcesService.list(); // 2. get all udf from udf manage and then filter the udf by resources id in resources list - List collect = this.list().stream().filter(udf -> resourcesList.stream().anyMatch(resources -> resources.getId().equals(udf.getResourcesId()))) + List collect = this.list().stream() + .filter(udf -> resourcesList.stream() + .anyMatch(resources -> resources.getId().equals(udf.getResourcesId()))) .collect(Collectors.toList()); // 去重 根据 className 去重 || distinct by className - return collect.stream().collect(Collectors.toMap(UDFManage::getClassName, udf -> udf, (a, b) -> a)).values().stream().collect(Collectors.toList()); + return collect.stream() + .collect(Collectors.toMap(UDFManage::getClassName, udf -> udf, (a, b) -> a)) + .values() + .stream() + .collect(Collectors.toList()); } /** @@ -178,11 +185,25 @@ public List getAllUdfsToCascader(List userDefinedReleaseUdfs) { List staticUdfs = Operations.getCustomStaticUdfs(); // get all UDFs of UDFManage table - List udfManageDynamic = getUDFFromUdfManage().stream().map(UDFUtils::resourceUdfManageToUDF).collect(Collectors.toList()); + List udfManageDynamic = getUDFFromUdfManage().stream() + .map(UDFUtils::resourceUdfManageToUDF) + .collect(Collectors.toList()); - CascaderVO staticUdfCascaderVO = new CascaderVO("Flink Static UDF", staticUdfs.stream().map(udf -> new CascaderVO(udf.getClassName(),udf.getClassName())).collect(Collectors.toList())); - CascaderVO userDefinedUdfCascaderVO = new CascaderVO("User Defined Release UDF", userDefinedReleaseUdfs.stream().map(udf -> new CascaderVO(udf.getClassName(),udf.getClassName())).collect(Collectors.toList())); - CascaderVO udfManageDynamicCascaderVO = new CascaderVO("From UDF Manage", udfManageDynamic.stream().map(udf -> new CascaderVO(udf.getClassName(),udf.getClassName())).collect(Collectors.toList())); + 
CascaderVO staticUdfCascaderVO = new CascaderVO( + "Flink Static UDF", + staticUdfs.stream() + .map(udf -> new CascaderVO(udf.getClassName(), udf.getClassName())) + .collect(Collectors.toList())); + CascaderVO userDefinedUdfCascaderVO = new CascaderVO( + "User Defined Release UDF", + userDefinedReleaseUdfs.stream() + .map(udf -> new CascaderVO(udf.getClassName(), udf.getClassName())) + .collect(Collectors.toList())); + CascaderVO udfManageDynamicCascaderVO = new CascaderVO( + "From UDF Manage", + udfManageDynamic.stream() + .map(udf -> new CascaderVO(udf.getClassName(), udf.getClassName())) + .collect(Collectors.toList())); List result = new LinkedList<>(); result.add(staticUdfCascaderVO); diff --git a/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java b/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java index 5c666a85ce..5345f81968 100644 --- a/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java +++ b/dinky-admin/src/main/java/org/dinky/utils/UDFUtils.java @@ -48,11 +48,12 @@ public static UDF resourceUdfManageToUDF(UDFManage udfManage) { return UDF.builder() .name(udfManage.getName()) .className(udfManage.getClassName()) - .functionLanguage(FunctionLanguage.valueOf(udfManage.getLanguage().toUpperCase())) + .functionLanguage( + FunctionLanguage.valueOf(udfManage.getLanguage().toUpperCase())) .build(); } else { - throw new BusException("udf `class` config is null, Please check if the resource file to which this udf belongs exists"); + throw new BusException( + "udf `class` config is null, Please check if the resource file to which this udf belongs exists"); } } - } From c05f7303220cc9753761bd97e217e1c6bc248cfc Mon Sep 17 00:00:00 2001 From: Zzm0809 <934230207@qq.com> Date: Mon, 20 May 2024 15:40:59 +0800 Subject: [PATCH 36/47] Modify SQL file and README.md Signed-off-by: Zzm0809 <934230207@qq.com> --- .../src/main/resources/db/migration/README.md | 33 ++++++++++++++++++- .../db/migration/h2/R1.1.0__release.sql | 32 ------------------ .../db/migration/h2/V1.1.0__release.sql | 2 ++ .../db/migration/mysql/R1.1.0__release.sql | 2 ++ .../db/migration/mysql/V1.1.0__release.sql | 4 +++ .../db/migration/pgsql/R1.1.0__release.sql | 5 ++- .../db/migration/pgsql/V1.1.0__release.sql | 17 +++++++++- 7 files changed, 60 insertions(+), 35 deletions(-) delete mode 100644 dinky-admin/src/main/resources/db/migration/h2/R1.1.0__release.sql diff --git a/dinky-admin/src/main/resources/db/migration/README.md b/dinky-admin/src/main/resources/db/migration/README.md index 41b83d7f4e..6adbbd1904 100644 --- a/dinky-admin/src/main/resources/db/migration/README.md +++ b/dinky-admin/src/main/resources/db/migration/README.md @@ -20,6 +20,23 @@ - V{版本号}__{描述}.sql 中间是**两个下划线**,固定规则,不符合规则将无法执行 - 每个版本只能有一个 V{版本号}__{描述}.sql 文件,否则将无法执行, 不管是 DDL 还是 DML 统一放在一个文件中 + + +**升级脚本注意事项:** +- 如果你需要对某一个表添加字段,请不要使用`alter table add column`语句,使用如下语句: + - MySQL: `CALL add_column_if_not_exists('tableName', 'columnName', 'dataType', 'defaultValue', 'comment');` + - eg: `CALL add_column_if_not_exists('user', 'age', 'int', '0', 'age');` + - PostgresSQL: `SELECT add_column_if_not_exists('model_name', 'table_name', 'column_name', 'data_type', 'default_value', 'comment');` + - eg: `SELECT add_column_if_not_exists('public', 'user', 'age', 'int', '0', 'age');` + + +**其他注意事项:** +- 在你贡献代码时,如若涉及到了变更表结构,请添加回滚脚本,虽然 FlyWay 会有事务回滚操作,回滚脚本不会被 FlyWay 自动自行,但是为了本地调试测试时能方便进行回滚,所以添加回滚脚本 +- 由于数据库类型不同,可能存在差异,请根据实际需求进行迭代增加脚本内容 +- H2 数据库脚本需要按照规范进行正常的版本迭代(方便版本管理),但是 H2 数据库脚本不需要添加回滚脚本,因为 H2 数据库是内存数据库(默认程序启动时配置为内存模式,未持久化),每次启动都会重新创建,所以不需要回滚脚本 + +--- + # 
English ## Pre requirements @@ -37,4 +54,18 @@ **Attention:** - V{version number}__{description}.SQL has two underscores in the middle, which are fixed rules. If they do not comply with the rules, they cannot be executed -- Each version can only have one V{version number}__{description}.sql file, otherwise it will not be executed, whether it is DDL or DML, it will be placed in one file \ No newline at end of file +- Each version can only have one V{version number}__{description}.sql file, otherwise it will not be executed, whether it is DDL or DML, it will be placed in one file + + +**Upgrade script considerations:** +- If you need to add fields to a table, do not use the 'alter table add column' statement. Instead, use the following statement: + - MySQL: `CALL add_column_if_not_exists('tableName', 'columnName', 'dataType', 'defaultValue', 'comment');` + - eg: `CALL add_column_if_not_exists('user', 'age', 'int', '0', 'age');` + - PostgresSQL: `SELECT add_column_if_not_exists('model_name', 'table_name', 'column_name', 'data_type', 'default_value', 'comment');` + - eg: `SELECT add_column_if_not_exists('public', 'user', 'age', 'int', '0', 'age');` + + +**Other precautions:** +- When you contribute code, if it involves changing the table structure, please add a rollback script. Although FlyWay may have transaction rollback operations, the rollback script will not be automatically rolled back by FlyWay. However, in order to facilitate rollback during local debugging and testing, add a rollback script +- Due to different database types, there may be differences. Please iterate and add script content according to actual needs +- The H2 database script needs to perform normal version iteration according to the specifications (for easy version management), but the H2 database script does not need to add a rollback script because the H2 database is an in memory database (configured in memory mode by default when the program starts, not persistent), and will be recreated every time it starts, so there is no need to add a rollback script \ No newline at end of file diff --git a/dinky-admin/src/main/resources/db/migration/h2/R1.1.0__release.sql b/dinky-admin/src/main/resources/db/migration/h2/R1.1.0__release.sql deleted file mode 100644 index 5dfb9a80a8..0000000000 --- a/dinky-admin/src/main/resources/db/migration/h2/R1.1.0__release.sql +++ /dev/null @@ -1,32 +0,0 @@ --- note: Rolling back SQL statements is only necessary to perform a rollback operation in the event of an automatic upgrade failure. 
The following SQL statements need to be manually executed - -update dinky_sys_menu set `path`='/settings/alertrule', - `component`='./SettingCenter/AlertRule', - `perms`='settings:alertrule', - `parent_id`=6 -where `id` = 116; - -update dinky_sys_menu set `path`='/settings/alertrule/add', - `perms`='settings:alertrule:add' -where `id` = 117; -update dinky_sys_menu set `path`='/settings/alertrule/delete', - `perms`='settings:alertrule:delete' -where `id` = 118; -update dinky_sys_menu set `path`='/settings/alertrule/edit', - `perms`='settings:alertrule:edit' -where `id` = 119; - -ALTER TABLE dinky_task DROP COLUMN `first_level_owner`; -ALTER TABLE dinky_task DROP COLUMN `second_level_owners`; - - -ALTER TABLE dinky_udf_manage ALTER COLUMN class_name SET DATA TYPE VARCHAR(50); - -ALTER TABLE dinky_history ALTER COLUMN statement SET DATA TYPE text ; - -ALTER TABLE dinky_task ALTER COLUMN statement SET DATA TYPE text ; - -ALTER TABLE dinky_task_version ALTER COLUMN statement SET DATA TYPE text ; - --- Delete the 1.1.0 record in the _dinky_flyway_schema_history table -DELETE FROM `_dinky_flyway_schema_history` WHERE version = '1.1.0'; \ No newline at end of file diff --git a/dinky-admin/src/main/resources/db/migration/h2/V1.1.0__release.sql b/dinky-admin/src/main/resources/db/migration/h2/V1.1.0__release.sql index 50c8828669..eccc2ad07a 100644 --- a/dinky-admin/src/main/resources/db/migration/h2/V1.1.0__release.sql +++ b/dinky-admin/src/main/resources/db/migration/h2/V1.1.0__release.sql @@ -42,3 +42,5 @@ ALTER TABLE dinky_history ALTER COLUMN statement SET DATA TYPE LONGVARCHAR ; ALTER TABLE dinky_task ALTER COLUMN statement SET DATA TYPE LONGVARCHAR ; ALTER TABLE dinky_task_version ALTER COLUMN statement SET DATA TYPE LONGVARCHAR ; + +alter table dinky_udf_manage add column `language` VARCHAR(10) DEFAULT null comment 'udf language' ; diff --git a/dinky-admin/src/main/resources/db/migration/mysql/R1.1.0__release.sql b/dinky-admin/src/main/resources/db/migration/mysql/R1.1.0__release.sql index 9fb31b5a82..d232e70686 100644 --- a/dinky-admin/src/main/resources/db/migration/mysql/R1.1.0__release.sql +++ b/dinky-admin/src/main/resources/db/migration/mysql/R1.1.0__release.sql @@ -31,5 +31,7 @@ ALTER TABLE dinky_task_version CHANGE COLUMN `statement` `statement` longtext DE # Delete the 1.1.0 record in the _dinky_flyway_schema_history table DELETE FROM `_dinky_flyway_schema_history` WHERE version = '1.1.0'; +ALTER TABLE dinky_udf_manage DROP COLUMN `language`; + SET FOREIGN_KEY_CHECKS = 1; diff --git a/dinky-admin/src/main/resources/db/migration/mysql/V1.1.0__release.sql b/dinky-admin/src/main/resources/db/migration/mysql/V1.1.0__release.sql index 544654aa82..e59992a803 100644 --- a/dinky-admin/src/main/resources/db/migration/mysql/V1.1.0__release.sql +++ b/dinky-admin/src/main/resources/db/migration/mysql/V1.1.0__release.sql @@ -68,4 +68,8 @@ ALTER TABLE dinky_task CHANGE COLUMN `statement` `statement` mediumtext DEFAULT ALTER TABLE dinky_task_version CHANGE COLUMN `statement` `statement` mediumtext DEFAULT NULL COMMENT 'flink sql statement'; +alter table dinky_udf_manage add column `language` VARCHAR(10) DEFAULT null comment 'udf language' after class_name; + +CALL add_column_if_not_exists('dinky_udf_manage', 'language', 'varchar(10)', 'NULL', 'udf language'); + SET FOREIGN_KEY_CHECKS = 1; diff --git a/dinky-admin/src/main/resources/db/migration/pgsql/R1.1.0__release.sql b/dinky-admin/src/main/resources/db/migration/pgsql/R1.1.0__release.sql index 55ff70fc24..3630ef744b 100644 --- 
a/dinky-admin/src/main/resources/db/migration/pgsql/R1.1.0__release.sql +++ b/dinky-admin/src/main/resources/db/migration/pgsql/R1.1.0__release.sql @@ -20,4 +20,7 @@ ALTER TABLE public.dinky_task DROP COLUMN "second_level_owners"; -- Delete the 1.1.0 record in the _dinky_flyway_schema_history table -DELETE FROM public."_dinky_flyway_schema_history" WHERE version = '1.1.0'; \ No newline at end of file +DELETE FROM public."_dinky_flyway_schema_history" WHERE version = '1.1.0'; + +-- 删除 SELECT add_column_if_not_exists('public','dinky_udf_manage', 'language', 'varchar(10)', 'null', 'udf language'); +alter table public.dinky_udf_manage drop column "language"; diff --git a/dinky-admin/src/main/resources/db/migration/pgsql/V1.1.0__release.sql b/dinky-admin/src/main/resources/db/migration/pgsql/V1.1.0__release.sql index 2a9701a2ef..ac4f3bc59a 100644 --- a/dinky-admin/src/main/resources/db/migration/pgsql/V1.1.0__release.sql +++ b/dinky-admin/src/main/resources/db/migration/pgsql/V1.1.0__release.sql @@ -116,4 +116,19 @@ SELECT add_column_if_not_exists('public','dinky_task', 'second_level_owners', 'v update public.dinky_task set "first_level_owner" = "creator"; -UPDATE public.dinky_user SET "password" = 'f4b3a484ee745b98d64cd69c429b2aa2' WHERE "id" =1 and "password"= '21232f297a57a5a743894a0e4a801fc3'; \ No newline at end of file +UPDATE public.dinky_user SET "password" = 'f4b3a484ee745b98d64cd69c429b2aa2' WHERE "id" =1 and "password"= '21232f297a57a5a743894a0e4a801fc3'; + +SELECT add_column_if_not_exists('public','dinky_udf_manage', 'language', 'varchar(10)', 'null', 'udf language'); + +UPDATE + dinky_udf_manage duml +SET + "language" = + CASE + WHEN r.file_name LIKE '%.zip' OR r.file_name LIKE '%.py' THEN 'python' + WHEN r.file_name LIKE '%.jar' THEN 'java' + ELSE 'unknown' + END + FROM dinky_resources r +WHERE + duml.resources_id = r.id; From 9bfd97218bd1af38b07987c87ef1eb19a996054b Mon Sep 17 00:00:00 2001 From: Zzm0809 <934230207@qq.com> Date: Mon, 20 May 2024 15:59:38 +0800 Subject: [PATCH 37/47] Add Meta Information Table for Upgrading to 1.0.0 Signed-off-by: Zzm0809 <934230207@qq.com> --- .../upgrade/1.1.0_schema/mysql/dinky_ddl.sql | 29 +++++++------ .../upgrade/1.1.0_schema/mysql/dinky_dml.sql | 42 ++----------------- .../1.1.0_schema/postgre/dinky_ddl.sql | 31 +++++++------- .../1.1.0_schema/postgre/dinky_dml.sql | 41 +++--------------- 4 files changed, 40 insertions(+), 103 deletions(-) diff --git a/script/sql/upgrade/1.1.0_schema/mysql/dinky_ddl.sql b/script/sql/upgrade/1.1.0_schema/mysql/dinky_ddl.sql index 584f6e159c..eb29ae771f 100644 --- a/script/sql/upgrade/1.1.0_schema/mysql/dinky_ddl.sql +++ b/script/sql/upgrade/1.1.0_schema/mysql/dinky_ddl.sql @@ -20,21 +20,20 @@ SET NAMES utf8mb4; SET FOREIGN_KEY_CHECKS = 0; --- Increase class_name column's length from 50 to 100. 
-ALTER TABLE dinky_udf_manage CHANGE COLUMN class_name class_name VARCHAR(100) null DEFAULT null COMMENT 'Complete class name'; +CREATE TABLE `_dinky_flyway_schema_history` ( + `installed_rank` int NOT NULL, + `version` varchar(50) COLLATE utf8mb4_general_ci DEFAULT NULL, + `description` varchar(200) COLLATE utf8mb4_general_ci NOT NULL, + `type` varchar(20) COLLATE utf8mb4_general_ci NOT NULL, + `script` varchar(1000) COLLATE utf8mb4_general_ci NOT NULL, + `checksum` int DEFAULT NULL, + `installed_by` varchar(100) COLLATE utf8mb4_general_ci NOT NULL, + `installed_on` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `execution_time` int NOT NULL, + `success` tinyint(1) NOT NULL, + PRIMARY KEY (`installed_rank`), + KEY `_dinky_flyway_schema_history_s_idx` (`success`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; -ALTER TABLE dinky_task - add COLUMN `first_level_owner` int DEFAULT NULL comment 'primary responsible person id'; - -ALTER TABLE dinky_task - add COLUMN `second_level_owners` varchar(128) DEFAULT NULL comment 'list of secondary responsible persons ids'; - -alter table dinky_udf_manage add column `language` VARCHAR(10) DEFAULT null comment 'udf language' after class_name; - -ALTER TABLE dinky_history CHANGE COLUMN `statement` `statement` mediumtext DEFAULT NULL COMMENT 'statement set'; - -ALTER TABLE dinky_task CHANGE COLUMN `statement` `statement` mediumtext DEFAULT NULL COMMENT 'sql statement'; - -ALTER TABLE dinky_task_version CHANGE COLUMN `statement` `statement` mediumtext DEFAULT NULL COMMENT 'flink sql statement'; SET FOREIGN_KEY_CHECKS = 1; diff --git a/script/sql/upgrade/1.1.0_schema/mysql/dinky_dml.sql b/script/sql/upgrade/1.1.0_schema/mysql/dinky_dml.sql index 6060c924dd..45c2a7e6b4 100644 --- a/script/sql/upgrade/1.1.0_schema/mysql/dinky_dml.sql +++ b/script/sql/upgrade/1.1.0_schema/mysql/dinky_dml.sql @@ -22,43 +22,9 @@ SET NAMES utf8mb4; SET FOREIGN_KEY_CHECKS = 0; begin; - -update dinky_sys_menu -set `path`='/registration/alert/rule', - `component`='./RegCenter/Alert/AlertRule', - `perms`='registration:alert:rule', - `parent_id`=12 -where `id` = 116; - -update dinky_sys_menu -set `path`='/registration/alert/rule/add', - `perms`='registration:alert:rule:add' -where `id` = 117; - -update dinky_sys_menu -set `path`='/registration/alert/rule/delete', - `perms`='registration:alert:rule:delete' -where `id` = 118; - -update dinky_sys_menu -set `path`='/registration/alert/rule/edit', - `perms`='registration:alert:rule:edit' -where `id` = 119; - -update dinky_task set first_level_owner = creator; - - -UPDATE - dinky_udf_manage duml - JOIN - dinky_resources r ON duml.resources_id = r.id -SET - duml.`language` = - CASE - WHEN r.file_name LIKE '%.zip' OR r.file_name LIKE '%.py' THEN 'python' - WHEN r.file_name LIKE '%.jar' THEN 'java' - ELSE 'unknown' - END; - +INSERT INTO `_dinky_flyway_schema_history` (`installed_rank`, `version`, `description`, `type`, `script`, `checksum`, + `installed_by`, `installed_on`, `execution_time`, `success`) +VALUES (1, '1.0.2', '<< Flyway Baseline >>', 'BASELINE', '<< Flyway Baseline >>', NULL, 'root', '2024-05-20 01:32:29', + 0, 1); commit ; diff --git a/script/sql/upgrade/1.1.0_schema/postgre/dinky_ddl.sql b/script/sql/upgrade/1.1.0_schema/postgre/dinky_ddl.sql index fda000f648..076f06e93a 100644 --- a/script/sql/upgrade/1.1.0_schema/postgre/dinky_ddl.sql +++ b/script/sql/upgrade/1.1.0_schema/postgre/dinky_ddl.sql @@ -17,18 +17,21 @@ * */ --- Increase class_name column's length from 50 to 100. 
-ALTER TABLE dinky_udf_manage ALTER COLUMN class_name TYPE VARCHAR(100); - -COMMENT ON COLUMN dinky_udf_manage.class_name IS 'Complete class name'; - -alter table dinky_task add column first_level_owner int; -alter table dinky_task add column second_level_owners varchar(128); -COMMENT ON COLUMN dinky_task.first_level_owner IS 'primary responsible person id'; -COMMENT ON COLUMN dinky_task.second_level_owners IS 'list of secondary responsible persons ids'; - - - -ALTER TABLE dinky_udf_manage ADD COLUMN "language" VARCHAR(10) DEFAULT NULL; -COMMENT ON COLUMN dinky_udf_manage.language IS 'udf language'; +CREATE TABLE "public"."_dinky_flyway_schema_history" ( + "installed_rank" int4 NOT NULL, + "version" varchar(50) COLLATE "pg_catalog"."default", + "description" varchar(200) COLLATE "pg_catalog"."default" NOT NULL, + "type" varchar(20) COLLATE "pg_catalog"."default" NOT NULL, + "script" varchar(1000) COLLATE "pg_catalog"."default" NOT NULL, + "checksum" int4, + "installed_by" varchar(100) COLLATE "pg_catalog"."default" NOT NULL, + "installed_on" timestamp(6) NOT NULL DEFAULT now(), + "execution_time" int4 NOT NULL, + "success" bool NOT NULL DEFAULT false, + CONSTRAINT "_dinky_flyway_schema_history_pk" PRIMARY KEY ("installed_rank") +) +; +CREATE INDEX "_dinky_flyway_schema_history_s_idx" ON "public"."_dinky_flyway_schema_history" USING btree ( + "success" "pg_catalog"."bool_ops" ASC NULLS LAST + ); \ No newline at end of file diff --git a/script/sql/upgrade/1.1.0_schema/postgre/dinky_dml.sql b/script/sql/upgrade/1.1.0_schema/postgre/dinky_dml.sql index 31a592df0b..f58e7101b8 100644 --- a/script/sql/upgrade/1.1.0_schema/postgre/dinky_dml.sql +++ b/script/sql/upgrade/1.1.0_schema/postgre/dinky_dml.sql @@ -18,39 +18,8 @@ */ -update dinky_sys_menu -set "path"='/registration/alert/rule', - "component"='./RegCenter/Alert/AlertRule', - "perms"='registration:alert:rule', - "parent_id"=12 -where "id" = 116; - -update dinky_sys_menu -set "path"='/registration/alert/rule/add', - "perms"='registration:alert:rule:add' -where "id" = 117; - -update dinky_sys_menu -set "path"='/registration/alert/rule/delete', - "perms"='registration:alert:rule:delete' -where "id" = 118; - -update dinky_sys_menu -set "path"='/registration/alert/rule/edit', - "perms"='registration:alert:rule:edit' -where "id" = 119; - -update dinky_task set "first_level_owner" = "creator"; - -UPDATE - dinky_udf_manage duml -SET - "language" = - CASE - WHEN r.file_name LIKE '%.zip' OR r.file_name LIKE '%.py' THEN 'python' - WHEN r.file_name LIKE '%.jar' THEN 'java' - ELSE 'unknown' - END - FROM dinky_resources r -WHERE - duml.resources_id = r.id; +INSERT INTO "public"."_dinky_flyway_schema_history" ("installed_rank", "version", "description", "type", "script", + "checksum", "installed_by", "installed_on", "execution_time", + "success") +VALUES (1, '1.0.2', '<< Flyway Baseline >>', 'BASELINE', '<< Flyway Baseline >>', NULL, 'null', + '2024-05-17 17:25:43.682212', 0, 't'); \ No newline at end of file From aade05a3795b5af2dbb4732119788547089e1848 Mon Sep 17 00:00:00 2001 From: Zzm0809 <934230207@qq.com> Date: Mon, 20 May 2024 15:35:09 +0800 Subject: [PATCH 38/47] Optimize the length of resource names (#3513) Signed-off-by: Zzm0809 <934230207@qq.com> --- .../src/main/resources/db/migration/h2/V1.1.0__release.sql | 1 + .../main/resources/db/migration/mysql/R1.1.0__release.sql | 3 +++ .../main/resources/db/migration/mysql/V1.1.0__release.sql | 4 ++++ .../main/resources/db/migration/pgsql/R1.1.0__release.sql | 5 +++++ 
.../main/resources/db/migration/pgsql/V1.1.0__release.sql | 2 ++ 5 files changed, 15 insertions(+) diff --git a/dinky-admin/src/main/resources/db/migration/h2/V1.1.0__release.sql b/dinky-admin/src/main/resources/db/migration/h2/V1.1.0__release.sql index eccc2ad07a..aeadfe6eb9 100644 --- a/dinky-admin/src/main/resources/db/migration/h2/V1.1.0__release.sql +++ b/dinky-admin/src/main/resources/db/migration/h2/V1.1.0__release.sql @@ -43,4 +43,5 @@ ALTER TABLE dinky_task ALTER COLUMN statement SET DATA TYPE LONGVARCHAR ; ALTER TABLE dinky_task_version ALTER COLUMN statement SET DATA TYPE LONGVARCHAR ; +ALTER TABLE dinky_resources ALTER COLUMN `file_name` SET DATA TYPE TEXT; alter table dinky_udf_manage add column `language` VARCHAR(10) DEFAULT null comment 'udf language' ; diff --git a/dinky-admin/src/main/resources/db/migration/mysql/R1.1.0__release.sql b/dinky-admin/src/main/resources/db/migration/mysql/R1.1.0__release.sql index d232e70686..b40f34f962 100644 --- a/dinky-admin/src/main/resources/db/migration/mysql/R1.1.0__release.sql +++ b/dinky-admin/src/main/resources/db/migration/mysql/R1.1.0__release.sql @@ -34,4 +34,7 @@ DELETE FROM `_dinky_flyway_schema_history` WHERE version = '1.1.0'; ALTER TABLE dinky_udf_manage DROP COLUMN `language`; +ALTER TABLE dinky_resources CHANGE COLUMN `file_name` `file_name` varchar(64) DEFAULT NULL COMMENT 'file name'; + + SET FOREIGN_KEY_CHECKS = 1; diff --git a/dinky-admin/src/main/resources/db/migration/mysql/V1.1.0__release.sql b/dinky-admin/src/main/resources/db/migration/mysql/V1.1.0__release.sql index e59992a803..869869f299 100644 --- a/dinky-admin/src/main/resources/db/migration/mysql/V1.1.0__release.sql +++ b/dinky-admin/src/main/resources/db/migration/mysql/V1.1.0__release.sql @@ -68,6 +68,10 @@ ALTER TABLE dinky_task CHANGE COLUMN `statement` `statement` mediumtext DEFAULT ALTER TABLE dinky_task_version CHANGE COLUMN `statement` `statement` mediumtext DEFAULT NULL COMMENT 'flink sql statement'; +ALTER TABLE dinky_resources CHANGE COLUMN `file_name` `file_name` text DEFAULT NULL COMMENT 'file name'; + + + alter table dinky_udf_manage add column `language` VARCHAR(10) DEFAULT null comment 'udf language' after class_name; CALL add_column_if_not_exists('dinky_udf_manage', 'language', 'varchar(10)', 'NULL', 'udf language'); diff --git a/dinky-admin/src/main/resources/db/migration/pgsql/R1.1.0__release.sql b/dinky-admin/src/main/resources/db/migration/pgsql/R1.1.0__release.sql index 3630ef744b..7943a69509 100644 --- a/dinky-admin/src/main/resources/db/migration/pgsql/R1.1.0__release.sql +++ b/dinky-admin/src/main/resources/db/migration/pgsql/R1.1.0__release.sql @@ -22,5 +22,10 @@ ALTER TABLE public.dinky_task DROP COLUMN "second_level_owners"; -- Delete the 1.1.0 record in the _dinky_flyway_schema_history table DELETE FROM public."_dinky_flyway_schema_history" WHERE version = '1.1.0'; +UPDATE public.dinky_user SET "password" = '21232f297a57a5a743894a0e4a801fc3' WHERE "id" =1 and "password"= 'f4b3a484ee745b98d64cd69c429b2aa2'; + + +ALTER TABLE public.dinky_resources ALTER COLUMN file_name TYPE varchar(64); + -- 删除 SELECT add_column_if_not_exists('public','dinky_udf_manage', 'language', 'varchar(10)', 'null', 'udf language'); alter table public.dinky_udf_manage drop column "language"; diff --git a/dinky-admin/src/main/resources/db/migration/pgsql/V1.1.0__release.sql b/dinky-admin/src/main/resources/db/migration/pgsql/V1.1.0__release.sql index ac4f3bc59a..8f47b01eea 100644 --- a/dinky-admin/src/main/resources/db/migration/pgsql/V1.1.0__release.sql +++ 
b/dinky-admin/src/main/resources/db/migration/pgsql/V1.1.0__release.sql
@@ -118,6 +118,8 @@ update public.dinky_task set "first_level_owner" = "creator";
 
 UPDATE public.dinky_user SET "password" = 'f4b3a484ee745b98d64cd69c429b2aa2' WHERE "id" =1 and "password"= '21232f297a57a5a743894a0e4a801fc3';
 
+ALTER TABLE public.dinky_resources ALTER COLUMN file_name TYPE TEXT;
+
 SELECT add_column_if_not_exists('public','dinky_udf_manage', 'language', 'varchar(10)', 'null', 'udf language');
 
 UPDATE

From 95c404536e0ecf1bb4f1dda84591955cc6addb89 Mon Sep 17 00:00:00 2001
From: Zzm0809 <934230207@qq.com>
Date: Mon, 20 May 2024 15:36:46 +0800
Subject: [PATCH 39/47] [Bug] Fix inability to login when tenant is not set up (#3512)

Signed-off-by: Zzm0809 <934230207@qq.com>
Co-authored-by: Zzm0809
---
 .../org/dinky/context/TenantContextHolder.java    |  3 +--
 .../mybatis/handler/DateMetaObjectHandler.java    | 15 +++++++++++++--
 2 files changed, 14 insertions(+), 4 deletions(-)

diff --git a/dinky-admin/src/main/java/org/dinky/context/TenantContextHolder.java b/dinky-admin/src/main/java/org/dinky/context/TenantContextHolder.java
index 950a18b616..1af96f3a39 100644
--- a/dinky-admin/src/main/java/org/dinky/context/TenantContextHolder.java
+++ b/dinky-admin/src/main/java/org/dinky/context/TenantContextHolder.java
@@ -40,8 +40,7 @@ public static void set(Object value) {
     }
 
     public static Object get() {
-        return Optional.ofNullable(TENANT_CONTEXT.get())
-                .orElseThrow(() -> new IllegalStateException("current Tenant context is not set"));
+        return TENANT_CONTEXT.get();
     }
 
     public static void clear() {
diff --git a/dinky-admin/src/main/java/org/dinky/mybatis/handler/DateMetaObjectHandler.java b/dinky-admin/src/main/java/org/dinky/mybatis/handler/DateMetaObjectHandler.java
index 09a84aca81..0961ee31b1 100644
--- a/dinky-admin/src/main/java/org/dinky/mybatis/handler/DateMetaObjectHandler.java
+++ b/dinky-admin/src/main/java/org/dinky/mybatis/handler/DateMetaObjectHandler.java
@@ -30,6 +30,7 @@
 
 import cn.dev33.satoken.spring.SpringMVCUtil;
 import cn.dev33.satoken.stp.StpUtil;
+import cn.hutool.core.lang.Opt;
 import lombok.extern.slf4j.Slf4j;
 
 /**
@@ -95,8 +96,18 @@ private void setFillFieldValue(MetaObject metaObject, int userId) {
             setFieldValByName(mybatisPlusFillProperties.getOperatorField(), userId, metaObject);
         }
         if (tenantId == null) {
-            int loginTenantId = (Integer) TenantContextHolder.get();
-            setFieldValByName(mybatisPlusFillProperties.getTenantIdField(), loginTenantId, metaObject);
+            try {
+                Opt loginTenantId = Opt.ofNullable(TenantContextHolder.get());
+                loginTenantId.ifPresent(loginTenantId1 -> {
+                    if (loginTenantId1 instanceof Integer) {
+                        setFieldValByName(mybatisPlusFillProperties.getTenantIdField(), loginTenantId1, metaObject);
+                    }
+                });
+            } catch (Exception e) {
+                log.warn(
+                        "Ignore setting the tenantId field because the tenantId can't be obtained. Please check if your account is logged in normally or if it has been taken offline",
+                        e);
+            }
         }
     }
 

From f2a6d773d158213feb3b92c938f7841eae53307d Mon Sep 17 00:00:00 2001
From: ufoe
Date: Wed, 22 May 2024 16:28:45 +0800
Subject: [PATCH 40/47] [Doc] Jar package task submission Demo bug fix (#3517)

---
 docs/docs/extend/expand_statements/execute_jar.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/docs/extend/expand_statements/execute_jar.md b/docs/docs/extend/expand_statements/execute_jar.md
index e40565b56b..08a60c3a7d 100644
--- a/docs/docs/extend/expand_statements/execute_jar.md
+++ b/docs/docs/extend/expand_statements/execute_jar.md
@@ -35,7 +35,7 @@ EXECUTE JAR WITH
( ```sql EXECUTE JAR WITH ( 'uri'='rs:/jar/flink/demo/SocketWindowWordCount.jar', -'main-class'='org.apache.flink.streaming.examples.socket', +'main-class'='org.apache.flink.streaming.examples.socket.SocketWindowWordCount', 'args'=' --hostname localhost ' ); ``` From 9b3e8d886cab3fd6808a8a32578675c7bc93882d Mon Sep 17 00:00:00 2001 From: XiuhongTang Date: Wed, 22 May 2024 22:44:34 +0800 Subject: [PATCH 41/47] [feature] add kubernetes deploy (#3516) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: 唐修红 --- {docker => deploy/docker}/.env | 0 {docker => deploy/docker}/Dockerfile | 0 deploy/docker/DockerfileDinkyFlink | 42 ++ {docker => deploy/docker}/DockerfileMysql | 0 {docker => deploy/docker}/DockerfileWeb | 0 {docker => deploy/docker}/README.md | 0 {docker => deploy/docker}/README_zh_CN.md | 0 .../docker}/docker-compose.dev.yml | 8 +- {docker => deploy/docker}/docker-compose.yml | 0 {docker => deploy/docker}/web/default.conf | 0 {docker => deploy/docker}/web/nginx.conf | 0 deploy/kubernetes/helm/dinky/.helmignore | 23 ++ deploy/kubernetes/helm/dinky/Chart.yaml | 26 ++ .../helm/dinky/conf/application-flyway.yml | 33 ++ .../helm/dinky/conf/application-h2.yml | 33 ++ .../helm/dinky/conf/application-jmx.yml | 42 ++ .../helm/dinky/conf/application-mysql.yml | 23 ++ .../helm/dinky/conf/application-pgsql.yml | 24 ++ .../helm/dinky/conf/application.yml | 167 ++++++++ .../dinky/conf/jmx/jmx_exporter_config.yaml | 14 + deploy/kubernetes/helm/dinky/conf/log4j2.xml | 86 +++++ .../kubernetes/helm/dinky/templates/NOTES.txt | 34 ++ .../helm/dinky/templates/_helpers.tpl | 71 ++++ .../helm/dinky/templates/configmap.yaml | 82 ++++ .../helm/dinky/templates/dinky.yaml | 169 ++++++++ .../helm/dinky/templates/ingress.yaml | 77 ++++ .../kubernetes/helm/dinky/templates/rbac.yaml | 86 +++++ .../secret-external-mysql-database.yaml | 13 + .../secret-external-postgres-database.yaml | 13 + .../templates/secret-mysql-database.yaml | 11 + .../templates/secret-postgres-database.yaml | 11 + .../templates/service-external-database.yaml | 22 ++ .../helm/dinky/templates/service.yaml | 31 ++ .../helm/dinky/templates/serviceaccount.yaml | 30 ++ deploy/kubernetes/helm/dinky/values.yaml | 126 ++++++ dinky-assembly/src/main/assembly/package.xml | 7 + docker/DockerfileDinkyFlink | 24 -- docs/docs/deploy_guide/kubernetes_deploy.md | 363 ++++++++++++++++++ 38 files changed, 1663 insertions(+), 28 deletions(-) rename {docker => deploy/docker}/.env (100%) rename {docker => deploy/docker}/Dockerfile (100%) create mode 100644 deploy/docker/DockerfileDinkyFlink rename {docker => deploy/docker}/DockerfileMysql (100%) rename {docker => deploy/docker}/DockerfileWeb (100%) rename {docker => deploy/docker}/README.md (100%) rename {docker => deploy/docker}/README_zh_CN.md (100%) rename {docker => deploy/docker}/docker-compose.dev.yml (87%) rename {docker => deploy/docker}/docker-compose.yml (100%) rename {docker => deploy/docker}/web/default.conf (100%) rename {docker => deploy/docker}/web/nginx.conf (100%) create mode 100755 deploy/kubernetes/helm/dinky/.helmignore create mode 100755 deploy/kubernetes/helm/dinky/Chart.yaml create mode 100755 deploy/kubernetes/helm/dinky/conf/application-flyway.yml create mode 100755 deploy/kubernetes/helm/dinky/conf/application-h2.yml create mode 100755 deploy/kubernetes/helm/dinky/conf/application-jmx.yml create mode 100755 deploy/kubernetes/helm/dinky/conf/application-mysql.yml create mode 100755 deploy/kubernetes/helm/dinky/conf/application-pgsql.yml 
create mode 100755 deploy/kubernetes/helm/dinky/conf/application.yml create mode 100755 deploy/kubernetes/helm/dinky/conf/jmx/jmx_exporter_config.yaml create mode 100755 deploy/kubernetes/helm/dinky/conf/log4j2.xml create mode 100755 deploy/kubernetes/helm/dinky/templates/NOTES.txt create mode 100755 deploy/kubernetes/helm/dinky/templates/_helpers.tpl create mode 100755 deploy/kubernetes/helm/dinky/templates/configmap.yaml create mode 100755 deploy/kubernetes/helm/dinky/templates/dinky.yaml create mode 100755 deploy/kubernetes/helm/dinky/templates/ingress.yaml create mode 100755 deploy/kubernetes/helm/dinky/templates/rbac.yaml create mode 100644 deploy/kubernetes/helm/dinky/templates/secret-external-mysql-database.yaml create mode 100644 deploy/kubernetes/helm/dinky/templates/secret-external-postgres-database.yaml create mode 100644 deploy/kubernetes/helm/dinky/templates/secret-mysql-database.yaml create mode 100644 deploy/kubernetes/helm/dinky/templates/secret-postgres-database.yaml create mode 100755 deploy/kubernetes/helm/dinky/templates/service-external-database.yaml create mode 100755 deploy/kubernetes/helm/dinky/templates/service.yaml create mode 100755 deploy/kubernetes/helm/dinky/templates/serviceaccount.yaml create mode 100755 deploy/kubernetes/helm/dinky/values.yaml delete mode 100644 docker/DockerfileDinkyFlink create mode 100644 docs/docs/deploy_guide/kubernetes_deploy.md diff --git a/docker/.env b/deploy/docker/.env similarity index 100% rename from docker/.env rename to deploy/docker/.env diff --git a/docker/Dockerfile b/deploy/docker/Dockerfile similarity index 100% rename from docker/Dockerfile rename to deploy/docker/Dockerfile diff --git a/deploy/docker/DockerfileDinkyFlink b/deploy/docker/DockerfileDinkyFlink new file mode 100644 index 0000000000..744b408dd1 --- /dev/null +++ b/deploy/docker/DockerfileDinkyFlink @@ -0,0 +1,42 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+#
+FROM openjdk:11 as build-stage
+ARG FLINK_VERSION=1.17
+ENV DINKY_VERSION=1.0.2
+ADD dinky-release-${FLINK_VERSION}-${DINKY_VERSION}.tar.gz /
+
+USER root
+RUN mv /dinky-release-${FLINK_VERSION}-${DINKY_VERSION} /dinky/
+RUN mkdir -p /dinky/run && mkdir -p /dinky/logs && touch /dinky/logs/dinky.log
+RUN chmod -R 777 /dinky/
+
+
+FROM flink:scala_2.12-java11 as required-stage
+
+FROM openjdk:11 as production-stage
+ARG FLINK_VERSION=1-17
+COPY --from=build-stage /dinky/ /dinky/
+COPY --from=required-stage /opt/flink/lib/*.jar /dinky/extends/flink${FLINK_VERSION}/
+RUN rm -f /dinky/extends/flink${FLINK_VERSION}/flink-table-planner-loader*.jar
+COPY --from=required-stage /opt/flink/opt/flink-table-planner*.jar /dinky/extends/flink${FLINK_VERSION}/
+ADD mysql-connector-j-8.1.0.jar /dinky/extends/flink${FLINK_VERSION}/
+WORKDIR /dinky/
+
+EXPOSE 8888
+
+CMD ./auto.sh restart && tail -f /dinky/logs/dinky.log
\ No newline at end of file
diff --git a/docker/DockerfileMysql b/deploy/docker/DockerfileMysql
similarity index 100%
rename from docker/DockerfileMysql
rename to deploy/docker/DockerfileMysql
diff --git a/docker/DockerfileWeb b/deploy/docker/DockerfileWeb
similarity index 100%
rename from docker/DockerfileWeb
rename to deploy/docker/DockerfileWeb
diff --git a/docker/README.md b/deploy/docker/README.md
similarity index 100%
rename from docker/README.md
rename to deploy/docker/README.md
diff --git a/docker/README_zh_CN.md b/deploy/docker/README_zh_CN.md
similarity index 100%
rename from docker/README_zh_CN.md
rename to deploy/docker/README_zh_CN.md
diff --git a/docker/docker-compose.dev.yml b/deploy/docker/docker-compose.dev.yml
similarity index 87%
rename from docker/docker-compose.dev.yml
rename to deploy/docker/docker-compose.dev.yml
index 4cf467fb04..4275d2e1b0 100644
--- a/docker/docker-compose.dev.yml
+++ b/deploy/docker/docker-compose.dev.yml
@@ -3,13 +3,13 @@ version: "3.9"
 services:
   mysql:
     build:
-      context: ..
+      context: ../..
       dockerfile: ${PWD:-.}/DockerfileMysql
 
   dinky:
     image: dinky-standalone-server:${DINKY_VERSION}
     build:
-      context: ..
+      context: ../..
       dockerfile: ${PWD:-.}/Dockerfile
       args:
         - DINKY_VERSION
@@ -18,7 +18,7 @@ services:
 
   standalone:
     build:
-      context: ..
+      context: ../..
       dockerfile: ${PWD:-.}/DockerfileDinkyFlink
       args:
         - FLINK_BIG_VERSION
@@ -31,5 +31,5 @@ services:
 
   web:
     build:
-      context: ..
+      context: ../..
       dockerfile: ${PWD:-.}/DockerfileWeb
diff --git a/docker/docker-compose.yml b/deploy/docker/docker-compose.yml
similarity index 100%
rename from docker/docker-compose.yml
rename to deploy/docker/docker-compose.yml
diff --git a/docker/web/default.conf b/deploy/docker/web/default.conf
similarity index 100%
rename from docker/web/default.conf
rename to deploy/docker/web/default.conf
diff --git a/docker/web/nginx.conf b/deploy/docker/web/nginx.conf
similarity index 100%
rename from docker/web/nginx.conf
rename to deploy/docker/web/nginx.conf
diff --git a/deploy/kubernetes/helm/dinky/.helmignore b/deploy/kubernetes/helm/dinky/.helmignore
new file mode 100755
index 0000000000..0e8a0eb36f
--- /dev/null
+++ b/deploy/kubernetes/helm/dinky/.helmignore
@@ -0,0 +1,23 @@
+# Patterns to ignore when building packages.
+# This supports shell glob matching, relative path matching, and
+# negation (prefixed with !). Only one pattern per line.
+.DS_Store
+# Common VCS dirs
+.git/
+.gitignore
+.bzr/
+.bzrignore
+.hg/
+.hgignore
+.svn/
+# Common backup files
+*.swp
+*.bak
+*.tmp
+*.orig
+*~
+# Various IDEs
+.project
+.idea/
+*.tmproj
+.vscode/
diff --git a/deploy/kubernetes/helm/dinky/Chart.yaml b/deploy/kubernetes/helm/dinky/Chart.yaml
new file mode 100755
index 0000000000..005558e732
--- /dev/null
+++ b/deploy/kubernetes/helm/dinky/Chart.yaml
@@ -0,0 +1,26 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+
+apiVersion: v2
+name: dinky
+description: Dinky is built for Apache Flink, making Flink SQL silky smooth
+home: https://www.dinky.org.cn
+icon: https://www.dinky.org.cn/side_dinky.svg
+type: application
+version: 1.0.0
+appVersion: 1.0.0
diff --git a/deploy/kubernetes/helm/dinky/conf/application-flyway.yml b/deploy/kubernetes/helm/dinky/conf/application-flyway.yml
new file mode 100755
index 0000000000..8eafc7acd0
--- /dev/null
+++ b/deploy/kubernetes/helm/dinky/conf/application-flyway.yml
@@ -0,0 +1,33 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+spring:
+  flyway:
+    enabled: true # Whether Flyway is enabled
+    group: true # Enable grouping
+    locations:
+      - classpath:db/migration/${spring.profiles.active}
+    table: _dinky_flyway_schema_history
+    placeholder-replacement: false # Placeholder replacement
+    baseline-on-migrate: true # Baseline during migration
+    baseline-version: 1.0.2 # Baseline version
+    validate-on-migrate: false # Verify during migration
+    placeholder-prefix: '##{'
+    placeholder-suffix: '}##'
+    fail-on-missing-locations: true # Whether to fail if a migration location does not exist
+
diff --git a/deploy/kubernetes/helm/dinky/conf/application-h2.yml b/deploy/kubernetes/helm/dinky/conf/application-h2.yml
new file mode 100755
index 0000000000..f9ea212b83
--- /dev/null
+++ b/deploy/kubernetes/helm/dinky/conf/application-h2.yml
@@ -0,0 +1,33 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +spring: + datasource: + driver-class-name: org.h2.Driver + url: jdbc:h2:mem:dinky;MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true; + # Note: Since dinky version 1.1.0, Flyway has been integrated, so h2 defaults to memory mode and no longer uses file mode (persistence). + # File mode can cause Flyway to fail to execute duplicate column addition operations correctly + # as h2 does not support stored procedures and cannot determine the existence of columns through stored procedures. Therefore, please do not use file mode +# url: jdbc:h2:./tmp/db/h2;MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true;AUTO_SERVER=TRUE;AUTO_SERVER_PORT=29099; + druid: + initial-size: 1 + username: dinky + password: dinky + h2: + console: + enabled: true + path: /api/h2 \ No newline at end of file diff --git a/deploy/kubernetes/helm/dinky/conf/application-jmx.yml b/deploy/kubernetes/helm/dinky/conf/application-jmx.yml new file mode 100755 index 0000000000..0123634ab4 --- /dev/null +++ b/deploy/kubernetes/helm/dinky/conf/application-jmx.yml @@ -0,0 +1,42 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + + +management: + endpoints: + web: + exposure: + include: "*" + exclude: + - heapdump + - threaddump + - env + enabled-by-default: true + jmx: + exposure: + include: "*" + health: + redis: + enabled: false + ldap: + enabled: false + endpoint: + health: + show-details: always + metrics: + tags: + application: ${spring.application.name} \ No newline at end of file diff --git a/deploy/kubernetes/helm/dinky/conf/application-mysql.yml b/deploy/kubernetes/helm/dinky/conf/application-mysql.yml new file mode 100755 index 0000000000..6c71564216 --- /dev/null +++ b/deploy/kubernetes/helm/dinky/conf/application-mysql.yml @@ -0,0 +1,23 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +spring: + datasource: + url: jdbc:mysql://${MYSQL_ADDR:127.0.0.1:3306}/${MYSQL_DATABASE:dinky}?useUnicode=true&characterEncoding=UTF-8&autoReconnect=true&useSSL=false&zeroDateTimeBehavior=convertToNull&serverTimezone=Asia/Shanghai&allowPublicKeyRetrieval=true + username: ${MYSQL_USERNAME:dinky} + password: ${MYSQL_PASSWORD:dinky} + driver-class-name: com.mysql.cj.jdbc.Driver diff --git a/deploy/kubernetes/helm/dinky/conf/application-pgsql.yml b/deploy/kubernetes/helm/dinky/conf/application-pgsql.yml new file mode 100755 index 0000000000..eff2bdf64a --- /dev/null +++ b/deploy/kubernetes/helm/dinky/conf/application-pgsql.yml @@ -0,0 +1,24 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +spring: + datasource: + username: ${POSTGRES_USER:dinky} + password: ${POSTGRES_PASSWORD:dinky} + driver-class-name: org.postgresql.Driver + # The POSTGRES_ADDR variable is not officially provided. If you use docker, please manually add an env to support it. + url: jdbc:postgresql://${POSTGRES_ADDR:localhost:5432}/${POSTGRES_DB:dinky}?stringtype=unspecified \ No newline at end of file diff --git a/deploy/kubernetes/helm/dinky/conf/application.yml b/deploy/kubernetes/helm/dinky/conf/application.yml new file mode 100755 index 0000000000..a8c537a333 --- /dev/null +++ b/deploy/kubernetes/helm/dinky/conf/application.yml @@ -0,0 +1,167 @@ +################################################################################################################# +################################################# Common Config ################################################# +################################################################################################################# +# Dinky application port +server: + port: 8888 + shutdown: graceful + +spring: + # Dinky application name + application: + name: Dinky + profiles: + # The h2 database is used by default. 
If you need to use other databases, please set the configuration active to: mysql, currently supports [mysql, pgsql, h2]
+  # If you use mysql database, please configure mysql database connection information in application-mysql.yml
+  # If you use pgsql database, please configure pgsql database connection information in application-pgsql.yml
+  # If you use the h2 database, please configure the h2 database connection information in application-h2.yml,
+  # note: the h2 database is only for trial use, and the related data that has been created cannot be migrated, please use it with caution
+    active: ${DB_ACTIVE:h2} #[h2,mysql,pgsql]
+    include:
+      - jmx
+      - flyway
+  lifecycle:
+    timeout-per-shutdown-phase: 30s
+
+  # mvc config
+  mvc:
+    pathmatch:
+      # Path matching strategy, default ant_path_matcher, support ant_path_matcher and path_pattern_parser
+      matching-strategy: ant_path_matcher
+    format:
+      date: yyyy-MM-dd HH:mm:ss # date format
+      time: HH:mm:ss # time format
+      date-time: yyyy-MM-dd HH:mm:ss # date-time format
+
+  # json format global configuration
+  jackson:
+    time-zone: GMT+8 # Time zone, default is GMT+8
+    date-format: yyyy-MM-dd HH:mm:ss # Date format, the default is yyyy-MM-dd HH:mm:ss
+
+  # circular references allowed
+  main:
+    allow-circular-references: true
+
+  # file upload config of servlet, the default is 500MB
+  servlet:
+    multipart:
+      enabled: true
+      max-file-size: 524288000
+      max-request-size: 524288000
+
+  # 1. By default, memory cache metadata information is used,
+  # 2. DINKY supports Redis caching. If necessary, please change simple to Redis and open the Redis connection configuration below
+  # 3. Sub configuration items can be opened or customized as needed
+  cache:
+    type: simple
+    # If the type is configured as Redis, this item can be configured as needed
+#    redis:
+      # Do you want to cache empty values? Just save by default
+#      cache-null-values: false
+      # Cache expiration time, 24 hours
+#      time-to-live: 86400
+
+  ########################################################## Redis Config ##########################################################
+  # Note: Redis related configurations in DINKY can be used to cache meta-data information (memory caching is used by default) and cache session information of SA TOKEN (dependency needs to be added, please refer to the official documentation of SA TOKEN for Redis caching configuration instructions)
+  ## 1. If you need to use Redis to cache metadata information, please configure cache.type to Redis and then configure the following configuration items
+  ## 2. If you need to use Redis to cache SA Token session information, please follow the instructions in the official SA Token documentation to configure it
+  # Note: Please pay attention to the indentation after opening comments, otherwise it may cause configuration file parsing errors.
Note that when both 1 and 2 are used simultaneously, only the same Redis database can be supported +# redis: +# host: localhost +# port: 6379 +# username: +# password: + # Redis database number +# database: 10 +# jedis: +# pool: + # The maximum number of connections in the connection pool (use a negative value to indicate no limit) +# max-active: 50 + # The maximum blocking waiting time of the connection pool (use a negative value to indicate no limit) +# max-wait: 3000 + # The maximum number of idle connections in the connection pool +# max-idle: 20 + # The minimum number of idle connections in the connection pool +# min-idle: 5 + # Connection timeout (milliseconds) +# timeout: 5000 + +--- + +################################################################################################################# +################################################# Mybatis Config ################################################ +######### Please note: that the following configurations are not recommended to be modified ##################### +################################################################################################################# +mybatis-plus: + mapper-locations: classpath:/mapper/*Mapper.xml + # Entity scanning, multiple packages are separated by commas or semicolons + typeAliasesPackage: org.dinky.model + global-config: + db-config: + id-type: auto + # Logic delete configuration : 0: false(Not deleted), 1: true(deleted) + logic-delete-field: is_delete + logic-delete-value: 1 + logic-not-delete-value: 0 + banner: false + configuration: + ##### mybatis-plus prints complete sql (only for development environment) + #log-impl: org.apache.ibatis.logging.stdout.StdOutImpl + log-impl: org.apache.ibatis.logging.nologging.NoLoggingImpl + type-handlers-package: org.dinky.data.typehandler + +--- +################################################################################################################# +################################################# SMS Config #################################################### +################################################################################################################# +sms: + is-print: false + + +--- +################################################################################################################# +################################################# Sa-Token Config ############################################### +################################################################################################################# +# Sa-Token basic configuration +sa-token: + # The validity period of the token, the unit is 10 hours by default, -1 means it will never expire + timeout: 36000 + # The temporary validity period of the token (the token will be considered as expired if there is no operation within the specified time) + # unit: second , if you do not need to set a temporary token, you can set it to -1 + active-timeout: -1 + # Whether to allow the same account to log in concurrently (when true, allow login together, when false, new login squeezes out old login) + is-concurrent: false + # When multiple people log in to the same account, whether to share a token (if true, all logins share a token, and if false, create a new token for each login) + is-share: true + # token style + token-style: uuid + # Whether to output the operation log + is-log: false + # Whether to print banner + is-print: false + # The secret key + jwt-secret-key: 0DA4198858E84F1AADDF846340587A85 + # is write header + is-write-header: 
true + # is read header + is-read-header: true + token-name: token + is-read-cookie: true + +--- +################################################################################################################# +################################################# knife4j Config ################################################ +################################################################################################################# +knife4j: + enable: true + setting: + language: en + + +--- +################################################################################################################# +################################################# Crypto Config ################################################# +################################################################################################################# +crypto: + enabled: false + encryption-password: diff --git a/deploy/kubernetes/helm/dinky/conf/jmx/jmx_exporter_config.yaml b/deploy/kubernetes/helm/dinky/conf/jmx/jmx_exporter_config.yaml new file mode 100755 index 0000000000..e34f855467 --- /dev/null +++ b/deploy/kubernetes/helm/dinky/conf/jmx/jmx_exporter_config.yaml @@ -0,0 +1,14 @@ +--- +lowercaseOutputLabelNames: true +lowercaseOutputName: true +whitelistObjectNames: ["java.lang:type=OperatingSystem"] +blacklistObjectNames: [] +rules: + - pattern: 'java.lang<>(committed_virtual_memory|free_physical_memory|free_swap_space|total_physical_memory|total_swap_space)_size:' + name: os_$1_bytes + type: GAUGE + attrNameSnakeCase: true + - pattern: 'java.lang<>((?!process_cpu_time)\w+):' + name: os_$1 + type: GAUGE + attrNameSnakeCase: true \ No newline at end of file diff --git a/deploy/kubernetes/helm/dinky/conf/log4j2.xml b/deploy/kubernetes/helm/dinky/conf/log4j2.xml new file mode 100755 index 0000000000..088ef44587 --- /dev/null +++ b/deploy/kubernetes/helm/dinky/conf/log4j2.xml @@ -0,0 +1,86 @@ + + + + + ${sys:dinky.logs.path:-./logs/} + dinky + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/deploy/kubernetes/helm/dinky/templates/NOTES.txt b/deploy/kubernetes/helm/dinky/templates/NOTES.txt new file mode 100755 index 0000000000..6d3ea03ba4 --- /dev/null +++ b/deploy/kubernetes/helm/dinky/templates/NOTES.txt @@ -0,0 +1,34 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +** Please be patient while the chart dinky {{ .Chart.AppVersion }} is being deployed ** + +Access dinky UI URL by: + +ChartVersion:{{ .Chart.Version}}[refers to the release version] +appVersion:{{ .Chart.Version }}[refers to the code version] + +{{- if .Values.ingress.enabled }} + + dinky UI URL: https://{{ .Values.ingress.host }}/dinky + +{{- else if eq .Values.service.type "NodePort" }} + +You can try the following command to get the ip, port of dinky: +kubectl get no -n {{ .Release.Namespace }} -o jsonpath="{.items[0].status.addresses[0].address}" +kubectl get svc {{ .Values.service.name }} -n {{ .Release.Namespace }} -o jsonpath="{.spec.ports[0].nodePort}" + +{{- end }} diff --git a/deploy/kubernetes/helm/dinky/templates/_helpers.tpl b/deploy/kubernetes/helm/dinky/templates/_helpers.tpl new file mode 100755 index 0000000000..28b76097d9 --- /dev/null +++ b/deploy/kubernetes/helm/dinky/templates/_helpers.tpl @@ -0,0 +1,71 @@ +{{/* +Expand the name of the chart. +*/}} +{{- define "dinky.name" -}} +{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{/* +Create a default fully qualified app name. +*/}} +{{- define "dinky.fullname" -}} +{{- if .Values.fullnameOverride }} +{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- $name := default .Chart.Name .Values.nameOverride }} +{{- if contains $name .Release.Name }} +{{- .Release.Name | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }} +{{- end }} +{{- end }} +{{- end }} + +{{/* +Create chart name and version as used by the chart label. +*/}} +{{- define "dinky.chart" -}} +{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{/* +Create a default common labels. +*/}} +{{- define "dinky.labels" -}} +helm.sh/chart: {{ include "dinky.chart" . }} +{{ include "dinky.selectorLabels" . }} +{{- if .Chart.AppVersion }} +app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} +{{- end }} +app.kubernetes.io/managed-by: {{ .Release.Service }} +{{- end }} + +{{/* +Selector labels +*/}} +{{- define "dinky.selectorLabels" -}} +app.kubernetes.io/name: {{ include "dinky.name" . }} +app.kubernetes.io/instance: {{ .Release.Name }} +{{- end }} + +{{/* + Create service account name +*/}} +{{- define "dinky.serviceAccountName" -}} +{{- if .Values.dinkyServiceAccount.create }} +{{- default (include "dinky.fullname" .) .Values.dinkyServiceAccount.name }} +{{- else }} +{{- default "default" .Values.dinkyServiceAccount.name }} +{{- end }} +{{- end }} + +{{- define "dinky.dbActive" -}} +- name: DB_ACTIVE + {{- if .Values.postgresql.enabled }} + value: "pgsql" + {{- else if .Values.mysql.enabled }} + value: "mysql" + {{- else }} + value: {{ .Values.externalDatabase.type | quote }} + {{- end }} +{{- end }} \ No newline at end of file diff --git a/deploy/kubernetes/helm/dinky/templates/configmap.yaml b/deploy/kubernetes/helm/dinky/templates/configmap.yaml new file mode 100755 index 0000000000..dbc25276e6 --- /dev/null +++ b/deploy/kubernetes/helm/dinky/templates/configmap.yaml @@ -0,0 +1,82 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +{{- if .Values.dinkyDefaultConfiguration.create }} +apiVersion: v1 +kind: ConfigMap +metadata: + name: dinky-config + namespace: {{ .Release.Namespace }} + labels: + {{- include "dinky.labels" . | nindent 4 }} +data: + application.yml: |+ +{{- if .Values.dinkyDefaultConfiguration.append }} + {{- $.Files.Get "conf/application.yml" | nindent 4 -}} +{{- end }} +{{- if index (.Values.dinkyDefaultConfiguration) "application.yml" }} + {{- index (.Values.dinkyDefaultConfiguration) "application.yml" | nindent 4 -}} +{{- end }} + application-h2.yml: |+ +{{- if .Values.dinkyDefaultConfiguration.append }} + {{- $.Files.Get "conf/application-h2.yml" | nindent 4 -}} +{{- end }} +{{- if index (.Values.dinkyDefaultConfiguration) "application-h2.yml" }} + {{- index (.Values.dinkyDefaultConfiguration) "application-h2.yml" | nindent 4 -}} +{{- end }} + application-mysql.yml: |+ +{{- if .Values.dinkyDefaultConfiguration.append }} + {{- $.Files.Get "conf/application-mysql.yml" | nindent 4 -}} +{{- end }} +{{- if index (.Values.dinkyDefaultConfiguration) "application-mysql.yml" }} + {{- index (.Values.dinkyDefaultConfiguration) "application-mysql.yml" | nindent 4 -}} +{{- end }} + application-pgsql.yml: |+ +{{- if .Values.dinkyDefaultConfiguration.append }} + {{- $.Files.Get "conf/application-pgsql.yml" | nindent 4 -}} +{{- end }} +{{- if index (.Values.dinkyDefaultConfiguration) "application-pgsql.yml" }} + {{- index (.Values.dinkyDefaultConfiguration) "application-pgsql.yml" | nindent 4 -}} +{{- end }} + application-jmx.yml: |+ + {{- if .Values.dinkyDefaultConfiguration.append }} + {{- $.Files.Get "conf/application-jmx.yml" | nindent 4 -}} + {{- end }} + {{- if index (.Values.dinkyDefaultConfiguration) "application-jmx.yml" }} + {{- index (.Values.dinkyDefaultConfiguration) "application-jmx.yml" | nindent 4 -}} + {{- end }} + application.properties: |+ + {{- if .Values.dinkyDefaultConfiguration.append }} + {{- $.Files.Get "conf/application.properties" | nindent 4 -}} + {{- end }} + {{- if index (.Values.dinkyDefaultConfiguration) "application.properties" }} + {{- index (.Values.dinkyDefaultConfiguration) "application.properties" | nindent 4 -}} + {{- end }} + log4j2.xml: |+ + {{- if .Values.dinkyDefaultConfiguration.append }} + {{- $.Files.Get "conf/log4j2.xml" | nindent 4 -}} + {{- end }} + {{- if index (.Values.dinkyDefaultConfiguration) "log4j2.xml" }} + {{- index (.Values.dinkyDefaultConfiguration) "log4j2.xml" | nindent 4 -}} + {{- end }} + jmx_exporter_config.yaml: |+ + {{- if .Values.dinkyDefaultConfiguration.append }} + {{- $.Files.Get "conf/jmx/jmx_exporter_config.yaml" | nindent 4 -}} + {{- end }} + {{- if index (.Values.dinkyDefaultConfiguration) "jmx_exporter_config.yaml" }} + {{- index (.Values.dinkyDefaultConfiguration) "jmx_exporter_config.yaml" | nindent 4 -}} + {{- end }} +{{- end }} diff --git a/deploy/kubernetes/helm/dinky/templates/dinky.yaml b/deploy/kubernetes/helm/dinky/templates/dinky.yaml new file mode 100755 index 0000000000..d4485255fe --- /dev/null +++ b/deploy/kubernetes/helm/dinky/templates/dinky.yaml @@ -0,0 +1,169 @@ +# +# Licensed to the Apache Software Foundation 
(ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ include "dinky.name" . }} + namespace: {{ .Release.Namespace | default "default"}} + labels: + {{- include "dinky.labels" . | nindent 4 }} +spec: + replicas: {{ .Values.spec.replicaCount }} + selector: + matchLabels: + {{- include "dinky.selectorLabels" . | nindent 6 }} + template: + metadata: + labels: + {{- include "dinky.selectorLabels" . | nindent 8 }} + spec: + {{- if .Values.spec.affinity }} + affinity: + {{- toYaml .Values.spec.affinity | nindent 8 }} + {{- end }} + {{- if .Values.spec.nodeSelector }} + nodeSelector: + {{- toYaml .Values.spec.nodeSelector | nindent 8 }} + {{- end }} + {{- if .Values.spec.tolerations }} + tolerations: + {{- toYaml .Values.spec.tolerations | nindent 8 }} + {{- end }} + {{- if .Values.image.pullSecret }} + imagePullSecrets: + - name: {{ .Values.image.pullSecret }} + {{- end }} + serviceAccountName: {{ include "dinky.serviceAccountName" . }} + containers: + - image: {{ .Values.image.repository }}:{{ .Values.image.tag}} + name: {{ .Chart.Name }} + imagePullPolicy: {{ .Values.image.pullPolicy }} + ports: + - name: {{ .Values.spec.name }} + containerPort: {{ .Values.spec.containerPort }} + protocol: TCP + env: + {{- if .Values.mysql.enabled }} + - name: MYSQL_DATABASE + value: {{ .Values.mysql.auth.database }} + {{- else if .Values.postgresql.enabled }} + - name: POSTGRES_DB + value: {{ .Values.postgresql.auth.database }} + {{- else }} + {{ if eq .Values.externalDatabase.type "mysql" }} + - name: MYSQL_DATABASE + value: {{ .Values.externalDatabase.auth.database }} + {{- else }} + - name: POSTGRES_DB + value: {{ .Values.externalDatabase.auth.database }} + {{- end }} + {{- end }} + {{- include "dinky.dbActive" . | nindent 12 }} + envFrom: + {{- if .Values.mysql.enabled }} + - secretRef: + name: {{ include "dinky.name" . }}-mysql + {{- end }} + {{- if .Values.postgresql.enabled }} + - secretRef: + name: {{ include "dinky.name" . }}-postgres + {{- end }} + {{- if .Values.externalDatabase.enabled }} + {{ if eq .Values.externalDatabase.type "mysql" }} + - secretRef: + name: {{ include "dinky.name" . }}-mysql + {{- else }} + - secretRef: + name: {{ include "dinky.name" . 
}}-postgres + {{- end }} + {{- end }} + securityContext: + privileged: false + command: + - /bin/bash + - '-c' + - >- + /dinky/auto.sh startOnPending {{ .Values.spec.extraEnv.flinkVersion}} + args: + {{- if .Values.spec.livenessProbe.enabled }} + livenessProbe: + exec: + command: [ "curl", "-s", "http://localhost:{{ .Values.spec.containerPort }}/actuator/health/liveness" ] + initialDelaySeconds: {{ .Values.spec.livenessProbe.initialDelaySeconds }} + periodSeconds: {{ .Values.spec.livenessProbe.periodSeconds }} + timeoutSeconds: {{ .Values.spec.livenessProbe.timeoutSeconds }} + successThreshold: {{ .Values.spec.livenessProbe.successThreshold }} + failureThreshold: {{ .Values.spec.livenessProbe.failureThreshold }} + {{- end }} + {{- if .Values.spec.readinessProbe.enabled }} + readinessProbe: + exec: + command: [ "curl", "-s", "http://localhost:{{ .Values.spec.containerPort }}/actuator/health/readiness" ] + initialDelaySeconds: {{ .Values.spec.readinessProbe.initialDelaySeconds }} + periodSeconds: {{ .Values.spec.readinessProbe.periodSeconds }} + timeoutSeconds: {{ .Values.spec.readinessProbe.timeoutSeconds }} + successThreshold: {{ .Values.spec.readinessProbe.successThreshold }} + failureThreshold: {{ .Values.spec.readinessProbe.failureThreshold }} + {{- end }} + volumeMounts: + - mountPath: /dinky/conf/application.properties + name: dinky-config-volume + subPath: application.properties + - mountPath: /dinky/conf/application.yml + name: dinky-config-volume + subPath: application.yml + - mountPath: /dinky/conf/application-h2.yml + name: dinky-config-volume + subPath: application-h2.yml + - mountPath: /dinky/conf/application-jmx.yml + name: dinky-config-volume + subPath: application-jmx.yml + - mountPath: /dinky/conf/application-mysql.yml + name: dinky-config-volume + subPath: application-mysql.yml + - mountPath: /dinky/conf/application-pgsql.yml + name: dinky-config-volume + subPath: application-pgsql.yml + - mountPath: /dinky/conf/log4j2.xml + name: dinky-config-volume + subPath: log4j2.xml + - mountPath: /dinky/conf/jmx/jmx_exporter_config.yaml + name: dinky-config-volume + subPath: jmx_exporter_config.yaml + resources: + {{- toYaml .Values.spec.resources | nindent 12 }} + volumes: + - name: dinky-config-volume + configMap: + name: dinky-config + items: + - key: application.properties + path: application.properties + - key: application.yml + path: application.yml + - key: application-h2.yml + path: application-h2.yml + - key: application-jmx.yml + path: application-jmx.yml + - key: application-mysql.yml + path: application-mysql.yml + - key: application-pgsql.yml + path: application-pgsql.yml + - key: log4j2.xml + path: log4j2.xml + - key: jmx_exporter_config.yaml + path: jmx_exporter_config.yaml \ No newline at end of file diff --git a/deploy/kubernetes/helm/dinky/templates/ingress.yaml b/deploy/kubernetes/helm/dinky/templates/ingress.yaml new file mode 100755 index 0000000000..09f52deb8a --- /dev/null +++ b/deploy/kubernetes/helm/dinky/templates/ingress.yaml @@ -0,0 +1,77 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +{{- if .Values.ingress.enabled -}} +{{- $fullName := include "dinky.fullname" . -}} +{{- $svcPort := .Values.spec.containerPort -}} +{{- if and .Values.ingress.className (not (semverCompare ">=1.18-0" .Capabilities.KubeVersion.GitVersion)) }} + {{- if not (hasKey .Values.ingress.annotations "kubernetes.io/ingress.class") }} + {{- $_ := set .Values.ingress.annotations "kubernetes.io/ingress.class" .Values.ingress.className}} + {{- end }} +{{- end }} +{{- if semverCompare ">=1.19-0" .Capabilities.KubeVersion.GitVersion -}} +apiVersion: networking.k8s.io/v1 +{{- else if semverCompare ">=1.14-0" .Capabilities.KubeVersion.GitVersion -}} +apiVersion: networking.k8s.io/v1beta1 +{{- else -}} +apiVersion: extensions/v1beta1 +{{- end }} +kind: Ingress +metadata: + name: {{ $fullName }} + labels: + {{- include "dinky.labels" . | nindent 4 }} + {{- with .Values.ingress.annotations }} + annotations: + {{- toYaml . | nindent 4 }} + {{- end }} +spec: + {{- if and .Values.ingress.className (semverCompare ">=1.18-0" .Capabilities.KubeVersion.GitVersion) }} + ingressClassName: {{ .Values.ingress.className }} + {{- end }} + {{- if .Values.ingress.tls }} + tls: + {{- range .Values.ingress.tls }} + - hosts: + {{- range .hosts }} + - {{ . | quote }} + {{- end }} + secretName: {{ .secretName }} + {{- end }} + {{- end }} + rules: + {{- range .Values.ingress.hosts }} + - host: {{ .host | quote }} + http: + paths: + {{- range .paths }} + - path: {{ .path }} + {{- if and .pathType (semverCompare ">=1.18-0" $.Capabilities.KubeVersion.GitVersion) }} + pathType: {{ .pathType }} + {{- end }} + backend: + {{- if semverCompare ">=1.19-0" $.Capabilities.KubeVersion.GitVersion }} + service: + name: {{ $fullName }} + port: + number: {{ $svcPort }} + {{- else }} + serviceName: {{ $fullName }} + servicePort: {{ $svcPort }} + {{- end }} + {{- end }} + {{- end }} +{{- end }} diff --git a/deploy/kubernetes/helm/dinky/templates/rbac.yaml b/deploy/kubernetes/helm/dinky/templates/rbac.yaml new file mode 100755 index 0000000000..2917d40e64 --- /dev/null +++ b/deploy/kubernetes/helm/dinky/templates/rbac.yaml @@ -0,0 +1,86 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +{{- define "dinky.rbacRules" }} +rules: + - apiGroups: + - "*" + resources: + - "*" + verbs: + - "*" +{{- end }} +--- +{{- if .Values.rbac.create }} +--- + +{{- if .Values.watchNamespaces }} +{{- range .Values.watchNamespaces }} +apiVersion: rbac.authorization.k8s.io/v1 +kind: Role +metadata: + name: dinky + namespace: {{ . }} + labels: + {{- include "dinky.labels" $ | nindent 4 }} +{{- template "dinky.rbacRules" $ }} +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: RoleBinding +metadata: + name: dinky-role-binding + namespace: {{ . }} + labels: + {{- include "dinky.labels" $ | nindent 4 }} +roleRef: + kind: Role + name: dinky + apiGroup: rbac.authorization.k8s.io +subjects: + - kind: ServiceAccount + name: {{ include "dinky.serviceAccountName" $ }} + namespace: {{ $.Release.Namespace }} +--- +{{- end }} +{{ else }} + +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: ClusterRole +metadata: + name: dinky-cluster + namespace: {{ .Release.Namespace }} + labels: + {{- include "dinky.labels" . | nindent 4 }} +{{- template "dinky.rbacRules" $ }} +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: ClusterRoleBinding +metadata: + name: dinky-cluster-role-binding + namespace: {{ .Release.Namespace }} + labels: + {{- include "dinky.labels" . | nindent 4 }} +roleRef: + kind: ClusterRole + name: dinky-cluster + apiGroup: rbac.authorization.k8s.io +subjects: + - kind: ServiceAccount + name: {{ include "dinky.serviceAccountName" . }} + namespace: {{ .Release.Namespace }} +{{- end }} +{{- end }} diff --git a/deploy/kubernetes/helm/dinky/templates/secret-external-mysql-database.yaml b/deploy/kubernetes/helm/dinky/templates/secret-external-mysql-database.yaml new file mode 100644 index 0000000000..9717dd004d --- /dev/null +++ b/deploy/kubernetes/helm/dinky/templates/secret-external-mysql-database.yaml @@ -0,0 +1,13 @@ +{{- if .Values.externalDatabase.enabled -}} +{{ if eq .Values.externalDatabase.type "mysql" }} +apiVersion: v1 +kind: Secret +metadata: + name: {{ include "dinky.name" . }}-mysql +data: + MYSQL_ADDR: {{ .Values.externalDatabase.url | b64enc | quote }} + MYSQL_USERNAME: {{ .Values.externalDatabase.auth.username | b64enc | quote }} + MYSQL_PASSWORD: {{ .Values.externalDatabase.auth.password | b64enc | quote }} +type: Opaque +{{- end }} +{{- end }} \ No newline at end of file diff --git a/deploy/kubernetes/helm/dinky/templates/secret-external-postgres-database.yaml b/deploy/kubernetes/helm/dinky/templates/secret-external-postgres-database.yaml new file mode 100644 index 0000000000..45b3a3a7b5 --- /dev/null +++ b/deploy/kubernetes/helm/dinky/templates/secret-external-postgres-database.yaml @@ -0,0 +1,13 @@ +{{- if .Values.externalDatabase.enabled -}} +{{ if eq .Values.externalDatabase.type "postgresql" }} +apiVersion: v1 +kind: Secret +metadata: + name: {{ include "dinky.name" . 
}}-postgres +data: + POSTGRES_ADDR: {{ .Values.externalDatabase.url | b64enc | quote }} + POSTGRES_USER: {{ .Values.externalDatabase.auth.username | b64enc | quote }} + POSTGRES_PASSWORD: {{ .Values.externalDatabase.auth.password | b64enc | quote }} +type: Opaque +{{- end }} +{{- end }} \ No newline at end of file diff --git a/deploy/kubernetes/helm/dinky/templates/secret-mysql-database.yaml b/deploy/kubernetes/helm/dinky/templates/secret-mysql-database.yaml new file mode 100644 index 0000000000..0c44962644 --- /dev/null +++ b/deploy/kubernetes/helm/dinky/templates/secret-mysql-database.yaml @@ -0,0 +1,11 @@ +{{- if .Values.mysql.enabled -}} +apiVersion: v1 +kind: Secret +metadata: + name: {{ include "dinky.name" . }}-mysql +data: + MYSQL_ADDR: {{ .Values.mysql.url | b64enc | quote }} + MYSQL_USERNAME: {{ .Values.mysql.auth.username | b64enc | quote }} + MYSQL_PASSWORD: {{ .Values.mysql.auth.password | b64enc | quote }} +type: Opaque +{{- end }} \ No newline at end of file diff --git a/deploy/kubernetes/helm/dinky/templates/secret-postgres-database.yaml b/deploy/kubernetes/helm/dinky/templates/secret-postgres-database.yaml new file mode 100644 index 0000000000..169bb2a042 --- /dev/null +++ b/deploy/kubernetes/helm/dinky/templates/secret-postgres-database.yaml @@ -0,0 +1,11 @@ +{{- if .Values.postgresql.enabled -}} +apiVersion: v1 +kind: Secret +metadata: + name: {{ include "dinky.name" . }}-postgres +data: + POSTGRES_ADDR: {{ .Values.postgresql.url | b64enc | quote }} + POSTGRES_USER: {{ .Values.postgresql.auth.username | b64enc | quote }} + POSTGRES_PASSWORD: {{ .Values.postgresql.auth.password | b64enc | quote }} +type: Opaque +{{- end }} \ No newline at end of file diff --git a/deploy/kubernetes/helm/dinky/templates/service-external-database.yaml b/deploy/kubernetes/helm/dinky/templates/service-external-database.yaml new file mode 100755 index 0000000000..f1ef0d2709 --- /dev/null +++ b/deploy/kubernetes/helm/dinky/templates/service-external-database.yaml @@ -0,0 +1,22 @@ +{{- if .Values.externalDatabase.enabled -}} +apiVersion: v1 +kind: Service +metadata: + name: {{ .Values.service.name }}-external-svc +spec: + clusterIP: {{ .Values.externalDatabaseService.clusterIP }} + ports: + - port: {{ .Values.externalDatabaseService.port }} + targetPort: {{ .Values.externalDatabaseService.port }} + protocol: TCP +--- +apiVersion: v1 +kind: Endpoints +metadata: + name: {{ .Values.service.name }}-external-svc +subsets: + - addresses: + - ip: {{ .Values.externalDatabaseEndpoints.ip }} + ports: + - port: {{ .Values.externalDatabaseEndpoints.port }} +{{- end }} \ No newline at end of file diff --git a/deploy/kubernetes/helm/dinky/templates/service.yaml b/deploy/kubernetes/helm/dinky/templates/service.yaml new file mode 100755 index 0000000000..dfacc0615b --- /dev/null +++ b/deploy/kubernetes/helm/dinky/templates/service.yaml @@ -0,0 +1,31 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +apiVersion: v1 +kind: Service +metadata: + name: {{ .Values.service.name }} + labels: + {{- include "dinky.labels" . | nindent 4 }} +spec: + type: {{ .Values.service.type }} + ports: + - port: {{ .Values.spec.containerPort }} + targetPort: {{ .Values.spec.name }} + protocol: TCP + name: {{ .Values.spec.name }} + selector: + {{- include "dinky.selectorLabels" . | nindent 4 }} diff --git a/deploy/kubernetes/helm/dinky/templates/serviceaccount.yaml b/deploy/kubernetes/helm/dinky/templates/serviceaccount.yaml new file mode 100755 index 0000000000..d24c47fc62 --- /dev/null +++ b/deploy/kubernetes/helm/dinky/templates/serviceaccount.yaml @@ -0,0 +1,30 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +--- +{{- if .Values.dinkyServiceAccount.create -}} +apiVersion: v1 +kind: ServiceAccount +metadata: + name: {{ include "dinky.serviceAccountName" . }} + namespace: {{ .Release.Namespace }} + labels: + {{- include "dinky.labels" . | nindent 4 }} + {{- with .Values.dinkyServiceAccount.annotations }} + annotations: + {{- toYaml . | nindent 4 }} + {{- end }} +{{- end }} diff --git a/deploy/kubernetes/helm/dinky/values.yaml b/deploy/kubernetes/helm/dinky/values.yaml new file mode 100755 index 0000000000..a42994c46f --- /dev/null +++ b/deploy/kubernetes/helm/dinky/values.yaml @@ -0,0 +1,126 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+#
+
+timezone: "Asia/Shanghai"
+
+nameOverride: ""
+fullnameOverride: ""
+
+image:
+  repository: "docker.dinky.org.cn:32451/dinky/dev/dinky-txh"
+  pullPolicy: "Always"
+  tag: "1.0.2"
+  pullSecret: ""
+
+mysql:
+  enabled: false
+  url: "172.168.1.111:31476"
+  auth:
+    username: "dinky"
+    password: "bigdata123!@#"
+    database: "dinky-dev-1-17"
+
+postgresql:
+  enabled: false
+  url: "172.168.1.111:31476"
+  auth:
+    username: "dinky"
+    password: "bigdata123!@#"
+    database: "dinky-dev-1-17"
+
+externalDatabase:
+  enabled: true
+  type: "mysql"
+  url: "10.43.2.12:3306"
+  auth:
+    username: "root"
+    password: "Dinky@1234567!"
+    database: "dinky"
+
+externalDatabaseService:
+  clusterIP: 10.43.2.12
+  port: 3306
+
+externalDatabaseEndpoints:
+  ip: 172.168.1.110
+  port: 3306
+
+rbac:
+  create: true
+
+spec:
+  replicaCount: 1
+  containerPort: 8888
+  name: rest
+  extraEnv:
+    flinkVersion: "1.17"
+
+  affinity: {}
+  nodeSelector: {}
+  tolerations: []
+  resources: {}
+  # resources:
+  #   limits:
+  #     memory: "2Gi"
+  #     cpu: "1"
+  #   requests:
+  #     memory: "1Gi"
+  #     cpu: "500m"
+  livenessProbe:
+    enabled: true
+    initialDelaySeconds: "90"
+    periodSeconds: "30"
+    timeoutSeconds: "20"
+    failureThreshold: "3"
+    successThreshold: "1"
+
+  readinessProbe:
+    enabled: true
+    initialDelaySeconds: "90"
+    periodSeconds: "30"
+    timeoutSeconds: "20"
+    failureThreshold: "3"
+    successThreshold: "1"
+
+ingress:
+  enabled: false
+  className: ""
+  annotations: {}
+  # kubernetes.io/ingress.class: nginx
+  # kubernetes.io/tls-acme: "true"
+  hosts:
+    - host: demo.dinky.org.cn
+      paths:
+        - path: /
+          pathType: ImplementationSpecific
+  tls: []
+
+
+service:
+  ## type determines how the Service is exposed. Defaults to ClusterIP. Valid options are ExternalName, ClusterIP, NodePort, and LoadBalancer
+  type: "ClusterIP"
+  name: "dinky"
+
+dinkyDefaultConfiguration:
+  create: true
+  append: true
+
+dinkyServiceAccount:
+  create: true
+  annotations: {}
+  name: "dinky"
\ No newline at end of file
diff --git a/dinky-assembly/src/main/assembly/package.xml b/dinky-assembly/src/main/assembly/package.xml
index 90594c0c16..ff54e604bc 100644
--- a/dinky-assembly/src/main/assembly/package.xml
+++ b/dinky-assembly/src/main/assembly/package.xml
@@ -280,6 +280,13 @@
                 <include>Dockerfile</include>
             </includes>
         </fileSet>
+        <fileSet>
+            <directory>${project.parent.basedir}/deploy</directory>
+            <outputDirectory>deploy</outputDirectory>
+            <includes>
+                <include>**/**</include>
+            </includes>
+        </fileSet>
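
As a usage sketch of the chart files added above (assuming the chart is installed straight from the repository checkout; the release name, namespace, database host, and password below are placeholders, not values taken from this patch):

    helm install dinky ./deploy/kubernetes/helm/dinky \
      --namespace dinky --create-namespace \
      --set externalDatabase.enabled=true \
      --set externalDatabase.type=mysql \
      --set externalDatabase.url=mysql-host:3306 \
      --set externalDatabase.auth.username=dinky \
      --set externalDatabase.auth.password=changeit

The --set keys mirror the externalDatabase block in the values.yaml above; if the bundled mysql or postgresql sections are enabled instead, externalDatabase.enabled should be left false so that only one database secret is rendered.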