From c24968492b0819a35471dbff5a3d8e07113c18df Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=81=BF=E5=89=91?= Date: Wed, 11 Sep 2024 17:52:00 +0800 Subject: [PATCH] release 1.1.7-workflow-preview3 --- client/migrationx-common/pom.xml | 15 +- .../migrationx-domain-airflow/pom.xml | 2 +- .../migrationx-domain-aliyunemr/pom.xml | 2 +- .../migrationx-domain-azkaban/pom.xml | 2 +- .../migrationx-domain-caiyunjian/pom.xml | 2 +- .../migrationx-domain-core/pom.xml | 2 +- .../migrationx-domain-datago/pom.xml | 2 +- .../migrationx-domain-dataworks/pom.xml | 21 +- .../domain/dataworks/objects/entity/Node.java | 2 +- .../converter/DataWorksSpecNodeConverter.java | 102 +- .../service/spec/entity/DwNodeEntity.java | 8 +- .../spec/handler/BasicNodeSpecHandler.java | 57 +- .../spec/handler/EmrNodeSpecHandler.java | 28 +- .../DataWorksSpecNodeConverterTest.java | 153 +- .../pom.xml | 2 +- .../migrationx-domain-oozie/pom.xml | 2 +- client/migrationx-domain/pom.xml | 3 +- client/migrationx-reader/pom.xml | 2 +- client/migrationx-transformer/pom.xml | 2 +- client/migrationx-writer/pom.xml | 2 +- client/pom.xml | 15 +- pom.xml | 17 +- spec/pom.xml | 15 +- .../dataworks/common/spec/SpecUtil.java | 3 +- .../spec/domain/DataWorksTableSpec.java | 2 + .../spec/domain/DataWorksWorkflowSpec.java | 10 +- .../common/spec/domain/SpecEntity.java | 2 +- .../common/spec/domain/SpecRefEntity.java | 2 +- .../spec/domain/dw/codemodel/EmrCode.java | 16 +- .../dw/nodemodel/DataWorksNodeAdapter.java | 76 +- .../nodemodel/DataWorksNodeCodeAdapter.java | 19 +- .../DataWorksNodeInputOutputAdapter.java | 62 +- .../dw/nodemodel/SpecEntityDelegate.java | 61 + .../spec/domain/enums/FailureStrategy.java | 46 + .../common/spec/domain/enums/SpecKind.java | 16 +- .../common/spec/domain/enums/SpecVersion.java | 5 +- .../common/spec/domain/noref/SpecAnd.java | 8 +- .../spec/domain/noref/SpecAssertion.java | 2 + .../spec/domain/noref/SpecFlowDepend.java | 2 + .../{SpecCombined.java => SpecSubFlow.java} | 8 +- .../{ContainerNode.java => Container.java} | 4 +- .../spec/domain/ref/InputOutputWired.java | 31 + .../ScriptWired.java} | 10 +- .../spec/domain/ref/SpecDatasource.java | 2 + .../spec/domain/ref/SpecFileResource.java | 8 +- .../common/spec/domain/ref/SpecFunction.java | 8 +- .../common/spec/domain/ref/SpecNode.java | 59 +- .../spec/domain/ref/SpecRuntimeResource.java | 1 + .../spec/domain/ref/SpecScheduleStrategy.java | 54 + .../common/spec/domain/ref/SpecScript.java | 2 + .../common/spec/domain/ref/SpecTable.java | 1 + .../common/spec/domain/ref/SpecTrigger.java | 5 + .../common/spec/domain/ref/SpecVariable.java | 2 + .../common/spec/domain/ref/SpecWorkflow.java | 75 + .../domain/ref/runtime/SpecScriptRuntime.java | 12 +- .../specification/DataWorksNodeSpec.java | 43 - .../dataworks/common/spec/parser/Parser.java | 8 + .../common/spec/parser/SpecParserFactory.java | 7 +- .../parser/impl/DataWorksNodeSpecParser.java | 44 - .../spec/parser/impl/SpecJoinParser.java | 16 +- .../{NodeParser.java => SpecNodeParser.java} | 5 +- .../spec/parser/impl/SpecVariableParser.java | 6 +- .../spec/parser/impl/SpecWorkflowParser.java | 66 + ...CombinedParser.java => SubFlowParser.java} | 20 +- .../common/spec/utils/SpecDevUtil.java | 60 +- .../spec/writer/impl/AbstractWriter.java | 44 +- .../writer/impl/DataWorksNodeSpecWriter.java | 48 - .../impl/DataWorksWorkflowSpecWriter.java | 4 + .../spec/writer/impl/SpecNodeWriter.java | 5 +- .../spec/writer/impl/SpecWorkflowWriter.java | 99 + .../dataworks/common/spec/SpecUtilTest.java | 2168 ++++++++++------- 
.../common/spec/SpecWriterUtilTest.java | 1 + .../dw/codemodel/CodeModelFactoryTest.java | 123 +- .../spec/domain/dw/codemodel/EmrCodeTest.java | 4 +- .../nodemodel/DataWorksNodeAdapterTest.java | 224 +- .../DataWorksNodeCodeAdapterTest.java | 320 ++- .../DataWorksNodeInputOutputAdapterTest.java | 251 ++ .../common/spec/domain/ref/SpecNodeTest.java | 91 + .../impl/SpecScriptRuntimeParserTest.java | 1 + 79 files changed, 3225 insertions(+), 1505 deletions(-) create mode 100644 spec/src/main/java/com/aliyun/dataworks/common/spec/domain/dw/nodemodel/SpecEntityDelegate.java create mode 100644 spec/src/main/java/com/aliyun/dataworks/common/spec/domain/enums/FailureStrategy.java rename spec/src/main/java/com/aliyun/dataworks/common/spec/domain/noref/{SpecCombined.java => SpecSubFlow.java} (82%) rename spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/{ContainerNode.java => Container.java} (93%) create mode 100644 spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/InputOutputWired.java rename spec/src/main/java/com/aliyun/dataworks/common/spec/domain/{adapter/SpecNodeAdapter.java => ref/ScriptWired.java} (75%) create mode 100644 spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecScheduleStrategy.java create mode 100644 spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecWorkflow.java delete mode 100644 spec/src/main/java/com/aliyun/dataworks/common/spec/domain/specification/DataWorksNodeSpec.java delete mode 100644 spec/src/main/java/com/aliyun/dataworks/common/spec/parser/impl/DataWorksNodeSpecParser.java rename spec/src/main/java/com/aliyun/dataworks/common/spec/parser/impl/{NodeParser.java => SpecNodeParser.java} (95%) create mode 100644 spec/src/main/java/com/aliyun/dataworks/common/spec/parser/impl/SpecWorkflowParser.java rename spec/src/main/java/com/aliyun/dataworks/common/spec/parser/impl/{CombinedParser.java => SubFlowParser.java} (64%) delete mode 100644 spec/src/main/java/com/aliyun/dataworks/common/spec/writer/impl/DataWorksNodeSpecWriter.java create mode 100644 spec/src/main/java/com/aliyun/dataworks/common/spec/writer/impl/SpecWorkflowWriter.java create mode 100644 spec/src/test/java/com/aliyun/dataworks/common/spec/domain/dw/nodemodel/DataWorksNodeInputOutputAdapterTest.java create mode 100644 spec/src/test/java/com/aliyun/dataworks/common/spec/domain/ref/SpecNodeTest.java diff --git a/client/migrationx-common/pom.xml b/client/migrationx-common/pom.xml index a9267d5..8868b68 100644 --- a/client/migrationx-common/pom.xml +++ b/client/migrationx-common/pom.xml @@ -22,7 +22,7 @@ migrationx com.aliyun.dataworks - 1.1.7-1 + 1.1.7-workflow-preview3 ../pom.xml @@ -103,6 +103,19 @@ + + org.apache.maven.plugins + maven-source-plugin + 3.0.1 + + + attach-sources + + jar + + + + org.apache.maven.plugins maven-dependency-plugin diff --git a/client/migrationx-domain/migrationx-domain-airflow/pom.xml b/client/migrationx-domain/migrationx-domain-airflow/pom.xml index 6aa751c..426fb10 100644 --- a/client/migrationx-domain/migrationx-domain-airflow/pom.xml +++ b/client/migrationx-domain/migrationx-domain-airflow/pom.xml @@ -20,7 +20,7 @@ migrationx-domain com.aliyun.dataworks - 1.1.7-1 + 1.1.7-workflow-preview3 ../pom.xml 4.0.0 diff --git a/client/migrationx-domain/migrationx-domain-aliyunemr/pom.xml b/client/migrationx-domain/migrationx-domain-aliyunemr/pom.xml index 40b251a..d8fadb7 100644 --- a/client/migrationx-domain/migrationx-domain-aliyunemr/pom.xml +++ b/client/migrationx-domain/migrationx-domain-aliyunemr/pom.xml @@ -20,7 +20,7 @@ 
migrationx-domain com.aliyun.dataworks - 1.1.7-1 + 1.1.7-workflow-preview3 ../pom.xml 4.0.0 diff --git a/client/migrationx-domain/migrationx-domain-azkaban/pom.xml b/client/migrationx-domain/migrationx-domain-azkaban/pom.xml index 80ea550..8f00c5b 100644 --- a/client/migrationx-domain/migrationx-domain-azkaban/pom.xml +++ b/client/migrationx-domain/migrationx-domain-azkaban/pom.xml @@ -20,7 +20,7 @@ migrationx-domain com.aliyun.dataworks - 1.1.7-1 + 1.1.7-workflow-preview3 ../pom.xml 4.0.0 diff --git a/client/migrationx-domain/migrationx-domain-caiyunjian/pom.xml b/client/migrationx-domain/migrationx-domain-caiyunjian/pom.xml index 45a728e..2c0689e 100644 --- a/client/migrationx-domain/migrationx-domain-caiyunjian/pom.xml +++ b/client/migrationx-domain/migrationx-domain-caiyunjian/pom.xml @@ -20,7 +20,7 @@ migrationx-domain com.aliyun.dataworks - 1.1.7-1 + 1.1.7-workflow-preview3 ../pom.xml 4.0.0 diff --git a/client/migrationx-domain/migrationx-domain-core/pom.xml b/client/migrationx-domain/migrationx-domain-core/pom.xml index cc1be82..22efa4a 100644 --- a/client/migrationx-domain/migrationx-domain-core/pom.xml +++ b/client/migrationx-domain/migrationx-domain-core/pom.xml @@ -21,7 +21,7 @@ com.aliyun.dataworks migrationx-domain - 1.1.7-1 + 1.1.7-workflow-preview3 ../pom.xml diff --git a/client/migrationx-domain/migrationx-domain-datago/pom.xml b/client/migrationx-domain/migrationx-domain-datago/pom.xml index cfd9f83..2d6c86c 100644 --- a/client/migrationx-domain/migrationx-domain-datago/pom.xml +++ b/client/migrationx-domain/migrationx-domain-datago/pom.xml @@ -20,7 +20,7 @@ migrationx-domain com.aliyun.dataworks - 1.1.7-1 + 1.1.7-workflow-preview3 ../pom.xml 4.0.0 diff --git a/client/migrationx-domain/migrationx-domain-dataworks/pom.xml b/client/migrationx-domain/migrationx-domain-dataworks/pom.xml index 9ad7ea8..1e32acb 100644 --- a/client/migrationx-domain/migrationx-domain-dataworks/pom.xml +++ b/client/migrationx-domain/migrationx-domain-dataworks/pom.xml @@ -20,7 +20,7 @@ migrationx-domain com.aliyun.dataworks - 1.1.7-1 + 1.1.7-workflow-preview3 ../pom.xml 4.0.0 @@ -64,12 +64,6 @@ org.springframework spring-beans - - org.codehaus.jackson - jackson-core-asl - 1.9.13 - compile - @@ -79,6 +73,19 @@ true + + org.apache.maven.plugins + maven-source-plugin + 3.0.1 + + + attach-sources + + jar + + + + \ No newline at end of file diff --git a/client/migrationx-domain/migrationx-domain-dataworks/src/main/java/com/aliyun/dataworks/migrationx/domain/dataworks/objects/entity/Node.java b/client/migrationx-domain/migrationx-domain-dataworks/src/main/java/com/aliyun/dataworks/migrationx/domain/dataworks/objects/entity/Node.java index 298bec7..d7b12b3 100644 --- a/client/migrationx-domain/migrationx-domain-dataworks/src/main/java/com/aliyun/dataworks/migrationx/domain/dataworks/objects/entity/Node.java +++ b/client/migrationx-domain/migrationx-domain-dataworks/src/main/java/com/aliyun/dataworks/migrationx/domain/dataworks/objects/entity/Node.java @@ -47,7 +47,7 @@ @JsonTypeInfo( use = Id.MINIMAL_CLASS, property = "@class") -public abstract class Node extends DmObject { +public class Node extends DmObject { @JacksonXmlProperty(isAttribute = true, localName = "name") private String name; diff --git a/client/migrationx-domain/migrationx-domain-dataworks/src/main/java/com/aliyun/dataworks/migrationx/domain/dataworks/service/converter/DataWorksSpecNodeConverter.java 
b/client/migrationx-domain/migrationx-domain-dataworks/src/main/java/com/aliyun/dataworks/migrationx/domain/dataworks/service/converter/DataWorksSpecNodeConverter.java index 271a52f..446bb94 100644 --- a/client/migrationx-domain/migrationx-domain-dataworks/src/main/java/com/aliyun/dataworks/migrationx/domain/dataworks/service/converter/DataWorksSpecNodeConverter.java +++ b/client/migrationx-domain/migrationx-domain-dataworks/src/main/java/com/aliyun/dataworks/migrationx/domain/dataworks/service/converter/DataWorksSpecNodeConverter.java @@ -1,6 +1,7 @@ package com.aliyun.dataworks.migrationx.domain.dataworks.service.converter; import java.nio.file.Paths; +import java.util.List; import java.util.Objects; import java.util.Optional; import java.util.stream.Collectors; @@ -10,6 +11,7 @@ import com.aliyun.dataworks.common.spec.domain.SpecRefEntity; import com.aliyun.dataworks.common.spec.domain.Specification; import com.aliyun.dataworks.common.spec.domain.dw.nodemodel.DataWorksNodeAdapter; +import com.aliyun.dataworks.common.spec.domain.dw.nodemodel.DataWorksNodeAdapter.Context; import com.aliyun.dataworks.common.spec.domain.dw.nodemodel.DwNodeDependentTypeInfo; import com.aliyun.dataworks.common.spec.domain.dw.nodemodel.OutputContext; import com.aliyun.dataworks.common.spec.domain.dw.types.CodeProgramType; @@ -45,6 +47,7 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.collections4.ListUtils; +import org.apache.commons.collections4.MapUtils; import org.apache.commons.lang3.StringUtils; /** @@ -99,6 +102,46 @@ public static FileDetail functionSpecToFileDetail(Specification spec) { + return componentSpecToFileDetail(spec, null); + } + + private static FileDetail componentSpecToFileDetail(Specification spec, String resourceId) { + FileDetail fileDetail = new FileDetail(); + File file = componentSpecToFile(spec, resourceId); + if (file == null) { + log.error("get file from function spec is null"); + return null; + } + + fileDetail.setFile(file); + fileDetail.setNodeCfg(initFileNodeCfgByFile(file)); + return fileDetail; + } + + private static File componentSpecToFile(Specification spec, String functionId) { + DataWorksWorkflowSpec dataWorksWorkflowSpec = spec.getSpec(); + if (spec.getSpec() == null) { + log.warn("dataworks component spec is null"); + return null; + } + + return ListUtils.emptyIfNull(dataWorksWorkflowSpec.getComponents()).stream() + .filter(x -> StringUtils.isBlank(functionId) || StringUtils.equals(x.getId(), functionId)) + .findFirst() + .map(specCom -> { + File fileCom = new File(); + fileCom.setFileName(specCom.getName()); + fileCom.setOwner(Optional.ofNullable(specCom.getMetadata()).map(m -> (String)m.get("owner")).orElse(null)); + fileCom.setFileTypeStr(Optional.ofNullable(specCom.getScript()).map(SpecScript::getRuntime).map(SpecScriptRuntime::getCommand) + .orElse(null)); + fileCom.setFileType(getScriptCommandTypeId(specCom.getScript())); + fileCom.setUseType(NodeUseType.COMPONENT.getValue()); + fileCom.setContent(Optional.ofNullable(specCom.getScript()).map(SpecScript::getContent).orElse(null)); + return fileCom; + }).orElse(null); + } + private static File functionSpecToFile(Specification spec, String functionId) { DataWorksWorkflowSpec dataWorksWorkflowSpec = spec.getSpec(); if (spec.getSpec() == null) { @@ -154,20 +197,26 @@ private static Integer getScriptCommandTypeId(SpecScript script) { } public static FileDetail nodeSpecToFileDetail(Specification spec, String nodeId) { + return 
nodeSpecToFileDetail(spec, nodeId, null); + } + + public static FileDetail nodeSpecToFileDetail(Specification spec, String nodeId, String content) { FileDetail fileDetail = new FileDetail(); - fileDetail.setFile(nodeSpecToFile(spec, nodeId)); + fileDetail.setFile(nodeSpecToFile(spec, nodeId, content)); fileDetail.setNodeCfg(nodeSpecToNodeCfg(spec, nodeId)); return fileDetail; } public static FileDetail nodeSpecToFileDetail(Specification spec) { FileDetail fileDetail = new FileDetail(); - fileDetail.setFile(nodeSpecToFile(spec, null)); + String nodeId = Optional.ofNullable(MapUtils.emptyIfNull(spec.getMetadata()).get("uuid")) + .map(String::valueOf).orElse(null); + fileDetail.setFile(nodeSpecToFile(spec, nodeId)); fileDetail.setNodeCfg(nodeSpecToNodeCfg(spec, null)); return fileDetail; } - public static File nodeSpecToFile(Specification spec, String nodeId) { + public static File nodeSpecToFile(Specification spec, String nodeId, String content) { DataWorksWorkflowSpec dataWorksWorkflowSpec = spec.getSpec(); if (spec.getSpec() == null) { log.warn("dataworks workflow spec is null"); @@ -181,7 +230,8 @@ public static File nodeSpecToFile(Specification spec, Str file.setCloudUuid(null); file.setCommitStatus(null); file.setConnName(Optional.ofNullable(specNode.getDatasource()).map(SpecDatasource::getName).orElse(null)); - file.setContent(Optional.ofNullable(specNode.getScript()).map(SpecScript::getContent).orElse(null)); + Optional.ofNullable(content).ifPresent(x -> Optional.ofNullable(specNode.getScript()).ifPresent(s -> s.setContent(x))); + file.setContent(new DataWorksNodeAdapter(spec, specNode, Context.builder().deployToScheduler(true).build()).getCode()); file.setCreateTime(null); file.setCreateUser(null); file.setCurrentVersion(null); @@ -251,18 +301,47 @@ public static File nodeSpecToFile(Specification spec, Str }).orElse(null); } + public static File nodeSpecToFile(Specification spec, String nodeId) { + return nodeSpecToFile(spec, nodeId, null); + } + public static SpecNode getMatchSpecNode(DataWorksWorkflowSpec dataWorksWorkflowSpec, String nodeId) { - for (SpecNode node : dataWorksWorkflowSpec.getNodes()) { + for (SpecNode node : ListUtils.emptyIfNull(dataWorksWorkflowSpec.getNodes())) { + // normal nodes if (StringUtils.isBlank(nodeId) || StringUtils.equalsIgnoreCase(node.getId(), nodeId)) { return node; } + + // inner nodes of normal nodes for (SpecNode innerNode : node.getInnerNodes()) { if (StringUtils.isBlank(nodeId) || StringUtils.equalsIgnoreCase(innerNode.getId(), nodeId)) { return innerNode; } } } - return null; + + // workflow inner node + SpecNode node = ListUtils.emptyIfNull(dataWorksWorkflowSpec.getWorkflows()).stream() + .map(wf -> ListUtils.emptyIfNull(wf.getNodes())) + .map(nodes -> nodes.stream().filter(n -> StringUtils.equalsIgnoreCase(nodeId, n.getId())).findAny().orElse(null)) + .filter(Objects::nonNull) + .findAny() + .orElse(null); + if (node != null) { + return node; + } + + // inner nodes of workflow inner node + return ListUtils.emptyIfNull(dataWorksWorkflowSpec.getWorkflows()).stream() + // workflow nodes + .map(wf -> ListUtils.emptyIfNull(wf.getNodes())) + .flatMap(List::stream) + // inner nodes of workflow nodes + .map(nodes -> ListUtils.emptyIfNull(nodes.getInnerNodes())) + .map(nodes -> nodes.stream().filter(n -> StringUtils.equalsIgnoreCase(nodeId, n.getId())).findAny().orElse(null)) + .filter(Objects::nonNull) + .findAny() + .orElse(null); } /** @@ -283,7 +362,11 @@ public static FileNodeCfg nodeSpecToNodeCfg(Specification FileNodeCfg nodeCfg = new 
FileNodeCfg(); nodeCfg.setAppId(null); nodeCfg.setBaselineId(null); - nodeCfg.setCreateTime(null); + Optional.ofNullable(specNode.getMetadata()) + .map(x -> x.get("createTime")) + .map(String::valueOf) + .map(DateUtils::convertStringToDate) + .ifPresent(nodeCfg::setCreateTime); nodeCfg.setCreateUser(null); nodeCfg.setCronExpress(Optional.ofNullable(specNode.getTrigger()).map(SpecTrigger::getCron).orElse(null)); nodeCfg.setCycleType(CronExpressUtil.parseCronToCycleType(nodeCfg.getCronExpress())); @@ -411,8 +494,9 @@ public static FileDetail snapshotContentToFileDetail(DataSnapshot snapshotDto) { .flatMap(snapshot -> Optional.ofNullable(DataSnapshotContent.of(snapshot.getContent())) .map(content -> { Specification specification = SpecUtil.parseToDomain(content.getSpec()); - FileDetail fileDetail = nodeSpecToFileDetail(specification); - Optional.ofNullable(fileDetail.getFile()).ifPresent(file -> file.setContent(content.getContent())); + String nodeId = Optional.ofNullable(MapUtils.emptyIfNull(specification.getMetadata()).get("uuid")) + .map(String::valueOf).orElse(snapshot.getEntityUuid()); + FileDetail fileDetail = nodeSpecToFileDetail(specification, nodeId, content.getContent()); return fileDetail; })) .orElse(null); diff --git a/client/migrationx-domain/migrationx-domain-dataworks/src/main/java/com/aliyun/dataworks/migrationx/domain/dataworks/service/spec/entity/DwNodeEntity.java b/client/migrationx-domain/migrationx-domain-dataworks/src/main/java/com/aliyun/dataworks/migrationx/domain/dataworks/service/spec/entity/DwNodeEntity.java index 85261c7..b17f4e0 100644 --- a/client/migrationx-domain/migrationx-domain-dataworks/src/main/java/com/aliyun/dataworks/migrationx/domain/dataworks/service/spec/entity/DwNodeEntity.java +++ b/client/migrationx-domain/migrationx-domain-dataworks/src/main/java/com/aliyun/dataworks/migrationx/domain/dataworks/service/spec/entity/DwNodeEntity.java @@ -371,9 +371,13 @@ public interface DwNodeEntity { */ SpecComponent getComponent(); - String getOrigin(); + default String getOrigin() { + return null; + } - String getWorkflowName(); + default String getWorkflowName() { + return null; + } /** * get config pack diff --git a/client/migrationx-domain/migrationx-domain-dataworks/src/main/java/com/aliyun/dataworks/migrationx/domain/dataworks/service/spec/handler/BasicNodeSpecHandler.java b/client/migrationx-domain/migrationx-domain-dataworks/src/main/java/com/aliyun/dataworks/migrationx/domain/dataworks/service/spec/handler/BasicNodeSpecHandler.java index d938a1c..b757e11 100644 --- a/client/migrationx-domain/migrationx-domain-dataworks/src/main/java/com/aliyun/dataworks/migrationx/domain/dataworks/service/spec/handler/BasicNodeSpecHandler.java +++ b/client/migrationx-domain/migrationx-domain-dataworks/src/main/java/com/aliyun/dataworks/migrationx/domain/dataworks/service/spec/handler/BasicNodeSpecHandler.java @@ -29,6 +29,8 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; +import com.alibaba.fastjson2.JSON; + import com.aliyun.dataworks.common.spec.adapter.SpecHandlerContext; import com.aliyun.dataworks.common.spec.adapter.handler.AbstractEntityHandler; import com.aliyun.dataworks.common.spec.domain.dw.codemodel.Code; @@ -69,9 +71,6 @@ import com.aliyun.dataworks.migrationx.domain.dataworks.utils.FolderUtils; import com.aliyun.migrationx.common.utils.DateUtils; import com.aliyun.migrationx.common.utils.GsonUtils; - -import com.alibaba.fastjson2.JSON; - import com.google.common.base.Preconditions; import 
com.google.common.reflect.TypeToken; import lombok.extern.slf4j.Slf4j; @@ -94,9 +93,9 @@ public boolean support(DwNodeEntity dwNode) { protected NodeSpecAdapter getSpecAdapter() { return Optional.ofNullable(context) - .map(SpecHandlerContext::getSpecAdapter) - .map(adapter -> (NodeSpecAdapter) adapter) - .orElseThrow(() -> new RuntimeException("SpecAdapter is null")); + .map(SpecHandlerContext::getSpecAdapter) + .map(adapter -> (NodeSpecAdapter)adapter) + .orElseThrow(() -> new RuntimeException("SpecAdapter is null")); } protected boolean matchNodeType(DwNodeEntity dwNode, CodeProgramType nodeType) { @@ -105,9 +104,9 @@ protected boolean matchNodeType(DwNodeEntity dwNode, CodeProgramType nodeType) { protected boolean matchNodeType(DwNodeEntity dwNode, String nodeType) { return Optional.ofNullable(dwNode) - .map(DwNodeEntity::getType) - .map(type -> StringUtils.equalsIgnoreCase(nodeType, type)) - .orElse(false); + .map(DwNodeEntity::getType) + .map(type -> StringUtils.equalsIgnoreCase(nodeType, type)) + .orElse(false); } @SuppressWarnings("unchecked") @@ -166,9 +165,9 @@ public SpecNode handle(DwNodeEntity dmNode, SpecNode specNode) { specNode.setPriority(dmNode.getPriority()); specNode.setTimeout(Optional.ofNullable(dmNode.getExtraConfig()) - .map(json -> GsonUtils.fromJsonString(json, new TypeToken>() {}.getType())) - .map(map -> (Map) map) - .map(map -> MapUtils.getInteger(map, "alisaTaskKillTimeout")).orElse(null)); + .map(json -> GsonUtils.fromJsonString(json, new TypeToken>() {}.getType())) + .map(map -> (Map)map) + .map(map -> MapUtils.getInteger(map, "alisaTaskKillTimeout")).orElse(null)); specNode.setRuntimeResource(Optional.ofNullable(dmNode.getResourceGroup()).map(resGroup -> { SpecRuntimeResource rt = new SpecRuntimeResource(); rt.setResourceGroup(resGroup); @@ -194,7 +193,7 @@ protected SpecScript toSpecScript(DwNodeEntity dmNodeBO, SpecHandlerContext cont specScript.setParameters(toScriptParameters(dmNodeBO)); specScript.setRuntime(toSpecScriptRuntime(dmNodeBO)); - specScript.setContent(toSpectScriptContent(dmNodeBO)); + specScript.setContent(toSpecScriptContent(dmNodeBO)); return specScript; } @@ -314,17 +313,17 @@ private String getScriptLanguage(DwNodeEntity dmNodeBO) { CodeModel cm = CodeModelFactory.getCodeModel(dmNodeBO.getType(), dmNodeBO.getCode()); return Optional.ofNullable(cm.getCodeModel()).filter(m -> m instanceof MultiLanguageScriptingCode) - .map(codeModel -> ((MultiLanguageScriptingCode) codeModel).getLanguage()).orElse(null); + .map(codeModel -> ((MultiLanguageScriptingCode)codeModel).getLanguage()).orElse(null); } public SpecScriptRuntime toSpecScriptRuntime(DwNodeEntity scr) { SpecScriptRuntime sr = new SpecScriptRuntime(); - CodeProgramType type = CodeProgramType.getNodeTypeByName(scr.getType()); - sr.setCommand(type.getName()); + sr.setCommand(scr.getType()); + sr.setCommandTypeId(scr.getTypeId()); return sr; } - public String toSpectScriptContent(DwNodeEntity dmNodeBO) { + public String toSpecScriptContent(DwNodeEntity dmNodeBO) { return null; } @@ -357,17 +356,17 @@ public void setNodeInputOutputs(SpecNode specNode, DwNodeEntity dmNodeBO, SpecHa log.warn("invalid input context value: {}", inCtx); } specVariable.setNode(node); - return (Input) specVariable; + return (Input)specVariable; }).collect(Collectors.toList()); Optional.ofNullable(specNode.getScript()).ifPresent(scr -> { List parameters = new ArrayList<>(Optional.ofNullable(scr.getParameters()).orElse(new ArrayList<>())); - parameters.addAll(ListUtils.emptyIfNull(inputVariables).stream().map(v -> 
(SpecVariable) v).collect(Collectors.toList())); + parameters.addAll(ListUtils.emptyIfNull(inputVariables).stream().map(v -> (SpecVariable)v).collect(Collectors.toList())); scr.setParameters(parameters); }); specNode.setInputs(ListUtils.emptyIfNull(inputVariables).stream() - .map(v -> (SpecVariable) v).map(SpecVariable::getReferenceVariable) - .filter(Objects::nonNull).collect(Collectors.toList())); + .map(v -> (SpecVariable)v).map(SpecVariable::getReferenceVariable) + .filter(Objects::nonNull).collect(Collectors.toList())); specNode.getInputs().addAll(getNodeInputs(dmNodeBO)); List outputCtxList = dmNodeBO.getOutputContexts(); @@ -379,7 +378,7 @@ public void setNodeInputOutputs(SpecNode specNode, DwNodeEntity dmNodeBO, SpecHa specVariable.setValue(outCtx.getParamValue()); specVariable.setNode(node); specVariable.setDescription(outCtx.getDescription()); - return (Output) specVariable; + return (Output)specVariable; }).collect(Collectors.toList()); specNode.setOutputs(outputVariables); specNode.getOutputs().addAll(getNodeOutputs(dmNodeBO, context)); @@ -396,11 +395,11 @@ private void sortNodeInputOutput(List nodeInputOutputs) { nodeInputOutputs.sort(Comparator.comparing(x -> { if (x instanceof SpecNodeOutput) { - return ((SpecNodeOutput) x).getArtifactType() + ((SpecNodeOutput) x).getData(); + return ((SpecNodeOutput)x).getArtifactType() + ((SpecNodeOutput)x).getData(); } else if (x instanceof SpecTable) { - return ((SpecTable) x).getArtifactType() + ((SpecTable) x).getName(); + return ((SpecTable)x).getArtifactType() + ((SpecTable)x).getName(); } else if (x instanceof SpecVariable) { - return ((SpecVariable) x).getArtifactType() + ((SpecVariable) x).getName(); + return ((SpecVariable)x).getArtifactType() + ((SpecVariable)x).getName(); } else { return x.toString(); } @@ -450,8 +449,8 @@ private Map getConfigPack() { try { InputStream inputStream = BasicNodeSpecHandler.class.getResourceAsStream("/nodemarket/config_pack_cache.json"); return JSON.parseObject( - IOUtils.toString(inputStream, StandardCharsets.UTF_8), - new TypeToken>() {}.getType()); + IOUtils.toString(inputStream, StandardCharsets.UTF_8), + new TypeToken>() {}.getType()); } catch (IOException e) { throw new RuntimeException(e); } @@ -469,7 +468,7 @@ protected String getPath(DwNodeEntity dwNode, SpecHandlerContext context) { try { return FolderUtils.normalizeConfigPackPathToSpec( - type.getCode(), dwNode.getFolder(), getConfigPack(), context.getLocale()) + "/" + dwNode.getName(); + type.getCode(), dwNode.getFolder(), getConfigPack(), context.getLocale()) + "/" + dwNode.getName(); } catch (Exception e) { throw new RuntimeException(e); } @@ -491,7 +490,7 @@ private List toNodeIos(List ios) { a.setData(out.getData()); a.setRefTableName(out.getRefTableName()); a.setIsDefault(Objects.equals(IoParseType.SYSTEM.getCode(), out.getParseType())); - return (T) a; + return (T)a; }).collect(Collectors.toList()); } diff --git a/client/migrationx-domain/migrationx-domain-dataworks/src/main/java/com/aliyun/dataworks/migrationx/domain/dataworks/service/spec/handler/EmrNodeSpecHandler.java b/client/migrationx-domain/migrationx-domain-dataworks/src/main/java/com/aliyun/dataworks/migrationx/domain/dataworks/service/spec/handler/EmrNodeSpecHandler.java index bdeeeee..6e8aa72 100644 --- a/client/migrationx-domain/migrationx-domain-dataworks/src/main/java/com/aliyun/dataworks/migrationx/domain/dataworks/service/spec/handler/EmrNodeSpecHandler.java +++ 
b/client/migrationx-domain/migrationx-domain-dataworks/src/main/java/com/aliyun/dataworks/migrationx/domain/dataworks/service/spec/handler/EmrNodeSpecHandler.java @@ -55,25 +55,25 @@ public SpecScriptRuntime toSpecScriptRuntime(DwNodeEntity scr) { CodeModel code = CodeModelFactory.getCodeModel(scr.getType(), scr.getCode()); Map emrJobConfig = Maps.newHashMap(); Optional.ofNullable(code.getCodeModel()).flatMap(emrCode -> Optional.ofNullable(emrCode.getLauncher()).map(EmrLauncher::getAllocationSpec)) - .ifPresent(allocSpecMap -> { - EmrAllocationSpec allocSpec = EmrAllocationSpec.of(allocSpecMap); - emrJobConfig.put("session_enabled", allocSpec.getReuseSession()); - emrJobConfig.put("priority", allocSpec.getPriority()); - emrJobConfig.put("cores", allocSpec.getVcores()); - emrJobConfig.put("memory", allocSpec.getMemory()); - emrJobConfig.put("queue", allocSpec.getQueue()); - emrJobConfig.put("submit_mode", Optional.ofNullable(allocSpec.getUseGateway()) - .map(useGateway -> useGateway ? EmrJobSubmitMode.LOCAL : EmrJobSubmitMode.YARN)); - emrJobConfig.put("submitter", allocSpec.getUserName()); - emrJobConfig.put("execute_mode", Optional.ofNullable(allocSpec.getBatchMode()) - .map(batchMode -> batchMode ? EmrJobExecuteMode.BATCH : EmrJobExecuteMode.SINGLE)); - }); + .ifPresent(allocSpecMap -> { + EmrAllocationSpec allocSpec = EmrAllocationSpec.of(allocSpecMap); + emrJobConfig.put("session_enabled", allocSpec.getReuseSession()); + emrJobConfig.put("priority", allocSpec.getPriority()); + emrJobConfig.put("cores", allocSpec.getVcores()); + emrJobConfig.put("memory", allocSpec.getMemory()); + emrJobConfig.put("queue", allocSpec.getQueue()); + emrJobConfig.put("submit_mode", Optional.ofNullable(allocSpec.getUseGateway()) + .map(useGateway -> useGateway ? EmrJobSubmitMode.LOCAL : EmrJobSubmitMode.YARN)); + emrJobConfig.put("submitter", allocSpec.getUserName()); + emrJobConfig.put("execute_mode", Optional.ofNullable(allocSpec.getBatchMode()) + .map(batchMode -> batchMode ? 
EmrJobExecuteMode.BATCH : EmrJobExecuteMode.SINGLE)); + }); emrRuntime.setEmrJobConfig(emrJobConfig); return emrRuntime; } @Override - public String toSpectScriptContent(DwNodeEntity dmNodeBO) { + public String toSpecScriptContent(DwNodeEntity dmNodeBO) { CodeModel code = CodeModelFactory.getCodeModel(dmNodeBO.getType(), dmNodeBO.getCode()); return code.getSourceCode(); } diff --git a/client/migrationx-domain/migrationx-domain-dataworks/src/test/java/com/aliyun/dataworks/migrationx/domain/dataworks/service/converter/DataWorksSpecNodeConverterTest.java b/client/migrationx-domain/migrationx-domain-dataworks/src/test/java/com/aliyun/dataworks/migrationx/domain/dataworks/service/converter/DataWorksSpecNodeConverterTest.java index 18c1375..c9ac3b5 100644 --- a/client/migrationx-domain/migrationx-domain-dataworks/src/test/java/com/aliyun/dataworks/migrationx/domain/dataworks/service/converter/DataWorksSpecNodeConverterTest.java +++ b/client/migrationx-domain/migrationx-domain-dataworks/src/test/java/com/aliyun/dataworks/migrationx/domain/dataworks/service/converter/DataWorksSpecNodeConverterTest.java @@ -1,14 +1,20 @@ package com.aliyun.dataworks.migrationx.domain.dataworks.service.converter; +import java.util.Collections; + import com.alibaba.fastjson2.JSON; import com.aliyun.dataworks.common.spec.SpecUtil; import com.aliyun.dataworks.common.spec.domain.DataWorksWorkflowSpec; import com.aliyun.dataworks.common.spec.domain.Specification; +import com.aliyun.dataworks.common.spec.domain.noref.SpecDoWhile; +import com.aliyun.dataworks.common.spec.domain.ref.SpecNode; +import com.aliyun.dataworks.common.spec.domain.ref.SpecWorkflow; import com.aliyun.dataworks.migrationx.domain.dataworks.objects.entity.client.FileDetail; import com.aliyun.dataworks.migrationx.domain.dataworks.objects.entity.v5.DataSnapshot; import com.aliyun.dataworks.migrationx.domain.dataworks.objects.entity.v5.DataSnapshot.DataSnapshotContent; import com.aliyun.dataworks.migrationx.domain.dataworks.objects.types.NodeUseType; +import lombok.extern.slf4j.Slf4j; import org.junit.Assert; import org.junit.Test; @@ -16,6 +22,7 @@ * @author 戒迷 * @date 2024/4/16 */ +@Slf4j public class DataWorksSpecNodeConverterTest { @Test @@ -27,48 +34,71 @@ public void testHandleNodeSpec() throws Exception { + "\t\t\"nodes\":[\n" + "\t\t\t{\n" + "\t\t\t\t\"recurrence\":\"Normal\",\n" - + "\t\t\t\t\"id\":\"26248077\",\n" + + "\t\t\t\t\"id\":\"7031136461380012389\",\n" + "\t\t\t\t\"timeout\":0,\n" + "\t\t\t\t\"instanceMode\":\"T+1\",\n" + "\t\t\t\t\"rerunMode\":\"Allowed\",\n" - + "\t\t\t\t\"rerunTimes\":0,\n" - + "\t\t\t\t\"rerunInterval\":120000,\n" + + "\t\t\t\t\"rerunTimes\":3,\n" + + "\t\t\t\t\"rerunInterval\":180000,\n" + "\t\t\t\t\"datasource\":{\n" - + "\t\t\t\t\t\"name\":\"odps_first\",\n" - + "\t\t\t\t\t\"type\":\"odps\"\n" + + "\t\t\t\t\t\"name\":\"test_current_account_hadoop\",\n" + + "\t\t\t\t\t\"type\":\"emr\"\n" + "\t\t\t\t},\n" + "\t\t\t\t\"script\":{\n" - + "\t\t\t\t\t\"path\":\"业务流程/建模引擎/MaxCompute/数据开发/config_driver数据同步/model_table\",\n" + + "\t\t\t\t\t\"language\":\"hive-sql\",\n" + + "\t\t\t\t\t\"path\":\"212112/hive01\",\n" + + "\t\t\t\t\t\"content\":\"--EMR Hive SQL\n" + + "--********************************************************************--\n" + + "--author: dw_on_emr_qa3@test.aliyunid.com\n" + + "--create time: 2024-09-10 11:07:44\n" + + "--EMR任务只能运行在独享资源组上\n" + + "--********************************************************************--\n" + + "select 2;\",\n" + "\t\t\t\t\t\"runtime\":{\n" - + 
"\t\t\t\t\t\t\"command\":\"ODPS_SQL\"\n" - + "\t\t\t\t\t},\n" - + "\t\t\t\t\t\"parameters\":[\n" - + "\t\t\t\t\t\t{\n" - + "\t\t\t\t\t\t\t\"name\":\"bizdate\",\n" - + "\t\t\t\t\t\t\t\"artifactType\":\"Variable\",\n" - + "\t\t\t\t\t\t\t\"scope\":\"NodeParameter\",\n" - + "\t\t\t\t\t\t\t\"type\":\"System\",\n" - + "\t\t\t\t\t\t\t\"value\":\"$[yyyymmdd-1]\"\n" + + "\t\t\t\t\t\t\"command\":\"EMR_HIVE\",\n" + + "\t\t\t\t\t\t\"commandTypeId\":227,\n" + + "\t\t\t\t\t\t\"emrJobConfig\":{\n" + + "\t\t\t\t\t\t\t\n" + "\t\t\t\t\t\t}\n" - + "\t\t\t\t\t]\n" + + "\t\t\t\t\t},\n" + + "\t\t\t\t\t\"id\":\"7064766281846260070\"\n" + "\t\t\t\t},\n" + "\t\t\t\t\"trigger\":{\n" + "\t\t\t\t\t\"type\":\"Scheduler\",\n" - + "\t\t\t\t\t\"cron\":\"00 29 00 * * ?\",\n" + + "\t\t\t\t\t\"id\":\"8439753760239652464\",\n" + + "\t\t\t\t\t\"cron\":\"00 24 00 * * ?\",\n" + "\t\t\t\t\t\"startTime\":\"1970-01-01 00:00:00\",\n" - + "\t\t\t\t\t\"endTime\":\"9999-01-01 15:12:51\",\n" - + "\t\t\t\t\t\"timezone\":\"Asia/Shanghai\"\n" + + "\t\t\t\t\t\"endTime\":\"9999-01-01 00:00:00\",\n" + + "\t\t\t\t\t\"timezone\":\"Asia/Shanghai\",\n" + + "\t\t\t\t\t\"delaySeconds\":0\n" + "\t\t\t\t},\n" + "\t\t\t\t\"runtimeResource\":{\n" - + "\t\t\t\t\t\"resourceGroup\":\"group_20051853\",\n" - + "\t\t\t\t\t\"resourceGroupId\":\"20051853\"\n" + + "\t\t\t\t\t\"resourceGroup\":\"S_res_group_524257424564736_1710147121495\",\n" + + "\t\t\t\t\t\"id\":\"8717954356554596115\",\n" + + "\t\t\t\t\t\"resourceGroupId\":\"67614320\"\n" + + "\t\t\t\t},\n" + + "\t\t\t\t\"name\":\"hive01\",\n" + + "\t\t\t\t\"owner\":\"1107550004253538\",\n" + + "\t\t\t\t\"metadata\":{\n" + + "\t\t\t\t\t\"owner\":\"1107550004253538\",\n" + + "\t\t\t\t\t\"ownerName\":\"dw_on_emr_qa3@test.aliyunid.com\",\n" + + "\t\t\t\t\t\"createTime\":\"2024-09-10 11:07:44\",\n" + + "\t\t\t\t\t\"tenantId\":\"524257424564736\",\n" + + "\t\t\t\t\t\"project\":{\n" + + "\t\t\t\t\t\t\"mode\":\"SIMPLE\",\n" + + "\t\t\t\t\t\t\"projectId\":\"289221\",\n" + + "\t\t\t\t\t\t\"projectIdentifier\":\"emr_meta_test\",\n" + + "\t\t\t\t\t\t\"projectName\":\"EMR 元数据测试\",\n" + + "\t\t\t\t\t\t\"projectOwnerId\":\"1107550004253538\",\n" + + "\t\t\t\t\t\t\"simple\":true,\n" + + "\t\t\t\t\t\t\"tenantId\":\"524257424564736\"\n" + + "\t\t\t\t\t},\n" + + "\t\t\t\t\t\"projectId\":\"289221\"\n" + "\t\t\t\t},\n" - + "\t\t\t\t\"name\":\"model_table\",\n" - + "\t\t\t\t\"owner\":\"370260\",\n" + "\t\t\t\t\"inputs\":{\n" + "\t\t\t\t\t\"nodeOutputs\":[\n" + "\t\t\t\t\t\t{\n" - + "\t\t\t\t\t\t\t\"data\":\"dataworks_meta.dwd_base_config_driver_data_jsondata_df\",\n" + + "\t\t\t\t\t\t\t\"data\":\"emr_meta_test_root\",\n" + "\t\t\t\t\t\t\t\"artifactType\":\"NodeOutput\"\n" + "\t\t\t\t\t\t}\n" + "\t\t\t\t\t]\n" @@ -76,12 +106,10 @@ public void testHandleNodeSpec() throws Exception { + "\t\t\t\t\"outputs\":{\n" + "\t\t\t\t\t\"nodeOutputs\":[\n" + "\t\t\t\t\t\t{\n" - + "\t\t\t\t\t\t\t\"data\":\"dataworks_analyze.26248077_out\",\n" - + "\t\t\t\t\t\t\t\"artifactType\":\"NodeOutput\"\n" - + "\t\t\t\t\t\t},\n" - + "\t\t\t\t\t\t{\n" - + "\t\t\t\t\t\t\t\"data\":\"dataworks_analyze.model_table_config_driver\",\n" - + "\t\t\t\t\t\t\t\"artifactType\":\"NodeOutput\"\n" + + "\t\t\t\t\t\t\t\"data\":\"7031136461380012389\",\n" + + "\t\t\t\t\t\t\t\"artifactType\":\"NodeOutput\",\n" + + "\t\t\t\t\t\t\t\"refTableName\":\"hive01\",\n" + + "\t\t\t\t\t\t\t\"isDefault\":true\n" + "\t\t\t\t\t\t}\n" + "\t\t\t\t\t]\n" + "\t\t\t\t}\n" @@ -89,15 +117,19 @@ public void testHandleNodeSpec() throws Exception { + "\t\t],\n" + "\t\t\"flow\":[\n" + "\t\t\t{\n" - 
+ "\t\t\t\t\"nodeId\":\"26248077\",\n" + + "\t\t\t\t\"nodeId\":\"7031136461380012389\",\n" + "\t\t\t\t\"depends\":[\n" + "\t\t\t\t\t{\n" + "\t\t\t\t\t\t\"type\":\"Normal\",\n" - + "\t\t\t\t\t\t\"output\":\"dataworks_meta.dwd_base_config_driver_data_jsondata_df\"\n" + + "\t\t\t\t\t\t\"output\":\"emr_meta_test_root\"\n" + "\t\t\t\t\t}\n" + "\t\t\t\t]\n" + "\t\t\t}\n" + "\t\t]\n" + + "\t},\n" + + "\t\"metadata\":{\n" + + "\t\t\"gmtModified\":1725937675000,\n" + + "\t\t\"uuid\":\"7031136461380012389\"\n" + "\t}\n" + "}"; Specification spec = SpecUtil.parseToDomain(specStr); @@ -476,6 +508,63 @@ public void testDataSnapshotToFileDetail() { Assert.assertEquals(content.getContent(), fileDetail.getFile().getContent()); Assert.assertNotNull(fileDetail.getNodeCfg()); } + + @Test + public void testDataSnapshotComponentToFileDetail() { + String specStr = "{\n" + + "\t\"version\":\"1.1.0\",\n" + + "\t\"kind\":\"Component\",\n" + + "\t\"spec\":{\n" + + "\t\t\"components\":[\n" + + "\t\t\t{\n" + + "\t\t\t\t\"name\":\"test_sql_component\",\n" + + "\t\t\t\t\"id\":\"5640313746029937468\",\n" + + "\t\t\t\t\"owner\":\"453125\",\n" + + "\t\t\t\t\"script\":{\n" + + "\t\t\t\t\t\"id\":\"5206893221480063330\",\n" + + "\t\t\t\t\t\"language\":\"odps-sql\",\n" + + "\t\t\t\t\t\"path\":\"test_sql_component\",\n" + + "\t\t\t\t\t\"content\":\"select 1;\",\n" + + "\t\t\t\t\t\"runtime\":{\n" + + "\t\t\t\t\t\t\"command\":\"SQL_COMPONENT\",\n" + + "\t\t\t\t\t\t\"commandTypeId\":3010\n" + + "\t\t\t\t\t}\n" + + "\t\t\t\t}\n" + + "\t\t\t}\n" + + "\t\t]\n" + + "\t}\n" + + "}"; + + Specification spec = SpecUtil.parseToDomain(specStr); + FileDetail fileDetail = DataWorksSpecNodeConverter.componentSpecToFileDetail(spec); + Assert.assertNotNull(fileDetail); + Assert.assertNotNull(fileDetail.getFile()); + log.info("file: {}", JSON.toJSONString(fileDetail.getFile())); + Assert.assertEquals(30, (int)fileDetail.getFile().getUseType()); + Assert.assertEquals(3010, (int)fileDetail.getFile().getFileType()); + } + + @Test + public void testWorkflowInnerNodeMatchCase() { + Specification specification = new Specification<>(); + DataWorksWorkflowSpec spec = new DataWorksWorkflowSpec(); + SpecWorkflow wf = new SpecWorkflow(); + wf.setId("wf1"); + SpecNode dowhile = new SpecNode(); + dowhile.setId("dowhile1"); + SpecDoWhile dowhileDef = new SpecDoWhile(); + SpecNode dowhileInner1 = new SpecNode(); + dowhileInner1.setId("dowhileInner1"); + dowhileDef.setNodes(Collections.singletonList(dowhileInner1)); + dowhile.setDoWhile(dowhileDef); + wf.setNodes(Collections.singletonList(dowhile)); + spec.setWorkflows(Collections.singletonList(wf)); + specification.setSpec(spec); + + Assert.assertNotNull(DataWorksSpecNodeConverter.getMatchSpecNode(specification.getSpec(), "dowhileInner1")); + Assert.assertEquals(dowhileInner1.getId(), DataWorksSpecNodeConverter.getMatchSpecNode(specification.getSpec(), "dowhileInner1").getId()); + Assert.assertEquals(dowhile.getId(), DataWorksSpecNodeConverter.getMatchSpecNode(specification.getSpec(), "dowhile1").getId()); + } } // Generated with love by TestMe :) Please report issues and submit feature requests at: http://weirddev.com/forum#!/testme \ No newline at end of file diff --git a/client/migrationx-domain/migrationx-domain-dolphinscheduler/pom.xml b/client/migrationx-domain/migrationx-domain-dolphinscheduler/pom.xml index 1f38866..222c627 100644 --- a/client/migrationx-domain/migrationx-domain-dolphinscheduler/pom.xml +++ b/client/migrationx-domain/migrationx-domain-dolphinscheduler/pom.xml @@ -20,7 +20,7 @@ 
migrationx-domain com.aliyun.dataworks - 1.1.7-1 + 1.1.7-workflow-preview3 ../pom.xml 4.0.0 diff --git a/client/migrationx-domain/migrationx-domain-oozie/pom.xml b/client/migrationx-domain/migrationx-domain-oozie/pom.xml index 5666ea1..8d8e44d 100644 --- a/client/migrationx-domain/migrationx-domain-oozie/pom.xml +++ b/client/migrationx-domain/migrationx-domain-oozie/pom.xml @@ -20,7 +20,7 @@ migrationx-domain com.aliyun.dataworks - 1.1.7-1 + 1.1.7-workflow-preview3 ../pom.xml 4.0.0 diff --git a/client/migrationx-domain/pom.xml b/client/migrationx-domain/pom.xml index 2c8e8fb..651ba62 100644 --- a/client/migrationx-domain/pom.xml +++ b/client/migrationx-domain/pom.xml @@ -23,13 +23,12 @@ migrationx com.aliyun.dataworks - 1.1.7-1 + 1.1.7-workflow-preview3 ../pom.xml migrationx-domain - migrationx-domain-core migrationx-domain-dolphinscheduler diff --git a/client/migrationx-reader/pom.xml b/client/migrationx-reader/pom.xml index e34f2f1..2edc06b 100644 --- a/client/migrationx-reader/pom.xml +++ b/client/migrationx-reader/pom.xml @@ -20,7 +20,7 @@ migrationx com.aliyun.dataworks - 1.1.7-1 + 1.1.7-workflow-preview3 ../pom.xml 4.0.0 diff --git a/client/migrationx-transformer/pom.xml b/client/migrationx-transformer/pom.xml index f6d1e16..defd394 100644 --- a/client/migrationx-transformer/pom.xml +++ b/client/migrationx-transformer/pom.xml @@ -20,7 +20,7 @@ migrationx com.aliyun.dataworks - 1.1.7-1 + 1.1.7-workflow-preview3 ../pom.xml 4.0.0 diff --git a/client/migrationx-writer/pom.xml b/client/migrationx-writer/pom.xml index 446d357..3d0817f 100644 --- a/client/migrationx-writer/pom.xml +++ b/client/migrationx-writer/pom.xml @@ -21,7 +21,7 @@ com.aliyun.dataworks migrationx - 1.1.7-1 + 1.1.7-workflow-preview3 ../pom.xml diff --git a/client/pom.xml b/client/pom.xml index 543aefe..ba793bf 100644 --- a/client/pom.xml +++ b/client/pom.xml @@ -19,7 +19,7 @@ com.aliyun.dataworks dataworks-tool - 1.1.7-1 + 1.1.7-workflow-preview3 ../pom.xml @@ -428,6 +428,19 @@ + + org.apache.maven.plugins + maven-source-plugin + 3.0.1 + + + attach-sources + + jar + + + + diff --git a/pom.xml b/pom.xml index 57999a2..3103bc1 100644 --- a/pom.xml +++ b/pom.xml @@ -19,7 +19,7 @@ 4.0.0 com.aliyun.dataworks dataworks-tool - 1.1.7-1 + 1.1.7-workflow-preview3 pom dataworks-tool @@ -33,7 +33,7 @@ required maven 3.5.0+ refer to https://maven.apache.org/maven-ci-friendly.html --> - 1.1.7-1 + 1.1.7-SNAPSHOT 1.8 1.8 UTF-8 @@ -293,19 +293,6 @@ maven-dependency-plugin 3.6.1 - - org.apache.maven.plugins - maven-source-plugin - 3.2.1 - - - attach-sources - - jar - - - - diff --git a/spec/pom.xml b/spec/pom.xml index e4c911f..25471d4 100644 --- a/spec/pom.xml +++ b/spec/pom.xml @@ -19,7 +19,7 @@ dataworks-tool com.aliyun.dataworks - 1.1.7-1 + 1.1.7-workflow-preview3 ../pom.xml @@ -116,6 +116,19 @@ org.apache.maven.plugins maven-deploy-plugin + + org.apache.maven.plugins + maven-source-plugin + 3.0.1 + + + attach-sources + + jar + + + + diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/SpecUtil.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/SpecUtil.java index eb8a981..5ebf670 100644 --- a/spec/src/main/java/com/aliyun/dataworks/common/spec/SpecUtil.java +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/SpecUtil.java @@ -30,6 +30,7 @@ import com.aliyun.dataworks.common.spec.parser.SpecParserFactory; import com.aliyun.dataworks.common.spec.parser.ToDomainRootParser; import com.aliyun.dataworks.common.spec.utils.ParserUtil; +import com.aliyun.dataworks.common.spec.utils.SpecDevUtil; import 
com.aliyun.dataworks.common.spec.writer.SpecWriterContext; import com.aliyun.dataworks.common.spec.writer.WriterFactory; import com.aliyun.dataworks.common.spec.writer.impl.SpecificationWriter; @@ -81,7 +82,7 @@ public static Object write(T specObject, SpecWriterContext context) { return Optional.ofNullable(WriterFactory.getWriter(specObject.getClass(), context)) .map(writer -> writer.write(specObject, context)) - .orElse(JSON.toJSON(specObject)); + .orElse(SpecDevUtil.writeJsonObject(specObject, false)); } @SuppressWarnings({"unchecked", "rawtypes"}) diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/DataWorksTableSpec.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/DataWorksTableSpec.java index e2057cb..bab970a 100644 --- a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/DataWorksTableSpec.java +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/DataWorksTableSpec.java @@ -6,11 +6,13 @@ import com.aliyun.dataworks.common.spec.domain.enums.SpecKind; import com.aliyun.dataworks.common.spec.domain.ref.SpecTable; import lombok.Data; +import lombok.EqualsAndHashCode; /** * @author 子梁 * @date 2024/4/23 */ +@EqualsAndHashCode(callSuper = true) @Data public class DataWorksTableSpec extends SpecTable implements Spec { @Override diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/DataWorksWorkflowSpec.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/DataWorksWorkflowSpec.java index d137df1..23b0243 100644 --- a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/DataWorksWorkflowSpec.java +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/DataWorksWorkflowSpec.java @@ -31,6 +31,7 @@ import com.aliyun.dataworks.common.spec.domain.ref.SpecScript; import com.aliyun.dataworks.common.spec.domain.ref.SpecTrigger; import com.aliyun.dataworks.common.spec.domain.ref.SpecVariable; +import com.aliyun.dataworks.common.spec.domain.ref.SpecWorkflow; import com.aliyun.dataworks.common.spec.domain.ref.component.SpecComponent; import lombok.Data; import lombok.EqualsAndHashCode; @@ -41,9 +42,12 @@ import static com.aliyun.dataworks.common.spec.domain.enums.SpecKind.DATASOURCE; import static com.aliyun.dataworks.common.spec.domain.enums.SpecKind.DATA_CATALOG; import static com.aliyun.dataworks.common.spec.domain.enums.SpecKind.DATA_QUALITY; +import static com.aliyun.dataworks.common.spec.domain.enums.SpecKind.FUNCTION; import static com.aliyun.dataworks.common.spec.domain.enums.SpecKind.MANUAL_NODE; import static com.aliyun.dataworks.common.spec.domain.enums.SpecKind.MANUAL_WORKFLOW; +import static com.aliyun.dataworks.common.spec.domain.enums.SpecKind.NODE; import static com.aliyun.dataworks.common.spec.domain.enums.SpecKind.PAIFLOW; +import static com.aliyun.dataworks.common.spec.domain.enums.SpecKind.RESOURCE; import static com.aliyun.dataworks.common.spec.domain.enums.SpecKind.TEMPORARY_WORKFLOW; /** @@ -68,6 +72,7 @@ public class DataWorksWorkflowSpec extends SpecRefEntity implements Spec { private List fileResources; private List functions; private List nodes; + private List workflows; private List components; private List flow; @@ -83,6 +88,9 @@ public List getKinds() { DATASOURCE, DATA_QUALITY, DATA_CATALOG, - COMPONENT); + COMPONENT, + NODE, + RESOURCE, + FUNCTION); } } \ No newline at end of file diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/SpecEntity.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/SpecEntity.java index 8040d21..3dce5d2 
100644 --- a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/SpecEntity.java +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/SpecEntity.java @@ -29,7 +29,7 @@ * @date 2023/7/4 */ @Data -@EqualsAndHashCode(exclude = "metadata") +@EqualsAndHashCode(exclude = {"metadata", "context"}) public class SpecEntity { private Map metadata; diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/SpecRefEntity.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/SpecRefEntity.java index 485b2bb..6895a3b 100644 --- a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/SpecRefEntity.java +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/SpecRefEntity.java @@ -29,7 +29,7 @@ * @date 2023/7/25 */ @Data -@EqualsAndHashCode(callSuper = true) +@EqualsAndHashCode(callSuper = true, exclude = "isRef") @ToString(callSuper = true) public class SpecRefEntity extends SpecEntity { private String id; diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/dw/codemodel/EmrCode.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/dw/codemodel/EmrCode.java index af48c7d..a23ec13 100644 --- a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/dw/codemodel/EmrCode.java +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/dw/codemodel/EmrCode.java @@ -24,7 +24,6 @@ import com.aliyun.dataworks.common.spec.domain.dw.types.CalcEngineType; import com.aliyun.dataworks.common.spec.domain.dw.types.CodeProgramType; import com.aliyun.dataworks.common.spec.utils.JSONUtils; - import lombok.Data; import lombok.EqualsAndHashCode; import lombok.ToString; @@ -53,9 +52,6 @@ public class EmrCode extends AbstractBaseCode { @Override public EmrCode parse(String code) { EmrCode m = JSONUtils.parseObject(code, EmrCode.class); - if (m == null) { - return new EmrCode(); - } Optional.ofNullable(m).ifPresent(mm -> { this.setName(mm.getName()); this.setType(mm.getType()); @@ -108,8 +104,8 @@ public static EmrJobType getEmrJobType(String defaultNodeType) { @Override public List getProgramTypes() { return Arrays.stream(CodeProgramType.values()) - .map(Enum::name) - .filter(named -> CodeProgramType.matchEngine(named, CalcEngineType.EMR)).collect(Collectors.toList()); + .map(Enum::name) + .filter(named -> CodeProgramType.matchEngine(named, CalcEngineType.EMR)).collect(Collectors.toList()); } @Override @@ -131,9 +127,9 @@ public void setSourceCode(String sourceCode) { @Override public String getSourceCode() { return Optional.ofNullable(properties) - .map(EmrProperty::getArguments) - .orElse(ListUtils.emptyIfNull(null)) - .stream() - .findFirst().orElse(null); + .map(EmrProperty::getArguments) + .orElse(ListUtils.emptyIfNull(null)) + .stream() + .findFirst().orElse(null); } } diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/dw/nodemodel/DataWorksNodeAdapter.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/dw/nodemodel/DataWorksNodeAdapter.java index c7656ea..10e1e5e 100644 --- a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/dw/nodemodel/DataWorksNodeAdapter.java +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/dw/nodemodel/DataWorksNodeAdapter.java @@ -26,8 +26,8 @@ import com.alibaba.fastjson2.JSON; import com.aliyun.dataworks.common.spec.domain.DataWorksWorkflowSpec; +import com.aliyun.dataworks.common.spec.domain.SpecRefEntity; import com.aliyun.dataworks.common.spec.domain.Specification; -import com.aliyun.dataworks.common.spec.domain.adapter.SpecNodeAdapter; 
import com.aliyun.dataworks.common.spec.domain.dw.types.CodeProgramType; import com.aliyun.dataworks.common.spec.domain.enums.DependencyType; import com.aliyun.dataworks.common.spec.domain.enums.TriggerType; @@ -43,6 +43,7 @@ import com.aliyun.dataworks.common.spec.domain.ref.SpecScript; import com.aliyun.dataworks.common.spec.domain.ref.SpecTrigger; import com.aliyun.dataworks.common.spec.domain.ref.SpecVariable; +import com.aliyun.dataworks.common.spec.domain.ref.SpecWorkflow; import com.aliyun.dataworks.common.spec.domain.ref.runtime.SpecScriptRuntime; import com.aliyun.dataworks.common.spec.exception.SpecException; import lombok.Builder; @@ -59,16 +60,18 @@ * @author 聿剑 * @date 2023/11/9 */ -public class DataWorksNodeAdapter implements SpecNodeAdapter, DataWorksNode, DataWorksNodeAdapterContextAware { +public class DataWorksNodeAdapter implements DataWorksNode, DataWorksNodeAdapterContextAware { public static final String TIMEOUT = "alisaTaskKillTimeout"; public static final String IGNORE_BRANCH_CONDITION_SKIP = "ignoreBranchConditionSkip"; public static final String LOOP_COUNT = "loopCount"; + public static final String STREAM_LAUNCH_MODE = "streamLaunchMode"; public static final Integer NODE_TYPE_NORMAL = 0; public static final Integer NODE_TYPE_MANUAL = 1; public static final Integer NODE_TYPE_PAUSE = 2; public static final Integer NODE_TYPE_SKIP = 3; private static final Logger logger = LoggerFactory.getLogger(DataWorksNodeAdapter.class); + private static final String DELAY_SECONDS = "delaySeconds"; /** * @author 聿剑 @@ -83,32 +86,40 @@ public static class Context { protected final DataWorksWorkflowSpec specification; protected final Specification spec; - protected final SpecNode specNode; + protected final SpecEntityDelegate delegate; protected Context context; - public DataWorksNodeAdapter(Specification specification, SpecNode specNode) { + public DataWorksNodeAdapter(Specification specification, SpecRefEntity specEntity) { this.spec = specification; this.specification = this.spec.getSpec(); - this.specNode = specNode; + this.delegate = new SpecEntityDelegate<>(specEntity); this.context = Context.builder().build(); } - public DataWorksNodeAdapter(Specification specification, SpecNode specNode, Context context) { + public DataWorksNodeAdapter(Specification specification, SpecRefEntity specEntity, Context context) { this.spec = specification; this.specification = this.spec.getSpec(); - this.specNode = specNode; + this.delegate = new SpecEntityDelegate<>(specEntity); this.context = context; } - @Override - public SpecNode getSpecNode() { - return specNode; - } - @Override public DwNodeDependentTypeInfo getDependentType(Function, List> getNodeIdsByOutputs) { - SpecFlowDepend specNodeFlowDepend = ListUtils.emptyIfNull(specification.getFlow()).stream() - .filter(fd -> StringUtils.equalsIgnoreCase(specNode.getId(), fd.getNodeId().getId())) + List flows = ListUtils.emptyIfNull(specification.getFlow()); + // if the current node is inner node of a workflow, use the workflow's dependency list to get dependency type + SpecWorkflow outerWorkflow = Optional.ofNullable(spec) + .map(Specification::getSpec) + .map(DataWorksWorkflowSpec::getWorkflows).flatMap(wfs -> + wfs.stream().filter(wf -> + ListUtils.emptyIfNull(wf.getNodes()).stream().anyMatch(n -> + StringUtils.equalsIgnoreCase(n.getId(), delegate.getId()))).findFirst()) + .orElse(null); + if (outerWorkflow != null) { + flows = outerWorkflow.getDependencies(); + } + + SpecFlowDepend specNodeFlowDepend = ListUtils.emptyIfNull(flows).stream() 
+ .filter(fd -> StringUtils.equalsIgnoreCase(delegate.getId(), fd.getNodeId().getId())) .peek(fd -> logger.info("node flow depends source nodeId: {}, depends: {}", JSON.toJSONString(fd.getNodeId()), JSON.toJSONString(fd.getDepends()))) .findFirst().orElse(null); @@ -173,34 +184,34 @@ private DwNodeDependentTypeInfo getDependentType(SpecFlowDepend specNodeFlowDepe @Override public String getCode() { - DataWorksNodeCodeAdapter codeAdapter = new DataWorksNodeCodeAdapter(specNode); + DataWorksNodeCodeAdapter codeAdapter = new DataWorksNodeCodeAdapter(delegate.getObject()); codeAdapter.setContext(context); return codeAdapter.getCode(); } @Override public List getInputs() { - return new DataWorksNodeInputOutputAdapter(this.spec, specNode).getInputs(); + return new DataWorksNodeInputOutputAdapter(this.spec, delegate.getObject()).getInputs(); } @Override public List getOutputs() { - return new DataWorksNodeInputOutputAdapter(this.spec, specNode).getOutputs(); + return new DataWorksNodeInputOutputAdapter(this.spec, delegate.getObject()).getOutputs(); } @Override public List getInputContexts() { - return new DataWorksNodeInputOutputAdapter(this.spec, specNode).getInputContexts(); + return new DataWorksNodeInputOutputAdapter(this.spec, delegate.getObject()).getInputContexts(); } @Override public List getOutputContexts() { - return new DataWorksNodeInputOutputAdapter(this.spec, specNode).getOutputContexts(); + return new DataWorksNodeInputOutputAdapter(this.spec, delegate.getObject()).getOutputContexts(); } @Override public String getParaValue() { - return Optional.ofNullable(specNode).map(SpecNode::getScript).map(SpecScript::getRuntime) + return Optional.ofNullable(delegate.getScript()).map(SpecScript::getRuntime) .map(SpecScriptRuntime::getCommand) .map(cmd -> { if (StringUtils.equalsIgnoreCase(CodeProgramType.DIDE_SHELL.name(), cmd) @@ -208,9 +219,9 @@ public String getParaValue() { return getShellParaValue(); } - if (ListUtils.emptyIfNull(specNode.getScript().getParameters()).stream() + if (ListUtils.emptyIfNull(delegate.getScript().getParameters()).stream() .anyMatch(v -> VariableType.NO_KV_PAIR_EXPRESSION.equals(v.getType()))) { - return ListUtils.emptyIfNull(specNode.getScript().getParameters()).stream() + return ListUtils.emptyIfNull(delegate.getScript().getParameters()).stream() .filter(v -> VariableType.NO_KV_PAIR_EXPRESSION.equals(v.getType())) .findAny() .map(SpecVariable::getValue).orElse(null); @@ -221,7 +232,7 @@ public String getParaValue() { } private String getKvParaValue() { - return Optional.ofNullable(specNode).map(SpecNode::getScript).map(SpecScript::getParameters) + return Optional.ofNullable(delegate.getScript()).map(SpecScript::getParameters) .map(parameters -> parameters.stream() .filter(v -> v.getReferenceVariable() == null) .map(p -> p.getName() + "=" + p.getValue()).collect(Collectors.joining(" "))) @@ -229,7 +240,7 @@ private String getKvParaValue() { } private String getShellParaValue() { - return ListUtils.emptyIfNull(specNode.getScript().getParameters()).stream() + return ListUtils.emptyIfNull(delegate.getScript().getParameters()).stream() .sorted(Comparator.comparing(SpecVariable::getName)) .filter(v -> v.getReferenceVariable() == null && v.getValue() != null) .map(SpecVariable::getValue).collect(Collectors.joining(" ")); @@ -238,6 +249,7 @@ private String getShellParaValue() { @Override public Map getExtConfig() { final Map extConfig = new HashMap<>(); + SpecNode specNode = (SpecNode)delegate.getObject(); Optional.ofNullable(specNode.getTimeout()).filter(timeout -> 
timeout > 0).ifPresent(timeout -> extConfig.put(TIMEOUT, specNode.getTimeout())); @@ -250,11 +262,21 @@ public Map getExtConfig() { Optional.ofNullable(specNode.getForeach()).map(SpecForEach::getMaxIterations).ifPresent(maxIterations -> extConfig.put(LOOP_COUNT, maxIterations)); + Optional.ofNullable(specNode.getTrigger()).map(SpecTrigger::getDelaySeconds).ifPresent(delaySeconds -> + extConfig.put(DELAY_SECONDS, delaySeconds)); + + Optional.ofNullable(specNode.getScript()).map(SpecScript::getRuntime) + .map(SpecScriptRuntime::getStreamJobConfig) + .map(emrJobConfig -> emrJobConfig.get(STREAM_LAUNCH_MODE)) + .map(String::valueOf).filter(StringUtils::isNumeric) + .map(Integer::valueOf) + .ifPresent(i -> extConfig.put(STREAM_LAUNCH_MODE, i)); return extConfig; } @Override public Integer getNodeType() { + SpecNode specNode = (SpecNode)delegate.getObject(); if (Optional.ofNullable(specNode.getTrigger()).map(SpecTrigger::getType).map(TriggerType.MANUAL::equals).orElse(false)) { return NODE_TYPE_MANUAL; } @@ -274,8 +296,8 @@ public Integer getNodeType() { @Override public Integer getPrgType(Function getNodeTypeByName) { - SpecScriptRuntime runtime = Optional.ofNullable(specNode).map(SpecNode::getScript).map(SpecScript::getRuntime).orElseThrow( - () -> new SpecException("node runtime info not found: " + Optional.ofNullable(specNode).map(SpecNode::getScript).orElse(null))); + SpecScriptRuntime runtime = Optional.ofNullable(delegate.getScript()).map(SpecScript::getRuntime) + .orElseThrow(() -> new SpecException("node runtime info not found: " + delegate.getScript())); return Optional.ofNullable(runtime.getCommandTypeId()) .orElseGet(() -> Optional.ofNullable(runtime.getCommand()) @@ -292,4 +314,4 @@ public void setContext(Context context) { public Context getContext() { return this.context; } -} +} \ No newline at end of file diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/dw/nodemodel/DataWorksNodeCodeAdapter.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/dw/nodemodel/DataWorksNodeCodeAdapter.java index 477b8ec..a92c8af 100644 --- a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/dw/nodemodel/DataWorksNodeCodeAdapter.java +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/dw/nodemodel/DataWorksNodeCodeAdapter.java @@ -25,6 +25,7 @@ import java.util.stream.Collectors; import com.aliyun.dataworks.common.spec.domain.SpecConstants; +import com.aliyun.dataworks.common.spec.domain.SpecRefEntity; import com.aliyun.dataworks.common.spec.domain.dw.codemodel.Code; import com.aliyun.dataworks.common.spec.domain.dw.codemodel.CodeModel; import com.aliyun.dataworks.common.spec.domain.dw.codemodel.CodeModelFactory; @@ -73,23 +74,23 @@ public class DataWorksNodeCodeAdapter implements DataWorksNodeAdapterContextAwar private static final String LOGIC_OR = "or"; private static final List JOIN_BRANCH_LOGICS = Arrays.asList(LOGIC_OR, LOGIC_AND); - private final SpecNode specNode; + private final SpecEntityDelegate delegate; private Context context; - public DataWorksNodeCodeAdapter(SpecNode specNode) { - this.specNode = specNode; + public DataWorksNodeCodeAdapter(SpecRefEntity entity) { + this.delegate = new SpecEntityDelegate<>(entity); } public String getCode() { - SpecScript script = Optional.ofNullable(specNode).map(SpecNode::getScript).orElseThrow( - () -> new SpecException(SpecErrorCode.PARSE_ERROR, "node.script is null")); + SpecScript script = Optional.ofNullable(delegate.getScript()).orElseThrow( + () -> new 
SpecException(SpecErrorCode.PARSE_ERROR, "node.script is null")); SpecScriptRuntime runtime = Optional.ofNullable(script.getRuntime()).orElseThrow( () -> new SpecException(SpecErrorCode.PARSE_ERROR, "node.script.runtime is null")); try { String command = runtime.getCommand(); - CodeModel codeModel = CodeModelFactory.getCodeModel(command, specNode.getScript().getContent()); + CodeModel codeModel = CodeModelFactory.getCodeModel(command, null); Code code = codeModel.getCodeModel(); Class codeClass = code.getClass(); @@ -99,11 +100,11 @@ public String getCode() { } if (ControllerBranchCode.class.equals(codeClass)) { - return getControllerBranchCode(specNode, script); + return getControllerBranchCode((SpecNode)delegate.getObject(), script); } if (ControllerJoinCode.class.equals(codeClass)) { - return getControllerJoinCode(specNode); + return getControllerJoinCode((SpecNode)delegate.getObject()); } if (DataIntegrationCode.class.equals(codeClass)) { @@ -115,7 +116,7 @@ public String getCode() { } if (ComponentSqlCode.class.equals(codeClass)) { - return getComponentSqlCode(specNode, script); + return getComponentSqlCode((SpecNode)delegate.getObject(), script); } // common default logic to get content diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/dw/nodemodel/DataWorksNodeInputOutputAdapter.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/dw/nodemodel/DataWorksNodeInputOutputAdapter.java index 73bcc96..622da62 100644 --- a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/dw/nodemodel/DataWorksNodeInputOutputAdapter.java +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/dw/nodemodel/DataWorksNodeInputOutputAdapter.java @@ -23,6 +23,7 @@ import com.alibaba.fastjson2.JSON; import com.aliyun.dataworks.common.spec.domain.DataWorksWorkflowSpec; +import com.aliyun.dataworks.common.spec.domain.SpecRefEntity; import com.aliyun.dataworks.common.spec.domain.Specification; import com.aliyun.dataworks.common.spec.domain.enums.ArtifactType; import com.aliyun.dataworks.common.spec.domain.enums.DependencyType; @@ -35,6 +36,7 @@ import com.aliyun.dataworks.common.spec.domain.ref.SpecNodeOutput; import com.aliyun.dataworks.common.spec.domain.ref.SpecScript; import com.aliyun.dataworks.common.spec.domain.ref.SpecVariable; +import com.aliyun.dataworks.common.spec.domain.ref.SpecWorkflow; import com.aliyun.dataworks.common.spec.exception.SpecErrorCode; import com.aliyun.dataworks.common.spec.exception.SpecException; import com.google.common.base.Joiner; @@ -54,28 +56,58 @@ public class DataWorksNodeInputOutputAdapter { private static final Logger log = LoggerFactory.getLogger(DataWorksNodeInputOutputAdapter.class); protected final Specification spec; - protected final DataWorksWorkflowSpec specification; - protected final SpecNode specNode; + private final SpecEntityDelegate objectDelegate; - public DataWorksNodeInputOutputAdapter(Specification specification, SpecNode specNode) { + public DataWorksNodeInputOutputAdapter(Specification specification, SpecRefEntity entity) { this.spec = specification; - this.specification = this.spec.getSpec(); - this.specNode = specNode; + this.objectDelegate = new SpecEntityDelegate<>(entity); } public List getInputs() { - SpecNode outerNode = ListUtils.emptyIfNull(specification.getNodes()).stream() - // current SpecNode is inner node of other node + List nodes = ListUtils.emptyIfNull(Optional.ofNullable(spec) + .map(Specification::getSpec).map(DataWorksWorkflowSpec::getNodes).orElse(null)); + + // current SpecNode is 
inner node of normal node + SpecNode outerNode = nodes.stream() .filter(node -> ListUtils.emptyIfNull(node.getInnerNodes()).stream() - .anyMatch(innerNode -> StringUtils.equals(innerNode.getId(), specNode.getId()))) + .anyMatch(innerNode -> StringUtils.equals(innerNode.getId(), objectDelegate.getId()))) .findAny().orElse(null); if (outerNode != null) { - return getInputList(outerNode.getInnerFlow(), outerNode.getInnerNodes(), specNode); + return getInputList(outerNode.getInnerDependencies(), outerNode.getInnerNodes(), objectDelegate); + } + + // current node is inner node of workflow + SpecWorkflow outerWorkflow = Optional.ofNullable(spec) + .map(Specification::getSpec) + .map(DataWorksWorkflowSpec::getWorkflows).flatMap(wfs -> + wfs.stream().filter(wf -> + ListUtils.emptyIfNull(wf.getNodes()).stream().anyMatch(n -> + StringUtils.equalsIgnoreCase(n.getId(), objectDelegate.getId()))).findFirst()) + .orElse(null); + if (outerWorkflow != null) { + return getInputList(outerWorkflow.getDependencies(), outerWorkflow.getNodes(), objectDelegate); } - return getInputList(specification.getFlow(), specification.getNodes(), specNode); + + // current node is inner node of the inner node of workflow node + SpecNode container = Optional.ofNullable(spec) + .map(Specification::getSpec) + .map(DataWorksWorkflowSpec::getWorkflows) + .map(wfs -> wfs.stream().map(wf -> ListUtils.emptyIfNull(wf.getNodes())).collect(Collectors.toList())) + .orElse(ListUtils.emptyIfNull(null)) + .stream() + .flatMap(List::stream) + .filter(containerNode -> ListUtils.emptyIfNull(containerNode.getInnerNodes()).stream() + .anyMatch(n -> StringUtils.equalsIgnoreCase(n.getId(), objectDelegate.getId()))) + .findAny().orElse(null); + if (container != null) { + return getInputList(container.getInnerDependencies(), container.getInnerNodes(), objectDelegate); + } + + return getInputList(Optional.ofNullable(spec).map(Specification::getSpec).map(DataWorksWorkflowSpec::getFlow).orElse(null), nodes, + objectDelegate); } - private List getInputList(List flow, List allNodes, SpecNode node) { + private List getInputList(List flow, List allNodes, SpecEntityDelegate node) { List inputs = ListUtils.emptyIfNull(node.getInputs()).stream() .filter(o -> o instanceof SpecNodeOutput) .map(o -> (SpecArtifact)o) @@ -123,7 +155,7 @@ private List getInputList(List flow, List allNo } public List getOutputs() { - return ListUtils.emptyIfNull(specNode.getOutputs()).stream() + return ListUtils.emptyIfNull(objectDelegate.getOutputs()).stream() .filter(o -> o instanceof SpecArtifact) .map(o -> (SpecArtifact)o) .filter(o -> ArtifactType.NODE_OUTPUT.equals(o.getArtifactType())) @@ -131,7 +163,7 @@ public List getOutputs() { } public List getInputContexts() { - return ListUtils.emptyIfNull(specNode.getInputs()).stream() + return ListUtils.emptyIfNull(objectDelegate.getInputs()).stream() .filter(i -> i instanceof SpecArtifact) .filter(i -> ArtifactType.VARIABLE.equals(((SpecArtifact)i).getArtifactType())) .map(i -> (SpecVariable)i) @@ -144,7 +176,7 @@ public List getInputContexts() { } private String getInputContextKey(SpecVariable i) { - return Optional.ofNullable(specNode.getScript()).map(SpecScript::getParameters) + return Optional.ofNullable(objectDelegate.getScript()).map(SpecScript::getParameters) .map(params -> params.stream() .filter(param -> param.getReferenceVariable() != null) .filter(param -> matchVariable(i, param.getReferenceVariable())) @@ -196,7 +228,7 @@ private static String getIoContextRefKey(SpecVariable i, boolean isOutput) { } public List 
getOutputContexts() { - return ListUtils.emptyIfNull(specNode.getOutputs()).stream() + return ListUtils.emptyIfNull(objectDelegate.getOutputs()).stream() .filter(i -> i instanceof SpecArtifact) .filter(i -> ArtifactType.VARIABLE.equals(((SpecArtifact)i).getArtifactType())) .map(i -> (SpecVariable)i) diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/dw/nodemodel/SpecEntityDelegate.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/dw/nodemodel/SpecEntityDelegate.java new file mode 100644 index 0000000..0562b88 --- /dev/null +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/dw/nodemodel/SpecEntityDelegate.java @@ -0,0 +1,61 @@ +/* + * Copyright (c) 2024, Alibaba Cloud; + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.aliyun.dataworks.common.spec.domain.dw.nodemodel; + +import java.util.Collections; +import java.util.List; + +import com.aliyun.dataworks.common.spec.domain.SpecRefEntity; +import com.aliyun.dataworks.common.spec.domain.interfaces.Input; +import com.aliyun.dataworks.common.spec.domain.interfaces.Output; +import com.aliyun.dataworks.common.spec.domain.ref.InputOutputWired; +import com.aliyun.dataworks.common.spec.domain.ref.ScriptWired; +import com.aliyun.dataworks.common.spec.domain.ref.SpecScript; +import lombok.Getter; +import org.apache.commons.collections4.ListUtils; + +/** + * @author 聿剑 + * @date 2024/7/17 + */ +@Getter +public class SpecEntityDelegate implements InputOutputWired, ScriptWired { + private final T object; + + public SpecEntityDelegate(T object) { + this.object = object; + } + + public String getId() { + return object.getId(); + } + + public SpecScript getScript() { + return object instanceof ScriptWired ? ((ScriptWired)object).getScript() : null; + } + + @Override + public List getInputs() { + return object instanceof InputOutputWired ? + Collections.unmodifiableList(ListUtils.emptyIfNull(((InputOutputWired)object).getInputs())) : Collections.emptyList(); + } + + @Override + public List getOutputs() { + return object instanceof InputOutputWired ? + Collections.unmodifiableList(ListUtils.emptyIfNull(((InputOutputWired)object).getOutputs())) : Collections.emptyList(); + } +} diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/enums/FailureStrategy.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/enums/FailureStrategy.java new file mode 100644 index 0000000..13a54ba --- /dev/null +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/enums/FailureStrategy.java @@ -0,0 +1,46 @@ +/* + * Copyright (c) 2024, Alibaba Cloud; + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.aliyun.dataworks.common.spec.domain.enums; + +import com.aliyun.dataworks.common.spec.domain.interfaces.LabelEnum; + +/** + * @author 聿剑 + * @date 2024/07/09 + */ +public enum FailureStrategy implements LabelEnum { + + /** + * Continue + */ + CONTINUE("Continue"), + + /** + * Break + */ + BREAK("Break"); + + private final String label; + + FailureStrategy(String label) { + this.label = label; + } + + @Override + public String getLabel() { + return label; + } +} \ No newline at end of file diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/enums/SpecKind.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/enums/SpecKind.java index 634e270..75981f2 100644 --- a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/enums/SpecKind.java +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/enums/SpecKind.java @@ -22,7 +22,6 @@ * @date 2023/7/4 */ public enum SpecKind implements LabelEnum { - /** * Cycle scheduling workflow */ @@ -76,7 +75,19 @@ public enum SpecKind implements LabelEnum { /** * Component */ - COMPONENT("Component"); + COMPONENT("Component"), + /** + * Resource + */ + RESOURCE("Resource"), + /** + * Function + */ + FUNCTION("Function"), + /** + * Workflow, the new-style workflow + */ + WORKFLOW("Workflow"); private final String label; @@ -88,5 +99,4 @@ public enum SpecKind implements LabelEnum { public String getLabel() { return label; } - } \ No newline at end of file diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/enums/SpecVersion.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/enums/SpecVersion.java index 8387a20..684b4f1 100644 --- a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/enums/SpecVersion.java +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/enums/SpecVersion.java @@ -33,7 +33,10 @@ public enum SpecVersion implements LabelEnum { * v1.1.0 */ V_1_1_0("1", "1", "0"), - ; + /** + * v1.2.0 + */ + V_1_2_0("1", "2", "0"); private final String major; private final String minor; private final String patch; diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/noref/SpecAnd.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/noref/SpecAnd.java index 4e1e8a3..042626b 100644 --- a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/noref/SpecAnd.java +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/noref/SpecAnd.java @@ -20,14 +20,16 @@ import com.aliyun.dataworks.common.spec.domain.SpecNoRefEntity; import com.aliyun.dataworks.common.spec.domain.ref.SpecNode; import lombok.Data; +import lombok.EqualsAndHashCode; /** - * @deprecated - * @see SpecJoin - * @see SpecBranches * @author yiwei.qyw * @date 2023/7/4 + * @see SpecJoin + * @see SpecBranches + * @deprecated */ +@EqualsAndHashCode(callSuper = true) @Data public class SpecAnd extends SpecNoRefEntity { private SpecNode nodeId; diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/noref/SpecAssertion.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/noref/SpecAssertion.java index de6c6ea..4eee94c
100644 --- a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/noref/SpecAssertion.java +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/noref/SpecAssertion.java @@ -17,6 +17,7 @@ import com.aliyun.dataworks.common.spec.domain.SpecNoRefEntity; import lombok.Data; +import lombok.EqualsAndHashCode; /** * assertion define * * @author 聿剑 * @date 2023/10/25 */ +@EqualsAndHashCode(callSuper = true) @Data public class SpecAssertion extends SpecNoRefEntity { private String field; diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/noref/SpecFlowDepend.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/noref/SpecFlowDepend.java index 103bd75..f8341b2 100644 --- a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/noref/SpecFlowDepend.java +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/noref/SpecFlowDepend.java @@ -29,6 +29,8 @@ @EqualsAndHashCode(callSuper = true) @Data public class SpecFlowDepend extends SpecNoRefEntity { + @EqualsAndHashCode.Include private SpecNode nodeId; + @EqualsAndHashCode.Include private List depends; } \ No newline at end of file diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/noref/SpecCombined.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/noref/SpecSubFlow.java similarity index 82% rename from spec/src/main/java/com/aliyun/dataworks/common/spec/domain/noref/SpecCombined.java rename to spec/src/main/java/com/aliyun/dataworks/common/spec/domain/noref/SpecSubFlow.java index d91bc79..9101dad 100644 --- a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/noref/SpecCombined.java +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/noref/SpecSubFlow.java @@ -20,15 +20,17 @@ import com.aliyun.dataworks.common.spec.domain.SpecNoRefEntity; import com.aliyun.dataworks.common.spec.domain.ref.SpecNode; import lombok.Data; +import lombok.EqualsAndHashCode; /** - * 组合节点Spec对象定义 + * Sub-workflow Spec object definition * * @author sam.liux * @date 2023/10/25 */ +@EqualsAndHashCode(callSuper = true) @Data -public class SpecCombined extends SpecNoRefEntity { +public class SpecSubFlow extends SpecNoRefEntity { private List nodes; - private List flow; + private List dependencies; } \ No newline at end of file diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/ContainerNode.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/Container.java similarity index 93% rename from spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/ContainerNode.java rename to spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/Container.java index 076a2b6..c7afc62 100644 --- a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/ContainerNode.java +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/Container.java @@ -25,7 +25,7 @@ * @author 聿剑 * @date 2024/5/2 */ -public interface ContainerNode { +public interface Container { /** * get inner nodes @@ -39,5 +39,5 @@ public interface ContainerNode { * * @return List of spec flow depend */ - List getInnerFlow(); + List getInnerDependencies(); } \ No newline at end of file diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/InputOutputWired.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/InputOutputWired.java new file mode 100644 index 0000000..122a722 --- /dev/null +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/InputOutputWired.java @@ -0,0 +1,31 @@ +/* + * 
Copyright (c) 2024, Alibaba Cloud; + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.aliyun.dataworks.common.spec.domain.ref; + +import java.util.List; + +import com.aliyun.dataworks.common.spec.domain.interfaces.Input; +import com.aliyun.dataworks.common.spec.domain.interfaces.Output; + +/** + * @author 聿剑 + * @date 2024/7/2 + */ +public interface InputOutputWired { + List getInputs(); + + List getOutputs(); +} diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/adapter/SpecNodeAdapter.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/ScriptWired.java similarity index 75% rename from spec/src/main/java/com/aliyun/dataworks/common/spec/domain/adapter/SpecNodeAdapter.java rename to spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/ScriptWired.java index 20e43a3..5c705de 100644 --- a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/adapter/SpecNodeAdapter.java +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/ScriptWired.java @@ -13,14 +13,12 @@ * limitations under the License. */ -package com.aliyun.dataworks.common.spec.domain.adapter; - -import com.aliyun.dataworks.common.spec.domain.ref.SpecNode; +package com.aliyun.dataworks.common.spec.domain.ref; /** * @author 聿剑 - * @date 2023/11/9 + * @date 2024/7/2 */ -public interface SpecNodeAdapter { - SpecNode getSpecNode(); +public interface ScriptWired { + SpecScript getScript(); } diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecDatasource.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecDatasource.java index bf7b315..205fcec 100644 --- a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecDatasource.java +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecDatasource.java @@ -16,6 +16,7 @@ package com.aliyun.dataworks.common.spec.domain.ref; import com.aliyun.dataworks.common.spec.domain.SpecRefEntity; +import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.Setter; import lombok.ToString; @@ -29,6 +30,7 @@ @Getter @Setter @ToString(callSuper = true) +@EqualsAndHashCode(callSuper = true) public class SpecDatasource extends SpecRefEntity { /** * Datasource name diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecFileResource.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecFileResource.java index ce4a9d2..4e11009 100644 --- a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecFileResource.java +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecFileResource.java @@ -18,6 +18,7 @@ import com.aliyun.dataworks.common.spec.domain.SpecRefEntity; import com.aliyun.dataworks.common.spec.domain.enums.SpecFileResourceType; import com.aliyun.dataworks.common.spec.domain.ref.file.SpecObjectStorageFile; +import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.Setter; import lombok.ToString; @@ -29,7 +30,8 @@ @Getter @Setter @ToString(callSuper = 
true) -public class SpecFileResource extends SpecRefEntity { +@EqualsAndHashCode(callSuper = true) +public class SpecFileResource extends SpecRefEntity implements ScriptWired { /** * Resource name */ @@ -37,10 +39,12 @@ public class SpecFileResource extends SpecRefEntity { /** * Resource config script */ + @EqualsAndHashCode.Include private SpecScript script; /** * Runtime resource for file resource register to calculation engine */ + @EqualsAndHashCode.Include private SpecRuntimeResource runtimeResource; /** * Resource type @@ -49,9 +53,11 @@ public class SpecFileResource extends SpecRefEntity { /** * Resource file storage */ + @EqualsAndHashCode.Include private SpecObjectStorageFile file; /** * Resource calculate engine datasource */ + @EqualsAndHashCode.Include private SpecDatasource datasource; } \ No newline at end of file diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecFunction.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecFunction.java index b984d01..a9f65c9 100644 --- a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecFunction.java +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecFunction.java @@ -21,6 +21,7 @@ import com.aliyun.dataworks.common.spec.domain.enums.FunctionType; import com.aliyun.dataworks.common.spec.domain.enums.SpecEmbeddedCodeType; import com.aliyun.dataworks.common.spec.domain.enums.SpecEmbeddedResourceType; +import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.Setter; @@ -30,13 +31,18 @@ */ @Getter @Setter -public class SpecFunction extends SpecRefEntity { +@EqualsAndHashCode(callSuper = true) +public class SpecFunction extends SpecRefEntity implements ScriptWired { private String name; + @EqualsAndHashCode.Include private SpecScript script; private FunctionType type; private String className; + @EqualsAndHashCode.Include private SpecDatasource datasource; + @EqualsAndHashCode.Include private SpecRuntimeResource runtimeResource; + @EqualsAndHashCode.Include private List fileResources; private String armResource; private String usageDescription; diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecNode.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecNode.java index 187bdde..9fbcd95 100644 --- a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecNode.java +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecNode.java @@ -20,6 +20,8 @@ import java.util.List; import java.util.Optional; +import com.alibaba.fastjson2.annotation.JSONField; + import com.aliyun.dataworks.common.spec.domain.SpecRefEntity; import com.aliyun.dataworks.common.spec.domain.enums.NodeInstanceModeType; import com.aliyun.dataworks.common.spec.domain.enums.NodeRecurrenceType; @@ -27,13 +29,13 @@ import com.aliyun.dataworks.common.spec.domain.interfaces.Input; import com.aliyun.dataworks.common.spec.domain.interfaces.Output; import com.aliyun.dataworks.common.spec.domain.noref.SpecBranch; -import com.aliyun.dataworks.common.spec.domain.noref.SpecCombined; import com.aliyun.dataworks.common.spec.domain.noref.SpecDoWhile; import com.aliyun.dataworks.common.spec.domain.noref.SpecFlowDepend; import com.aliyun.dataworks.common.spec.domain.noref.SpecForEach; import com.aliyun.dataworks.common.spec.domain.noref.SpecJoin; import com.aliyun.dataworks.common.spec.domain.noref.SpecNodeRef; import com.aliyun.dataworks.common.spec.domain.noref.SpecParamHub; +import 
com.aliyun.dataworks.common.spec.domain.noref.SpecSubFlow; import com.aliyun.dataworks.common.spec.domain.ref.component.SpecComponent; import lombok.Data; import lombok.EqualsAndHashCode; @@ -45,7 +47,7 @@ */ @EqualsAndHashCode(callSuper = true) @Data -public class SpecNode extends SpecRefEntity implements ContainerNode { +public class SpecNode extends SpecRefEntity implements Container, InputOutputWired, ScriptWired { private NodeRecurrenceType recurrence; private Integer priority; @@ -65,34 +67,48 @@ public class SpecNode extends SpecRefEntity implements ContainerNode { */ private Boolean ignoreBranchConditionSkip; + @EqualsAndHashCode.Include private SpecDatasource datasource; + @EqualsAndHashCode.Include private SpecScript script; + @EqualsAndHashCode.Include private SpecTrigger trigger; + @EqualsAndHashCode.Include private SpecRuntimeResource runtimeResource; + @EqualsAndHashCode.Include private List fileResources; + @EqualsAndHashCode.Include private List functions; + @EqualsAndHashCode.Include private List inputs; + @EqualsAndHashCode.Include private List outputs; private SpecNodeRef reference; + @EqualsAndHashCode.Include private SpecBranch branch; + @EqualsAndHashCode.Include private SpecJoin join; + @EqualsAndHashCode.Include private SpecDoWhile doWhile; + @EqualsAndHashCode.Include private SpecForEach foreach; - private SpecCombined combined; + @EqualsAndHashCode.Include + private SpecSubFlow combined; + @EqualsAndHashCode.Include private SpecParamHub paramHub; private String name; @@ -101,23 +117,40 @@ public class SpecNode extends SpecRefEntity implements ContainerNode { private String description; + @EqualsAndHashCode.Include private SpecComponent component; + @EqualsAndHashCode.Include + private SpecScheduleStrategy strategy; + + @EqualsAndHashCode.Include + private SpecSubFlow subflow; + @Override + @JSONField(serialize = false) public List getInnerNodes() { List nodes = new ArrayList<>(); - Optional.ofNullable(doWhile).ifPresent(dw -> { - Optional.ofNullable(dw.getSpecWhile()).ifPresent(nodes::add); - nodes.addAll(ListUtils.emptyIfNull(dw.getNodes())); - }); - - Optional.ofNullable(foreach).ifPresent(fe -> nodes.addAll(ListUtils.emptyIfNull(foreach.getNodes()))); - Optional.ofNullable(combined).ifPresent(cb -> nodes.addAll(ListUtils.emptyIfNull(cb.getNodes()))); - return Collections.unmodifiableList(nodes); + if (subflow != null) { + Optional.of(subflow).ifPresent(sub -> nodes.addAll(ListUtils.emptyIfNull(sub.getNodes()))); + } else if (doWhile != null) { + Optional.of(doWhile).ifPresent(dw -> { + Optional.ofNullable(dw.getSpecWhile()).ifPresent(nodes::add); + nodes.addAll(ListUtils.emptyIfNull(dw.getNodes())); + }); + } else if (foreach != null) { + Optional.of(foreach).ifPresent(fe -> nodes.addAll(ListUtils.emptyIfNull(foreach.getNodes()))); + } else if (combined != null) { + Optional.of(combined).ifPresent(cb -> nodes.addAll(ListUtils.emptyIfNull(cb.getNodes()))); + } + return Collections.unmodifiableList(ListUtils.emptyIfNull(nodes)); } @Override - public List getInnerFlow() { + public List getInnerDependencies() { + if (subflow != null) { + return subflow.getDependencies(); + } + if (doWhile != null) { return doWhile.getFlow(); } @@ -127,7 +160,7 @@ public List getInnerFlow() { } if (combined != null) { - return combined.getFlow(); + return combined.getDependencies(); } return null; diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecRuntimeResource.java 
b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecRuntimeResource.java index 463dbeb..2d254d2 100644 --- a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecRuntimeResource.java +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecRuntimeResource.java @@ -31,5 +31,6 @@ public class SpecRuntimeResource extends SpecRefEntity { /** * 运行时资源,指定cu数 */ + @Deprecated private String cu; } \ No newline at end of file diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecScheduleStrategy.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecScheduleStrategy.java new file mode 100644 index 0000000..6c21e86 --- /dev/null +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecScheduleStrategy.java @@ -0,0 +1,54 @@ +/* + * Copyright (c) 2024, Alibaba Cloud; + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.aliyun.dataworks.common.spec.domain.ref; + +import com.aliyun.dataworks.common.spec.domain.enums.FailureStrategy; +import com.aliyun.dataworks.common.spec.domain.enums.NodeInstanceModeType; +import com.aliyun.dataworks.common.spec.domain.enums.NodeRerunModeType; +import lombok.Data; +import lombok.EqualsAndHashCode; + +/** + * Schedule strategy + * + * @author 聿剑 + * @date 2024/7/8 + */ +@Data +@EqualsAndHashCode +public class SpecScheduleStrategy { + private Integer priority; + + private Integer timeout; + + private NodeInstanceModeType instanceMode; + + private NodeRerunModeType rerunMode; + + private Integer rerunTimes; + + private Integer rerunInterval; + + /** + * Whether to ignore the skip caused by branch conditions + */ + private Boolean ignoreBranchConditionSkip; + + /** + * Failure strategy + */ + private FailureStrategy failureStrategy; +} diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecScript.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecScript.java index 54dad07..8fa42e3 100644 --- a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecScript.java +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecScript.java @@ -35,10 +35,12 @@ public class SpecScript extends SpecFile { /** * Script runtime configuration */ + @EqualsAndHashCode.Include private SpecScriptRuntime runtime; /** * Script parameters */ + @EqualsAndHashCode.Include private List parameters; /** * Script content diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecTable.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecTable.java index 7c11a4a..d566ec9 100644 --- a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecTable.java +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecTable.java @@ -39,5 +39,6 @@ public SpecTable() { private String ddl; private Boolean hasPartition; private Boolean isVisible; + @EqualsAndHashCode.Include private SpecCalcEngine calcEngine; } \ No newline at end of file diff --git 
a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecTrigger.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecTrigger.java index a66a376..5310c76 100644 --- a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecTrigger.java +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecTrigger.java @@ -16,18 +16,23 @@ package com.aliyun.dataworks.common.spec.domain.ref; import com.aliyun.dataworks.common.spec.domain.SpecRefEntity; +import com.aliyun.dataworks.common.spec.domain.enums.NodeRecurrenceType; import com.aliyun.dataworks.common.spec.domain.enums.TriggerType; import lombok.Data; +import lombok.EqualsAndHashCode; /** * @author yiwei.qyw * @date 2023/7/4 */ +@EqualsAndHashCode(callSuper = true) @Data public class SpecTrigger extends SpecRefEntity { private TriggerType type; private String cron; + private NodeRecurrenceType recurrence; private String startTime; private String endTime; private String timezone; + private Integer delaySeconds; } \ No newline at end of file diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecVariable.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecVariable.java index dc92fa3..5378fc2 100644 --- a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecVariable.java +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecVariable.java @@ -43,8 +43,10 @@ public SpecVariable() { private String description; @Exclude @JSONField(serialize = false) + @EqualsAndHashCode.Exclude private SpecVariable referenceVariable; @Exclude + @EqualsAndHashCode.Exclude @JSONField(serialize = false) private SpecDepend node; } \ No newline at end of file diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecWorkflow.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecWorkflow.java new file mode 100644 index 0000000..e4c4e1a --- /dev/null +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/SpecWorkflow.java @@ -0,0 +1,75 @@ + +/* + * Copyright (c) 2024, Alibaba Cloud; + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.aliyun.dataworks.common.spec.domain.ref; + +import java.util.Collections; +import java.util.List; + +import com.aliyun.dataworks.common.spec.domain.SpecRefEntity; +import com.aliyun.dataworks.common.spec.domain.interfaces.Input; +import com.aliyun.dataworks.common.spec.domain.interfaces.Output; +import com.aliyun.dataworks.common.spec.domain.noref.SpecFlowDepend; +import lombok.Data; +import lombok.EqualsAndHashCode; +import org.apache.commons.collections4.ListUtils; + +/** + * the top level concept of dataworks Workflow + * + * @author 聿剑 + * @date 2024/07/02 + */ +@EqualsAndHashCode(callSuper = true) +@Data +public class SpecWorkflow extends SpecRefEntity implements Container, InputOutputWired, ScriptWired { + @EqualsAndHashCode.Include + private SpecScript script; + + @EqualsAndHashCode.Include + private SpecTrigger trigger; + + @EqualsAndHashCode.Include + private List inputs; + + @EqualsAndHashCode.Include + private List outputs; + + @EqualsAndHashCode.Include + private SpecScheduleStrategy strategy; + + @EqualsAndHashCode.Include + private List nodes; + + @EqualsAndHashCode.Include + private List dependencies; + + private String name; + + private String owner; + + private String description; + + @Override + public List getInnerNodes() { + return Collections.unmodifiableList(ListUtils.emptyIfNull(nodes)); + } + + @Override + public List getInnerDependencies() { + return Collections.unmodifiableList(ListUtils.emptyIfNull(dependencies)); + } +} \ No newline at end of file diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/runtime/SpecScriptRuntime.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/runtime/SpecScriptRuntime.java index 916ea77..fdbce7d 100644 --- a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/runtime/SpecScriptRuntime.java +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/ref/runtime/SpecScriptRuntime.java @@ -17,10 +17,9 @@ import java.util.Map; -import com.aliyun.dataworks.common.spec.domain.dw.types.CodeProgramType; -import com.aliyun.dataworks.common.spec.domain.ref.runtime.container.SpecContainer; import com.aliyun.dataworks.common.spec.domain.SpecNoRefEntity; import com.aliyun.dataworks.common.spec.domain.dw.types.CodeProgramType; +import com.aliyun.dataworks.common.spec.domain.ref.runtime.container.SpecContainer; import lombok.Data; import lombok.EqualsAndHashCode; @@ -64,9 +63,16 @@ public class SpecScriptRuntime extends SpecNoRefEntity { * flink configurations */ private Map flinkConf; - + /** + * streaming job config + */ + private Map streamJobConfig; /** * runtime container info */ private SpecContainer container; + /** + * runtime resource info + */ + private String cu; } \ No newline at end of file diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/specification/DataWorksNodeSpec.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/specification/DataWorksNodeSpec.java deleted file mode 100644 index ac2d160..0000000 --- a/spec/src/main/java/com/aliyun/dataworks/common/spec/domain/specification/DataWorksNodeSpec.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright (c) 2024, Alibaba Cloud; - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.aliyun.dataworks.common.spec.domain.specification; - -import java.util.Collections; -import java.util.List; - -import com.aliyun.dataworks.common.spec.domain.Spec; -import com.aliyun.dataworks.common.spec.domain.SpecNoRefEntity; -import com.aliyun.dataworks.common.spec.domain.enums.SpecKind; -import com.aliyun.dataworks.common.spec.domain.noref.SpecFlowDepend; -import com.aliyun.dataworks.common.spec.domain.ref.SpecNode; -import lombok.Data; -import lombok.EqualsAndHashCode; - -/** - * @author 聿剑 - * @date 2024/5/20 - */ -@EqualsAndHashCode(callSuper = true) -@Data -public class DataWorksNodeSpec extends SpecNoRefEntity implements Spec { - private SpecNode node; - private SpecFlowDepend flow; - - @Override - public List getKinds() { - return Collections.singletonList(SpecKind.NODE); - } -} diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/parser/Parser.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/parser/Parser.java index 8800c77..70dd341 100644 --- a/spec/src/main/java/com/aliyun/dataworks/common/spec/parser/Parser.java +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/parser/Parser.java @@ -16,6 +16,10 @@ package com.aliyun.dataworks.common.spec.parser; import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; /** * Define a parser for domain classes @@ -41,4 +45,8 @@ public interface Parser { default String getKeyType() { return null; } + + default Set getKeyTypes() { + return Stream.of(getKeyType()).filter(Objects::nonNull).collect(Collectors.toSet()); + } } \ No newline at end of file diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/parser/SpecParserFactory.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/parser/SpecParserFactory.java index d8b3a44..a2f091d 100644 --- a/spec/src/main/java/com/aliyun/dataworks/common/spec/parser/SpecParserFactory.java +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/parser/SpecParserFactory.java @@ -25,6 +25,7 @@ import com.aliyun.dataworks.common.spec.exception.SpecException; import com.aliyun.dataworks.common.spec.parser.impl.DefaultSpecParser; import com.aliyun.dataworks.common.spec.parser.impl.SpecParser; +import org.apache.commons.collections4.CollectionUtils; import org.reflections.Reflections; /** @@ -51,10 +52,8 @@ public class SpecParserFactory { continue; } - if (parser.getKeyType() != null) { - parserMap.put(parser.getKeyType(), parser); - } - + CollectionUtils.emptyIfNull(parser.getKeyTypes()).forEach(key -> parserMap.put(key, parser)); + // set Parser‘s Class type to parserMap Arrays.stream(parser.getClass().getGenericInterfaces()) .filter(type -> type instanceof ParameterizedType) diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/parser/impl/DataWorksNodeSpecParser.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/parser/impl/DataWorksNodeSpecParser.java deleted file mode 100644 index b7380c4..0000000 --- a/spec/src/main/java/com/aliyun/dataworks/common/spec/parser/impl/DataWorksNodeSpecParser.java +++ /dev/null @@ -1,44 +0,0 
@@ -/* - * Copyright (c) 2024, Alibaba Cloud; - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.aliyun.dataworks.common.spec.parser.impl; - -import java.util.Map; - -import com.aliyun.dataworks.common.spec.annotation.SpecParser; -import com.aliyun.dataworks.common.spec.domain.specification.DataWorksNodeSpec; -import com.aliyun.dataworks.common.spec.parser.SpecParserContext; -import com.aliyun.dataworks.common.spec.utils.SpecDevUtil; - -/** - * @author 聿剑 - * @date 2023/11/16 - */ -@SpecParser -public class DataWorksNodeSpecParser extends com.aliyun.dataworks.common.spec.parser.impl.SpecParser { - @Override - public boolean support(String kind) { - return matchKinds(new DataWorksNodeSpec().getKinds(), kind); - } - - @Override - public DataWorksNodeSpec parse(Map rawContext, SpecParserContext specParserContext) { - DataWorksNodeSpec specObj = new DataWorksNodeSpec(); - specParserContext.setIgnoreMissingFields(true); - SpecDevUtil.setSimpleField(rawContext, specObj); - parseSpecObjectFields(specObj, rawContext, specParserContext); - return specObj; - } -} diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/parser/impl/SpecJoinParser.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/parser/impl/SpecJoinParser.java index 35eeef2..b4656d0 100644 --- a/spec/src/main/java/com/aliyun/dataworks/common/spec/parser/impl/SpecJoinParser.java +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/parser/impl/SpecJoinParser.java @@ -18,6 +18,7 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Optional; import com.aliyun.dataworks.common.spec.annotation.SpecParser; import com.aliyun.dataworks.common.spec.domain.noref.SpecAssertIn; @@ -29,6 +30,7 @@ import com.aliyun.dataworks.common.spec.exception.SpecException; import com.aliyun.dataworks.common.spec.parser.SpecParserContext; import com.aliyun.dataworks.common.spec.utils.SpecDevUtil; +import org.apache.commons.collections4.MapUtils; /** * @author 聿剑 @@ -47,23 +49,15 @@ public class SpecJoinParser extends DefaultSpecParser { @SuppressWarnings("unchecked") @Override public SpecJoin parse(Map rawContext, SpecParserContext specParserContext) { - if (!rawContext.containsKey(KEY_LOGIC)) { - throw new SpecException(SpecErrorCode.PARSE_ERROR, "'" + KEY_LOGIC + "' field is required"); - } - - if (!rawContext.containsKey(KEY_BRANCHES)) { - throw new SpecException(SpecErrorCode.PARSE_ERROR, "'" + KEY_BRANCHES + "' field is required"); - } - SpecJoin specJoin = new SpecJoin(); // parse logic - Map ctxMapLogic = (Map)rawContext.get(KEY_LOGIC); SpecLogic specLogic = new SpecLogic(); specJoin.setLogic(specLogic); - specLogic.setExpression((String)ctxMapLogic.getOrDefault(KEY_LOGIC_EXPRESSION, "")); + specLogic.setExpression((String)MapUtils.emptyIfNull((Map)rawContext.get(KEY_LOGIC)) + .getOrDefault(KEY_LOGIC_EXPRESSION, "")); // parse branches - List ctxMapBranches = (List)rawContext.get(KEY_BRANCHES); + List ctxMapBranches = 
(List)Optional.ofNullable(rawContext.get(KEY_BRANCHES)).orElse(new ArrayList<>()); ArrayList specJoinBranches = new ArrayList<>(); for (Object o : ctxMapBranches) { Map ctxMapBranch = (Map)o; diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/parser/impl/NodeParser.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/parser/impl/SpecNodeParser.java similarity index 95% rename from spec/src/main/java/com/aliyun/dataworks/common/spec/parser/impl/NodeParser.java rename to spec/src/main/java/com/aliyun/dataworks/common/spec/parser/impl/SpecNodeParser.java index 929c1f5..d75fb8d 100644 --- a/spec/src/main/java/com/aliyun/dataworks/common/spec/parser/impl/NodeParser.java +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/parser/impl/SpecNodeParser.java @@ -47,7 +47,7 @@ * @date 2023/7/6 */ @SpecParser -public class NodeParser implements Parser { +public class SpecNodeParser implements Parser { @SuppressWarnings("unchecked") @Override public SpecNode parse(Map ctxMap, SpecParserContext specParserContext) { @@ -57,6 +57,7 @@ public SpecNode parse(Map ctxMap, SpecParserContext specParserCo SpecDevUtil.setSpecObject(specNode, "doWhile", ctxMap.get(DoWhileParser.DO_WHILE), specParserContext); SpecDevUtil.setSpecObject(specNode, "foreach", ctxMap.get(SpecForEachParser.FOREACH), specParserContext); SpecDevUtil.setSpecObject(specNode, "paramHub", ctxMap.get(SpecParamHubParser.PARAM_HUB), specParserContext); + SpecDevUtil.setSpecObject(specNode, SubFlowParser.KEY_TYPE_SUBFLOW, ctxMap.get(SubFlowParser.KEY_TYPE_SUBFLOW), specParserContext); specNode.setInputs(parseInputOutputs(specParserContext, (Map)ctxMap.get("inputs"))); specNode.setOutputs(parseInputOutputs(specParserContext, (Map)ctxMap.get("outputs"))); @@ -78,7 +79,7 @@ public SpecNode parse(Map ctxMap, SpecParserContext specParserCo public String getKeyType() {return "node";} @SuppressWarnings("unchecked") - private static ArrayList parseInputOutputs(SpecParserContext contextMeta, Map ioCtxMap) { + public static ArrayList parseInputOutputs(SpecParserContext contextMeta, Map ioCtxMap) { ArrayList ioList = new ArrayList<>(); Reflections reflections = new Reflections(SpecArtifact.class.getPackage().getName()); Set> artifactClzTypes = reflections.getSubTypesOf(SpecArtifact.class); diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/parser/impl/SpecVariableParser.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/parser/impl/SpecVariableParser.java index 3cc527b..c75b2ad 100644 --- a/spec/src/main/java/com/aliyun/dataworks/common/spec/parser/impl/SpecVariableParser.java +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/parser/impl/SpecVariableParser.java @@ -78,11 +78,7 @@ private SpecVariable parseVariable(SpecParserContext contextMeta, Map variableMap, SpecVariable variable) { - String name = (String)variableMap.get(KEY_NAME); - if (StringUtils.isBlank(name)) { - throw new SpecException(SpecErrorCode.PARSE_ERROR, MessageFormat.format("{0} field of variable is required, source: {1}", - KEY_NAME, JSON.toJSONString(variableMap))); - } + String name = StringUtils.defaultString((String)variableMap.get(KEY_NAME), "unnamed"); variable.setName(name); } diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/parser/impl/SpecWorkflowParser.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/parser/impl/SpecWorkflowParser.java new file mode 100644 index 0000000..fe8eaa9 --- /dev/null +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/parser/impl/SpecWorkflowParser.java @@ -0,0 +1,66 
@@ +/* + * Copyright (c) 2024, Alibaba Cloud; + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.aliyun.dataworks.common.spec.parser.impl; + +import java.util.Map; + +import com.aliyun.dataworks.common.spec.annotation.SpecParser; +import com.aliyun.dataworks.common.spec.domain.noref.SpecDepend; +import com.aliyun.dataworks.common.spec.domain.ref.SpecNode; +import com.aliyun.dataworks.common.spec.domain.ref.SpecNodeOutput; +import com.aliyun.dataworks.common.spec.domain.ref.SpecVariable; +import com.aliyun.dataworks.common.spec.domain.ref.SpecWorkflow; +import com.aliyun.dataworks.common.spec.parser.Parser; +import com.aliyun.dataworks.common.spec.parser.SpecParserContext; +import com.aliyun.dataworks.common.spec.utils.SpecDevUtil; +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.collections4.ListUtils; + +/** + * @author 聿剑 + * @date 2024/7/9 + */ +@SpecParser +public class SpecWorkflowParser implements Parser { + @Override + public SpecWorkflow parse(Map ctxMap, SpecParserContext specParserContext) { + SpecWorkflow specWorkflow = new SpecWorkflow(); + + SpecDevUtil.setSameKeyField(ctxMap, specWorkflow, specParserContext); + if (CollectionUtils.isEmpty(specWorkflow.getDependencies())) { + SpecDevUtil.setSpecObject(specWorkflow, "dependencies", ctxMap.get("flow"), specParserContext); + } + + //noinspection unchecked + specWorkflow.setInputs(SpecNodeParser.parseInputOutputs(specParserContext, (Map)ctxMap.get("inputs"))); + //noinspection unchecked + specWorkflow.setOutputs(SpecNodeParser.parseInputOutputs(specParserContext, (Map)ctxMap.get("outputs"))); + ListUtils.emptyIfNull(specWorkflow.getOutputs()).stream() + .filter(out -> out instanceof SpecVariable) + .map(out -> (SpecVariable)out) + .forEach(out -> { + SpecDepend node = new SpecDepend(); + SpecNode nodeId = new SpecNode(); + nodeId.setId(specWorkflow.getId()); + node.setNodeId(nodeId); + SpecNodeOutput specOut = new SpecNodeOutput(); + specOut.setData(node.getNodeId().getId()); + node.setOutput(specOut); + out.setNode(node); + }); + return specWorkflow; + } +} \ No newline at end of file diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/parser/impl/CombinedParser.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/parser/impl/SubFlowParser.java similarity index 64% rename from spec/src/main/java/com/aliyun/dataworks/common/spec/parser/impl/CombinedParser.java rename to spec/src/main/java/com/aliyun/dataworks/common/spec/parser/impl/SubFlowParser.java index a49fde8..d62c96f 100644 --- a/spec/src/main/java/com/aliyun/dataworks/common/spec/parser/impl/CombinedParser.java +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/parser/impl/SubFlowParser.java @@ -16,30 +16,38 @@ package com.aliyun.dataworks.common.spec.parser.impl; import java.util.Map; +import java.util.Set; import com.aliyun.dataworks.common.spec.annotation.SpecParser; -import com.aliyun.dataworks.common.spec.domain.noref.SpecCombined; +import 
com.aliyun.dataworks.common.spec.domain.noref.SpecSubFlow; import com.aliyun.dataworks.common.spec.parser.Parser; import com.aliyun.dataworks.common.spec.parser.SpecParserContext; import com.aliyun.dataworks.common.spec.utils.SpecDevUtil; +import com.google.common.collect.Sets; /** * @author 聿剑 * @date 2023/10/25 */ @SpecParser -public class CombinedParser implements Parser { - public static final String KEY_TYPE = "combined"; +public class SubFlowParser implements Parser { + public static final String KEY_TYPE_COMBINED = "combined"; + public static final String KEY_TYPE_SUBFLOW = "subflow"; @Override - public SpecCombined parse(Map rawContext, SpecParserContext specParserContext) { - SpecCombined specCombined = new SpecCombined(); + public SpecSubFlow parse(Map rawContext, SpecParserContext specParserContext) { + SpecSubFlow specCombined = new SpecSubFlow(); SpecDevUtil.setSameKeyField(rawContext, specCombined, specParserContext); return specCombined; } @Override public String getKeyType() { - return KEY_TYPE; + return KEY_TYPE_COMBINED; + } + + @Override + public Set getKeyTypes() { + return Sets.newHashSet(KEY_TYPE_COMBINED, KEY_TYPE_SUBFLOW); + } } \ No newline at end of file diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/utils/SpecDevUtil.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/utils/SpecDevUtil.java index 3761d63..d5e11ad 100644 --- a/spec/src/main/java/com/aliyun/dataworks/common/spec/utils/SpecDevUtil.java +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/utils/SpecDevUtil.java @@ -22,12 +22,15 @@ import java.math.BigDecimal; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.stream.Collectors; import com.alibaba.fastjson2.JSON; +import com.alibaba.fastjson2.JSONObject; +import com.alibaba.fastjson2.annotation.JSONField; import com.aliyun.dataworks.common.spec.domain.SpecRefEntity; import com.aliyun.dataworks.common.spec.domain.interfaces.LabelEnum; @@ -38,6 +41,7 @@ import com.aliyun.dataworks.common.spec.parser.SpecParserContext.SpecEntityContext; import com.aliyun.dataworks.common.spec.parser.SpecParserFactory; import lombok.Data; +import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.ClassUtils; import org.apache.commons.lang3.StringUtils; @@ -47,6 +51,7 @@ * @author yiwei.qyw * @date 2023/7/7 */ +@Slf4j @SuppressWarnings("rawtypes") public class SpecDevUtil { @@ -68,10 +73,13 @@ public static void setSpecObject(Object ownerObject, String fieldName, Object va String clzName = fieldClz.getSimpleName(); Object resEntity = null; - if (value instanceof List) { + + if (value instanceof List && List.class.isAssignableFrom(declaredField.getType())) { + // for list field resEntity = getListObj(clzName, (List)value, parserContext, declaredField); - } else if (value instanceof Map) { - resEntity = getSpecEntity(clzName, value, parserContext); + } else if (value instanceof Map && !List.class.isAssignableFrom(declaredField.getType())) { + // for non-list object field + resEntity = getSpecEntity(fieldClz, value, parserContext); } else if (value instanceof String) { // reference string setRefEntity(ownerObject, clzName, value, parserContext, declaredField); @@ -409,10 +417,15 @@ private static void setRefEntity(Object holdObject, String simpleName, Object va } @SuppressWarnings("unchecked") - private static Object getSpecEntity(String simpleName, Object value, SpecParserContext parserContext) { - Parser parser = 
SpecParserFactory.getParser(simpleName); + private static Object getSpecEntity(Class clz, Object value, SpecParserContext parserContext) { + Parser parser = SpecParserFactory.getParser(clz.getSimpleName()); // generate Object by Parser if (parser == null) { + try { + return JSON.parseObject(JSON.toJSONString(value), clz); + } catch (Exception e) { + log.warn("parse by json failed: {}, error: {}", clz, e.getMessage()); + } return null; } @@ -507,4 +520,41 @@ private static void parserSpecFields(SpecParserContext contextMeta, Map + + public static JSONObject writeJsonObject(Object specObj, boolean withoutCollectionFields) { + if (specObj == null) { + return null; + } + + JSONObject json = new JSONObject(); + + List fields = SpecDevUtil.getPropertyFields(specObj); + Optional.ofNullable(specObj.getClass().getSuperclass()).map(Class::getDeclaredFields).map(Arrays::asList).ifPresent( + list -> fields.addAll(1, list)); + + fields.stream() + .filter(f -> !f.getName().contains("$") && !Modifier.isStatic(f.getModifiers())) + .filter(f -> Optional.ofNullable(f.getAnnotation(JSONField.class)).map(JSONField::serialize).orElse(true)) + .forEach(field -> { + field.setAccessible(true); + try { + Object value = field.get(specObj); + if (value == null) { + return; + } + + if (LabelEnum.class.isAssignableFrom(value.getClass())) { + value = ((LabelEnum)value).getLabel(); + } + + if (withoutCollectionFields && (value instanceof Collection || value instanceof Map)) { + return; + } + + json.put(field.getName(), value); + } catch (IllegalAccessException e) { + throw new RuntimeException(e); + } + }); + return json; + } } \ No newline at end of file diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/writer/impl/AbstractWriter.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/writer/impl/AbstractWriter.java index fce248f..4dade33 100644 --- a/spec/src/main/java/com/aliyun/dataworks/common/spec/writer/impl/AbstractWriter.java +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/writer/impl/AbstractWriter.java @@ -15,22 +15,14 @@ package com.aliyun.dataworks.common.spec.writer.impl; -import java.lang.reflect.Field; -import java.lang.reflect.Modifier; import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; -import java.util.Arrays; -import java.util.Collection; import java.util.List; -import java.util.Map; import java.util.Objects; -import java.util.Optional; import com.alibaba.fastjson2.JSONArray; import com.alibaba.fastjson2.JSONObject; -import com.alibaba.fastjson2.annotation.JSONField; -import com.aliyun.dataworks.common.spec.domain.interfaces.LabelEnum; import com.aliyun.dataworks.common.spec.utils.SpecDevUtil; import com.aliyun.dataworks.common.spec.writer.SpecWriterContext; import com.aliyun.dataworks.common.spec.writer.Writer; @@ -69,41 +61,7 @@ public boolean matchType(Class t) { } protected JSONObject writeJsonObject(Object specObj, boolean withoutCollectionFields) { - if (specObj == null) { - return null; - } - - JSONObject json = new JSONObject(); - - List fields = SpecDevUtil.getPropertyFields(specObj); - Optional.ofNullable(specObj.getClass().getSuperclass()).map(Class::getDeclaredFields).map(Arrays::asList).ifPresent( - list -> fields.addAll(1, list)); - - fields.stream() - .filter(f -> !f.getName().contains("$") && !Modifier.isStatic(f.getModifiers())) - .filter(f -> Optional.ofNullable(f.getAnnotation(JSONField.class)).map(JSONField::serialize).orElse(true)) - .forEach(field -> { - field.setAccessible(true); - try { - Object value = field.get(specObj); - if (value == null) { - return; - } - - if (LabelEnum.class.isAssignableFrom(value.getClass())) { - value = ((LabelEnum)value).getLabel(); - } - - if (withoutCollectionFields && (value
instanceof Collection || value instanceof Map)) { - return; - } - - json.put(field.getName(), value); - } catch (IllegalAccessException e) { - throw new RuntimeException(e); - } - }); - return json; + return SpecDevUtil.writeJsonObject(specObj, withoutCollectionFields); } @SuppressWarnings({"rawtypes", "unchecked"}) diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/writer/impl/DataWorksNodeSpecWriter.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/writer/impl/DataWorksNodeSpecWriter.java deleted file mode 100644 index c1c39bd..0000000 --- a/spec/src/main/java/com/aliyun/dataworks/common/spec/writer/impl/DataWorksNodeSpecWriter.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright (c) 2024, Alibaba Cloud; - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.aliyun.dataworks.common.spec.writer.impl; - -import com.alibaba.fastjson2.JSONObject; - -import com.aliyun.dataworks.common.spec.annotation.SpecWriter; -import com.aliyun.dataworks.common.spec.domain.specification.DataWorksNodeSpec; -import com.aliyun.dataworks.common.spec.writer.SpecWriterContext; - -/** - * DataWorksNodeSpec writer - * - * @author 聿剑 - * @date 2023/8/27 - */ -@SpecWriter -public class DataWorksNodeSpecWriter extends DefaultJsonObjectWriter { - public DataWorksNodeSpecWriter(SpecWriterContext context) { - super(context); - } - - @Override - public JSONObject write(DataWorksNodeSpec specObj, SpecWriterContext context) { - JSONObject jsonObject = writeJsonObject(specObj, true); - if (null != specObj.getNode()) { - jsonObject.put("node", writeByWriter(specObj.getNode())); - } - - if (null != specObj.getFlow()) { - jsonObject.put("flow", writeByWriter(specObj.getFlow())); - } - return jsonObject; - } -} \ No newline at end of file diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/writer/impl/DataWorksWorkflowSpecWriter.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/writer/impl/DataWorksWorkflowSpecWriter.java index de2a10e..647a69e 100644 --- a/spec/src/main/java/com/aliyun/dataworks/common/spec/writer/impl/DataWorksWorkflowSpecWriter.java +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/writer/impl/DataWorksWorkflowSpecWriter.java @@ -50,6 +50,10 @@ public JSONObject write(DataWorksWorkflowSpec specObj, SpecWriterContext context jsonObject.put("nodes", writeByWriter(specObj.getNodes())); } + if (CollectionUtils.isNotEmpty(specObj.getWorkflows())) { + jsonObject.put("workflows", writeByWriter(specObj.getWorkflows())); + } + if (CollectionUtils.isNotEmpty(specObj.getFlow())) { jsonObject.put("flow", writeFlow(specObj)); } diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/writer/impl/SpecNodeWriter.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/writer/impl/SpecNodeWriter.java index a6e1a0f..c707374 100644 --- a/spec/src/main/java/com/aliyun/dataworks/common/spec/writer/impl/SpecNodeWriter.java +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/writer/impl/SpecNodeWriter.java @@ -33,12 +33,12 @@ import 
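
With the reflective field-to-JSON logic moved out of AbstractWriter, the same behaviour is now reachable directly through the utility method added above; a one-line usage sketch, with specNode standing in for any spec domain object:

    JSONObject json = SpecDevUtil.writeJsonObject(specNode, true);   // true skips Collection/Map-typed fields
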
com.aliyun.dataworks.common.spec.domain.ref.SpecNodeOutput; import com.aliyun.dataworks.common.spec.domain.ref.SpecTable; import com.aliyun.dataworks.common.spec.domain.ref.SpecVariable; -import com.aliyun.dataworks.common.spec.parser.impl.CombinedParser; import com.aliyun.dataworks.common.spec.parser.impl.DoWhileParser; import com.aliyun.dataworks.common.spec.parser.impl.SpecBranchParser; import com.aliyun.dataworks.common.spec.parser.impl.SpecForEachParser; import com.aliyun.dataworks.common.spec.parser.impl.SpecJoinParser; import com.aliyun.dataworks.common.spec.parser.impl.SpecParamHubParser; +import com.aliyun.dataworks.common.spec.parser.impl.SubFlowParser; import com.aliyun.dataworks.common.spec.writer.SpecWriterContext; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.collections4.ListUtils; @@ -87,7 +87,8 @@ public JSONObject write(SpecNode specObj, SpecWriterContext context) { json.put(SpecForEachParser.FOREACH, writeByWriter(specObj.getForeach())); json.put(SpecBranchParser.BRANCH, writeByWriter(specObj.getBranch())); json.put(SpecJoinParser.KEY_JOIN, writeByWriter(specObj.getJoin())); - json.put(CombinedParser.KEY_TYPE, writeByWriter(specObj.getCombined())); + json.put(SubFlowParser.KEY_TYPE_COMBINED, writeByWriter(specObj.getCombined())); + json.put(SubFlowParser.KEY_TYPE_SUBFLOW, writeByWriter(specObj.getSubflow())); return json; } diff --git a/spec/src/main/java/com/aliyun/dataworks/common/spec/writer/impl/SpecWorkflowWriter.java b/spec/src/main/java/com/aliyun/dataworks/common/spec/writer/impl/SpecWorkflowWriter.java new file mode 100644 index 0000000..8a015db --- /dev/null +++ b/spec/src/main/java/com/aliyun/dataworks/common/spec/writer/impl/SpecWorkflowWriter.java @@ -0,0 +1,99 @@ +/* + * Copyright (c) 2024, Alibaba Cloud; + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.aliyun.dataworks.common.spec.writer.impl; + +import java.util.ArrayList; +import java.util.Comparator; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.stream.Collectors; + +import com.alibaba.fastjson2.JSONArray; +import com.alibaba.fastjson2.JSONObject; + +import com.aliyun.dataworks.common.spec.annotation.SpecWriter; +import com.aliyun.dataworks.common.spec.domain.SpecContext; +import com.aliyun.dataworks.common.spec.domain.enums.SpecVersion; +import com.aliyun.dataworks.common.spec.domain.interfaces.NodeIO; +import com.aliyun.dataworks.common.spec.domain.ref.SpecNodeOutput; +import com.aliyun.dataworks.common.spec.domain.ref.SpecTable; +import com.aliyun.dataworks.common.spec.domain.ref.SpecVariable; +import com.aliyun.dataworks.common.spec.domain.ref.SpecWorkflow; +import com.aliyun.dataworks.common.spec.writer.SpecWriterContext; +import org.apache.commons.collections4.ListUtils; + +/** + * Spec node writer + * + * @author 聿剑 + * @date 2023/8/27 + */ +@SpecWriter +public class SpecWorkflowWriter extends DefaultJsonObjectWriter { + public SpecWorkflowWriter(SpecWriterContext context) { + super(context); + } + + @Override + public JSONObject write(SpecWorkflow specObj, SpecWriterContext context) { + JSONObject json = writeJsonObject(specObj, true); + + JSONObject inputs = writeIo(specObj.getInputs()); + json.put("inputs", inputs); + JSONObject outputs = writeIo(specObj.getOutputs()); + json.put("outputs", outputs); + json.put("script", writeByWriter(specObj.getScript())); + json.put("trigger", writeByWriter(specObj.getTrigger())); + json.put("strategy", writeByWriter(specObj.getStrategy())); + json.put("nodes", Optional.ofNullable(specObj.getNodes()).map(nodes -> writerListByWriter(new ArrayList<>(nodes))).orElse(new JSONArray())); + json.put("dependencies", Optional.ofNullable(specObj.getDependencies()).map(dependencies -> writerListByWriter(new ArrayList<>(dependencies))) + .orElse(new JSONArray())); + return json; + } + + private JSONObject writeIo(List ioList) { + if (ioList == null) { + return null; + } + + JSONObject ioJson = new JSONObject(); + Map, List> ioGroup = ListUtils.emptyIfNull(ioList).stream().collect(Collectors.groupingBy(Object::getClass)); + + ioGroup.keySet().stream().sorted(Comparator.comparing(Class::getSimpleName)).forEach(clz -> { + List ios = ioGroup.get(clz); + String key; + JSONArray arr; + if (SpecTable.class.isAssignableFrom(clz)) { + key = "tables"; + } else if (SpecNodeOutput.class.isAssignableFrom(clz)) { + String contextVersion = Optional.ofNullable(context).map(SpecContext::getVersion).orElse(SpecVersion.V_1_1_0.getLabel()); + key = SpecVersion.V_1_0_0.getLabel().equalsIgnoreCase(contextVersion) ? 
"outputs" : "nodeOutputs"; + } else if (clz.equals(SpecVariable.class)) { + key = "variables"; + } else { + throw new RuntimeException("unsupported input type"); + } + + if (!ioJson.containsKey(key)) { + ioJson.put(key, new JSONArray()); + } + arr = ioJson.getJSONArray(key); + ListUtils.emptyIfNull(ios).stream().map(this::writeByWriter).forEach(arr::add); + }); + return ioJson; + } +} diff --git a/spec/src/test/java/com/aliyun/dataworks/common/spec/SpecUtilTest.java b/spec/src/test/java/com/aliyun/dataworks/common/spec/SpecUtilTest.java index e776644..23598b3 100644 --- a/spec/src/test/java/com/aliyun/dataworks/common/spec/SpecUtilTest.java +++ b/spec/src/test/java/com/aliyun/dataworks/common/spec/SpecUtilTest.java @@ -19,19 +19,29 @@ import java.io.FileNotFoundException; import java.net.URL; import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.Objects; import java.util.Optional; import java.util.Scanner; +import java.util.stream.Collectors; + +import com.alibaba.fastjson2.JSON; +import com.alibaba.fastjson2.JSONArray; +import com.alibaba.fastjson2.JSONObject; +import com.alibaba.fastjson2.JSONWriter.Feature; import com.aliyun.dataworks.common.spec.domain.DataWorksWorkflowSpec; import com.aliyun.dataworks.common.spec.domain.Spec; +import com.aliyun.dataworks.common.spec.domain.SpecRefEntity; import com.aliyun.dataworks.common.spec.domain.Specification; import com.aliyun.dataworks.common.spec.domain.dw.codemodel.Code; import com.aliyun.dataworks.common.spec.domain.dw.codemodel.CodeModel; import com.aliyun.dataworks.common.spec.domain.dw.codemodel.CodeModelFactory; import com.aliyun.dataworks.common.spec.domain.dw.codemodel.EmrCode; +import com.aliyun.dataworks.common.spec.domain.dw.nodemodel.DataWorksNodeAdapter; import com.aliyun.dataworks.common.spec.domain.dw.nodemodel.DataWorksNodeCodeAdapter; +import com.aliyun.dataworks.common.spec.domain.enums.FailureStrategy; import com.aliyun.dataworks.common.spec.domain.enums.FunctionType; import com.aliyun.dataworks.common.spec.domain.enums.SpecKind; import com.aliyun.dataworks.common.spec.domain.enums.SpecVersion; @@ -45,6 +55,7 @@ import com.aliyun.dataworks.common.spec.domain.noref.SpecJoinBranch; import com.aliyun.dataworks.common.spec.domain.noref.SpecLogic; import com.aliyun.dataworks.common.spec.domain.noref.SpecParamHub; +import com.aliyun.dataworks.common.spec.domain.noref.SpecSubFlow; import com.aliyun.dataworks.common.spec.domain.ref.SpecArtifact; import com.aliyun.dataworks.common.spec.domain.ref.SpecDatasource; import com.aliyun.dataworks.common.spec.domain.ref.SpecDqcRule; @@ -52,12 +63,13 @@ import com.aliyun.dataworks.common.spec.domain.ref.SpecNode; import com.aliyun.dataworks.common.spec.domain.ref.SpecNodeOutput; import com.aliyun.dataworks.common.spec.domain.ref.SpecRuntimeResource; +import com.aliyun.dataworks.common.spec.domain.ref.SpecScheduleStrategy; import com.aliyun.dataworks.common.spec.domain.ref.SpecScript; import com.aliyun.dataworks.common.spec.domain.ref.SpecTable; import com.aliyun.dataworks.common.spec.domain.ref.SpecTrigger; import com.aliyun.dataworks.common.spec.domain.ref.SpecVariable; +import com.aliyun.dataworks.common.spec.domain.ref.SpecWorkflow; import com.aliyun.dataworks.common.spec.domain.ref.component.SpecComponent; -import com.aliyun.dataworks.common.spec.domain.specification.DataWorksNodeSpec; import com.aliyun.dataworks.common.spec.parser.SpecParserContext; import com.aliyun.dataworks.common.spec.utils.GsonUtils; import 
com.aliyun.dataworks.common.spec.utils.SpecDevUtil; @@ -65,11 +77,9 @@ import com.aliyun.dataworks.common.spec.writer.Writer; import com.aliyun.dataworks.common.spec.writer.WriterFactory; import com.aliyun.dataworks.common.spec.writer.impl.SpecificationWriter; - -import com.alibaba.fastjson2.JSON; -import com.alibaba.fastjson2.JSONWriter.Feature; - import lombok.extern.slf4j.Slf4j; +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.collections4.ListUtils; import org.apache.commons.lang3.StringUtils; import org.junit.Assert; import org.junit.Test; @@ -103,14 +113,14 @@ public void testExample() { // variable的node是否一致 SpecVariable variable_ctx_output_1 = specification.getVariables().stream().filter( - v -> v.getId().equals("ctx_output_1")).findFirst().get(); + v -> v.getId().equals("ctx_output_1")).findFirst().get(); SpecNode specNode_node_existed_xx = specification.getNodes().stream().filter( - n -> n.getId().equals("node_existed_xx")).findFirst().get(); + n -> n.getId().equals("node_existed_xx")).findFirst().get(); // Assert.assertSame(variable_ctx_output_1.getNode(), specNode_node_existed_xx); // node的Script是否一致 SpecNode specNode_node_1 = specification.getNodes().stream().filter(n -> n.getId().equals("node_1")).findFirst() - .get(); + .get(); List inputs = specNode_node_1.getInputs(); for (Input input : inputs) { @@ -118,7 +128,7 @@ public void testExample() { } SpecScript scriptFile1 = specification.getScripts().stream().filter(s -> s.getId().equals("script_file1")) - .findFirst().get(); + .findFirst().get(); Assert.assertSame(scriptFile1, specNode_node_1.getScript()); // 变量的类型是否正确 @@ -128,65 +138,65 @@ public void testExample() { // Script的parameters和variable是否一致 SpecVariable variable_biz = specification.getVariables().stream().filter(v -> v.getId().equals("bizdate")) - .findFirst().get(); + .findFirst().get(); SpecVariable specVariable1 = scriptFile1.getParameters().stream().filter(s -> s.getId().equals("bizdate")) - .findFirst().get(); + .findFirst().get(); Assert.assertSame(variable_biz, specVariable1); // Node的input中的artifacts是否一致 SpecArtifact node_specArtifact_table1 = specNode_node_1.getInputs().stream().filter( - input -> input instanceof SpecArtifact).map(input -> (SpecArtifact) input).filter( - specArtifact -> specArtifact.getId().equals("table1")).findFirst().get(); + input -> input instanceof SpecArtifact).map(input -> (SpecArtifact)input).filter( + specArtifact -> specArtifact.getId().equals("table1")).findFirst().get(); SpecArtifact specArtifact_table1 = specification.getArtifacts().stream().filter( - specArtifact -> specArtifact.getId().equals("table1")).findFirst().get(); + specArtifact -> specArtifact.getId().equals("table1")).findFirst().get(); Assert.assertSame(node_specArtifact_table1, specArtifact_table1); // Node的input中的variables是否一致 SpecVariable node_specArtifact_var = specNode_node_1.getInputs().stream().filter( - input -> input instanceof SpecVariable).map(input -> (SpecVariable) input).filter( - v -> v.getId().equals("ctx_output_1")).findFirst().get(); + input -> input instanceof SpecVariable).map(input -> (SpecVariable)input).filter( + v -> v.getId().equals("ctx_output_1")).findFirst().get(); Assert.assertSame(variable_ctx_output_1, node_specArtifact_var); // Node的output中的artifacts是否一致 SpecArtifact node_specArtifact_artifact2 = specNode_node_1.getOutputs().stream().filter( - output -> output instanceof SpecArtifact).map(output -> (SpecArtifact) output).filter( - a -> a.getId().equals("table3")).findFirst().get(); + output -> 
output instanceof SpecArtifact).map(output -> (SpecArtifact)output).filter( + a -> a.getId().equals("table3")).findFirst().get(); SpecArtifact specArtifact_table3 = specification.getArtifacts().stream().filter( - specArtifact -> specArtifact.getId().equals("table3")).findFirst().get(); + specArtifact -> specArtifact.getId().equals("table3")).findFirst().get(); Assert.assertSame(node_specArtifact_artifact2, specArtifact_table3); // Node的output中的variables是否一致 SpecVariable node_specVariable_var1 = specNode_node_1.getOutputs().stream().filter( - output -> output instanceof SpecVariable).map(output -> (SpecVariable) output).filter( - v -> v.getId().equals("region")).findFirst().get(); + output -> output instanceof SpecVariable).map(output -> (SpecVariable)output).filter( + v -> v.getId().equals("region")).findFirst().get(); SpecVariable specVariable_var1 = specification.getVariables().stream().filter( - variable -> variable.getId().equals("region")).findFirst().get(); + variable -> variable.getId().equals("region")).findFirst().get(); Assert.assertSame(node_specVariable_var1, specVariable_var1); // Node的trigger是否一致 SpecTrigger trigger = specNode_node_1.getTrigger(); SpecTrigger specTrigger = specification.getTriggers().stream().filter(t -> t.getId().equals("daily")) - .findFirst().get(); + .findFirst().get(); Assert.assertSame(trigger, specTrigger); // Node的runtimeResource是否一致 SpecRuntimeResource runtimeResource = specNode_node_1.getRuntimeResource(); SpecRuntimeResource resgroup1 = specification.getRuntimeResources().stream().filter( - r -> r.getId().equals("resgroup_1")).findFirst().get(); + r -> r.getId().equals("resgroup_1")).findFirst().get(); Assert.assertSame(resgroup1, runtimeResource); // Flow的nodeId是否一致 SpecFlowDepend specFlow_Depend_node = specification.getFlow().stream().filter( - f -> f.getNodeId().getId().equals("node_1")).findFirst().get(); + f -> f.getNodeId().getId().equals("node_1")).findFirst().get(); Assert.assertSame(specFlow_Depend_node.getNodeId(), specNode_node_1); // Flow中的Depends中的nodeID是否一致 SpecDepend nodeExistedXx = specFlow_Depend_node.getDepends().stream().filter( - d -> d.getNodeId().getId().equals("node_existed_xx")).findFirst().get(); + d -> d.getNodeId().getId().equals("node_existed_xx")).findFirst().get(); Assert.assertSame(specNode_node_existed_xx, nodeExistedXx.getNodeId()); Assert.assertNotNull(specNode_node_1.getDatasource()); @@ -211,19 +221,19 @@ public void testBranch() { Assert.assertNotNull(specification.getFlow()); SpecNode specNode_branch = specification.getNodes().stream().filter(n -> n.getId().equals("branch")).findFirst() - .get(); + .get(); // node branch的output是否一致 SpecBranches specBranches = specNode_branch.getBranch().getBranches().stream().filter(b -> b.getWhen().equals("a == 1")) - .findFirst().get(); + .findFirst().get(); SpecArtifact artifact = specification.getArtifacts().stream().filter(a -> a.getId().equals("branch_1")) - .findFirst().get(); + .findFirst().get(); Assert.assertSame(artifact, specBranches.getOutput()); // flow的output是否一致 SpecFlowDepend specFlowDepend = specification.getFlow().stream().filter( - f -> f.getNodeId().getId().equals("branch_1")).findFirst().get(); + f -> f.getNodeId().getId().equals("branch_1")).findFirst().get(); SpecDepend specDepend = specFlowDepend.getDepends().stream().findFirst().get(); Assert.assertSame(specDepend.getOutput(), artifact); @@ -287,16 +297,16 @@ public void testInnerFlow() { Assert.assertNotNull(doWhile.getNodes()); // script SpecScript script_sql1 = 
specification.getScripts().stream().filter(s -> s.getId().equals("sql1")).findFirst() - .get(); + .get(); SpecNode specNode1 = doWhile.getNodes().stream().filter(specNode -> specNode.getId().equals("sql1")).findFirst() - .get(); + .get(); Assert.assertSame(specNode1.getScript(), script_sql1); // while Assert.assertNotNull(doWhile.getSpecWhile()); SpecScript script_end = specification.getScripts().stream().filter(s -> s.getId().equals("end")).findFirst() - .get(); + .get(); Assert.assertSame(doWhile.getSpecWhile().getScript(), script_end); // flow @@ -315,7 +325,7 @@ public void testInnerFlow() { // variable 是否是同一对象 SpecVariable specVariable = specification.getVariables().stream().filter(v -> v.getId().equals("var_arr")) - .findFirst().get(); + .findFirst().get(); Assert.assertSame(foreach.getArray(), specVariable); // nodes中的script是否统一对象 @@ -378,7 +388,7 @@ public void testParameter_node() { Assert.assertNotNull(specification); SpecVariable ctx_var_2 = specification.getVariables().stream().filter(n -> n.getId().equals("ctx_var_2")) - .findFirst().get(); + .findFirst().get(); Assert.assertSame(ctx_var_2.getScope(), VariableScopeType.NODE_PARAMETER); } @@ -431,71 +441,71 @@ public String readJson(String fileName) { @Test public void testParse() { String spec = "{\n" - + " \"version\": \"1.0.0\",\n" - + " \"kind\": \"CycleWorkflow\",\n" - + " \"nodes\": [\n" - + " {\n" - + " \"id\": \"c05cc423ac8046a7b18ccc9dd88ef27e\",\n" - + " \"recurrence\": \"Normal\",\n" - + " \"timeout\": 3,\n" - + " \"instanceMode\": \"T+1\",\n" - + " \"rerunMode\": \"Allowed\",\n" - + " \"rerunTimes\": 3,\n" - + " \"rerunInterval\": 180000,\n" - + " \"script\": {\n" - + " \"language\": \"odps\",\n" - + " \"runtime\": {\n" - + " \"engine\": \"MaxCompute\",\n" - + " \"command\": \"ODPS_SQL\"\n" - + " },\n" - + " \"parameters\": [\n" - + " {\n" - + " \"name\": \"bizdate\",\n" - + " \"scope\": \"NodeParameter\",\n" - + " \"type\": \"System\",\n" - + " \"value\": \"$[yyyymmdd]\"\n" - + " }\n" - + " ]\n" - + " },\n" - + " \"trigger\": {\n" - + " \"id\": \"ddb2d936a16a4a45bc34b68c30d05f84\",\n" - + " \"type\": \"Scheduler\",\n" - + " \"cron\": \"00 00 00 * * ?\",\n" - + " \"startTime\": \"1970-01-01 00:00:00\",\n" - + " \"endTime\": \"9999-01-01 00:00:00\",\n" - + " \"timezone\": \"Asia/Shanghai\"\n" - + " },\n" - + " \"runtimeResource\": {\n" - + " \"resourceGroup\": \"dataphin_scheduler_pre\"\n" - + " },\n" - + " \"name\": \"p_param_2\",\n" - + " \"owner\": \"064152\",\n" - + " \"inputs\": {},\n" - + " \"outputs\": {\n" - + " \"outputs\": [\n" - + " {\n" - + " \"type\": \"Output\",\n" - + " \"data\": \"c05cc423ac8046a7b18ccc9dd88ef27e\",\n" - + " \"refTableName\": \"p_param_2\"\n" - + " }\n" - + " ]\n" - + " },\n" - + " \"functions\": [],\n" - + " \"fileResources\": []\n" - + " }\n" - + " ],\n" - + " \"flow\": [\n" - + " {\n" - + " \"nodeId\": \"c05cc423ac8046a7b18ccc9dd88ef27e\",\n" - + " \"depends\": [\n" - + " {\n" - + " \"type\": \"Normal\",\n" - + " \"output\": \"dw_scheduler_pre.test_sql002\"\n" - + " }\n" - + " ]\n" - + " }\n" - + " ]\n" - + " }"; + + " \"version\": \"1.0.0\",\n" + + " \"kind\": \"CycleWorkflow\",\n" + + " \"nodes\": [\n" + + " {\n" + + " \"id\": \"c05cc423ac8046a7b18ccc9dd88ef27e\",\n" + + " \"recurrence\": \"Normal\",\n" + + " \"timeout\": 3,\n" + + " \"instanceMode\": \"T+1\",\n" + + " \"rerunMode\": \"Allowed\",\n" + + " \"rerunTimes\": 3,\n" + + " \"rerunInterval\": 180000,\n" + + " \"script\": {\n" + + " \"language\": \"odps\",\n" + + " \"runtime\": {\n" + + " \"engine\": \"MaxCompute\",\n" + 
+ " \"command\": \"ODPS_SQL\"\n" + + " },\n" + + " \"parameters\": [\n" + + " {\n" + + " \"name\": \"bizdate\",\n" + + " \"scope\": \"NodeParameter\",\n" + + " \"type\": \"System\",\n" + + " \"value\": \"$[yyyymmdd]\"\n" + + " }\n" + + " ]\n" + + " },\n" + + " \"trigger\": {\n" + + " \"id\": \"ddb2d936a16a4a45bc34b68c30d05f84\",\n" + + " \"type\": \"Scheduler\",\n" + + " \"cron\": \"00 00 00 * * ?\",\n" + + " \"startTime\": \"1970-01-01 00:00:00\",\n" + + " \"endTime\": \"9999-01-01 00:00:00\",\n" + + " \"timezone\": \"Asia/Shanghai\"\n" + + " },\n" + + " \"runtimeResource\": {\n" + + " \"resourceGroup\": \"dataphin_scheduler_pre\"\n" + + " },\n" + + " \"name\": \"p_param_2\",\n" + + " \"owner\": \"064152\",\n" + + " \"inputs\": {},\n" + + " \"outputs\": {\n" + + " \"outputs\": [\n" + + " {\n" + + " \"type\": \"Output\",\n" + + " \"data\": \"c05cc423ac8046a7b18ccc9dd88ef27e\",\n" + + " \"refTableName\": \"p_param_2\"\n" + + " }\n" + + " ]\n" + + " },\n" + + " \"functions\": [],\n" + + " \"fileResources\": []\n" + + " }\n" + + " ],\n" + + " \"flow\": [\n" + + " {\n" + + " \"nodeId\": \"c05cc423ac8046a7b18ccc9dd88ef27e\",\n" + + " \"depends\": [\n" + + " {\n" + + " \"type\": \"Normal\",\n" + + " \"output\": \"dw_scheduler_pre.test_sql002\"\n" + + " }\n" + + " ]\n" + + " }\n" + + " ]\n" + + " }"; Specification specObj = SpecUtil.parseToDomain(spec); Assert.assertNotNull(specObj); @@ -522,9 +532,9 @@ public void testScriptRuntimeTemplate() { Assert.assertNotNull(script.getRuntime().getTemplate()); CodeModel codeModel - = CodeModelFactory.getCodeModel(script.getRuntime().getCommand(), JSON.toJSONString(script.getRuntime().getTemplate())); + = CodeModelFactory.getCodeModel(script.getRuntime().getCommand(), JSON.toJSONString(script.getRuntime().getTemplate())); - EmrCode emrCode = (EmrCode) codeModel.getCodeModel(); + EmrCode emrCode = (EmrCode)codeModel.getCodeModel(); emrCode.setName("test emr name"); emrCode.getProperties().getEnvs().put("test_v1", "v1"); @@ -573,71 +583,71 @@ public void testCombinedNode() { @Test public void testParamHubParser() { String spec = "{\n" - + " \"variables\": [\n" - + " {\n" - + " \"name\": \"my_const\",\n" - + " \"type\": \"Constant\",\n" - + " \"scope\": \"NodeContext\",\n" - + " \"value\": \"cn-shanghai\"\n" - + " \"description\": \"cn-shanghai\"\n" - + " },\n" - + " {\n" - + " \"name\": \"my_var\",\n" - + " \"type\": \"System\",\n" - + " \"scope\": \"NodeContext\",\n" - + " \"value\": \"${yyyymmdd}\"\n" - + " },\n" - + " {\n" - + " \"name\": \"outputs\",\n" - + " \"type\": \"PassThrough\",\n" - + " \"scope\": \"NodeContext\",\n" - + " \"referenceVariable\": {\n" - + " \"name\": \"outputs\",\n" - + " \"type\": \"NodeOutput\",\n" - + " \"scope\": \"NodeContext\",\n" - + " \"value\": \"${outputs}\",\n" - + " \"node\": {\n" - + " \"output\": \"autotest.28517448_out\"\n" - + " }\n" - + " }\n" - + " },\n" - + " {\n" - + " \"name\": \"shell_const_1\",\n" - + " \"type\": \"PassThrough\",\n" - + " \"scope\": \"NodeContext\",\n" - + " \"referenceVariable\": {\n" - + " \"name\": \"shell_const_1\",\n" - + " \"type\": \"NodeOutput\",\n" - + " \"scope\": \"NodeContext\",\n" - + " \"node\": {\n" - + " \"output\": \"autotest.28517347_out\"\n" - + " }\n" - + " }\n" - + " },\n" - + " {\n" - + " \"name\": \"shell_var_1\",\n" - + " \"type\": \"PassThrough\",\n" - + " \"scope\": \"NodeContext\",\n" - + " \"referenceVariable\": {\n" - + " \"name\": \"shell_var_1\",\n" - + " \"type\": \"NodeOutput\",\n" - + " \"scope\": \"NodeContext\",\n" - + " \"node\": {\n" - + " \"output\": 
\"autotest.28517347_out\"\n" - + " }\n" - + " }\n" - + " }\n" - + " ]\n" - + " }"; + + " \"variables\": [\n" + + " {\n" + + " \"name\": \"my_const\",\n" + + " \"type\": \"Constant\",\n" + + " \"scope\": \"NodeContext\",\n" + + " \"value\": \"cn-shanghai\"\n" + + " \"description\": \"cn-shanghai\"\n" + + " },\n" + + " {\n" + + " \"name\": \"my_var\",\n" + + " \"type\": \"System\",\n" + + " \"scope\": \"NodeContext\",\n" + + " \"value\": \"${yyyymmdd}\"\n" + + " },\n" + + " {\n" + + " \"name\": \"outputs\",\n" + + " \"type\": \"PassThrough\",\n" + + " \"scope\": \"NodeContext\",\n" + + " \"referenceVariable\": {\n" + + " \"name\": \"outputs\",\n" + + " \"type\": \"NodeOutput\",\n" + + " \"scope\": \"NodeContext\",\n" + + " \"value\": \"${outputs}\",\n" + + " \"node\": {\n" + + " \"output\": \"autotest.28517448_out\"\n" + + " }\n" + + " }\n" + + " },\n" + + " {\n" + + " \"name\": \"shell_const_1\",\n" + + " \"type\": \"PassThrough\",\n" + + " \"scope\": \"NodeContext\",\n" + + " \"referenceVariable\": {\n" + + " \"name\": \"shell_const_1\",\n" + + " \"type\": \"NodeOutput\",\n" + + " \"scope\": \"NodeContext\",\n" + + " \"node\": {\n" + + " \"output\": \"autotest.28517347_out\"\n" + + " }\n" + + " }\n" + + " },\n" + + " {\n" + + " \"name\": \"shell_var_1\",\n" + + " \"type\": \"PassThrough\",\n" + + " \"scope\": \"NodeContext\",\n" + + " \"referenceVariable\": {\n" + + " \"name\": \"shell_var_1\",\n" + + " \"type\": \"NodeOutput\",\n" + + " \"scope\": \"NodeContext\",\n" + + " \"node\": {\n" + + " \"output\": \"autotest.28517347_out\"\n" + + " }\n" + + " }\n" + + " }\n" + + " ]\n" + + " }"; SpecParserContext ctx = new SpecParserContext(); ctx.setVersion(SpecVersion.V_1_1_0.getLabel()); - SpecParamHub paramHub = (SpecParamHub) SpecDevUtil.getObjectByParser(SpecParamHub.class, JSON.parseObject(spec), ctx); + SpecParamHub paramHub = (SpecParamHub)SpecDevUtil.getObjectByParser(SpecParamHub.class, JSON.parseObject(spec), ctx); log.info("para hub: {}", GsonUtils.toJsonString(paramHub)); Assert.assertNotNull(paramHub); Assert.assertNotNull(paramHub.getVariables()); Assert.assertNotNull(paramHub.getVariables().stream().filter(v -> v.getName().equals("outputs")).findFirst() - .map(SpecVariable::getReferenceVariable).map(SpecVariable::getNode).map(SpecDepend::getOutput).map(SpecNodeOutput::getData) - .orElse(null)); + .map(SpecVariable::getReferenceVariable).map(SpecVariable::getNode).map(SpecDepend::getOutput).map(SpecNodeOutput::getData) + .orElse(null)); } @Test @@ -666,7 +676,7 @@ public void testParamHub() { Assert.assertEquals(5, node.getParamHub().getVariables().size()); SpecVariable shellVar1 = node.getParamHub().getVariables().stream() - .filter(v -> v.getName().equalsIgnoreCase("shell_var_1")).findFirst().orElse(null); + .filter(v -> v.getName().equalsIgnoreCase("shell_var_1")).findFirst().orElse(null); Assert.assertNotNull(shellVar1); Assert.assertNotNull(shellVar1.getReferenceVariable()); Assert.assertNotNull(shellVar1.getReferenceVariable().getNode()); @@ -778,9 +788,9 @@ public void testTable() { Assert.assertTrue(table instanceof SpecTable); - Assert.assertNotNull(((SpecTable) table).getDdl()); - Assert.assertNotNull(((SpecTable) table).getCalcEngine()); - Assert.assertNotNull(((SpecTable) table).getName()); + Assert.assertNotNull(((SpecTable)table).getDdl()); + Assert.assertNotNull(((SpecTable)table).getCalcEngine()); + Assert.assertNotNull(((SpecTable)table).getName()); log.info("spec: {}", SpecUtil.writeToSpec(specObj)); } @@ -883,231 +893,231 @@ public void testDqcRule() { @Test 
public void testNodeIdMissing() { String s = "{\n" - + "\t\"version\":\"1.1.0\",\n" - + "\t\"kind\":\"CycleWorkflow\",\n" - + "\t\"spec\":{\n" - + "\t\t\"nodes\":[\n" - + "\t\t\t{\n" - + "\t\t\t\t\"recurrence\":\"Normal\",\n" - + "\t\t\t\t\"id\":\"11195215\",\n" - + "\t\t\t\t\"instanceMode\":\"T+1\",\n" - + "\t\t\t\t\"rerunMode\":\"Allowed\",\n" - + "\t\t\t\t\"rerunTimes\":0,\n" - + "\t\t\t\t\"rerunInterval\":0,\n" - + "\t\t\t\t\"script\":{\n" - + "\t\t\t\t\t\"path\":\"业务流程/预发回归case_请不要加东西/控制\",\n" - + "\t\t\t\t\t\"runtime\":{\n" - + "\t\t\t\t\t\t\"command\":\"CONTROLLER_TRAVERSE\"\n" - + "\t\t\t\t\t}\n" - + "\t\t\t\t},\n" - + "\t\t\t\t\"trigger\":{\n" - + "\t\t\t\t\t\"type\":\"Scheduler\",\n" - + "\t\t\t\t\t\"cron\":\"00 25 00 * * ?\",\n" - + "\t\t\t\t\t\"startTime\":\"1970-01-01 00:00:00\",\n" - + "\t\t\t\t\t\"endTime\":\"9999-01-01 00:00:00\"\n" - + "\t\t\t\t},\n" - + "\t\t\t\t\"runtimeResource\":{\n" - + "\t\t\t\t\t\"resourceGroup\":\"group_2\"\n" - + "\t\t\t\t},\n" - + "\t\t\t\t\"name\":\"foreach_regr\",\n" - + "\t\t\t\t\"owner\":\"068198\",\n" - + "\t\t\t\t\"inputs\":{\n" - + "\t\t\t\t\t\"nodeOutputs\":[\n" - + "\t\t\t\t\t\t{\n" - + "\t\t\t\t\t\t\t\"data\":\"dw_scheduler_pre.11195181_out\",\n" - + "\t\t\t\t\t\t\t\"artifactType\":\"NodeOutput\",\n" - + "\t\t\t\t\t\t\t\"refTableName\":\"dw_scheduler_pre.11195181_out\"\n" - + "\t\t\t\t\t\t}\n" - + "\t\t\t\t\t]\n" - + "\t\t\t\t},\n" - + "\t\t\t\t\"outputs\":{\n" - + "\t\t\t\t\t\"nodeOutputs\":[\n" - + "\t\t\t\t\t\t{\n" - + "\t\t\t\t\t\t\t\"data\":\"dw_scheduler_pre.11195215_out\",\n" - + "\t\t\t\t\t\t\t\"artifactType\":\"NodeOutput\"\n" - + "\t\t\t\t\t\t},\n" - + "\t\t\t\t\t\t{\n" - + "\t\t\t\t\t\t\t\"data\":\"dw_scheduler_pre.foreach_regr\",\n" - + "\t\t\t\t\t\t\t\"artifactType\":\"NodeOutput\"\n" - + "\t\t\t\t\t\t}\n" - + "\t\t\t\t\t]\n" - + "\t\t\t\t},\n" - + "\t\t\t\t\"for-each\":{\n" - + "\t\t\t\t\t\"nodes\":[\n" - + "\t\t\t\t\t\t{\n" - + "\t\t\t\t\t\t\t\"recurrence\":\"Normal\",\n" - + "\t\t\t\t\t\t\t\"id\":\"11195231\",\n" - + "\t\t\t\t\t\t\t\"instanceMode\":\"T+1\",\n" - + "\t\t\t\t\t\t\t\"rerunMode\":\"Allowed\",\n" - + "\t\t\t\t\t\t\t\"rerunTimes\":0,\n" - + "\t\t\t\t\t\t\t\"rerunInterval\":0,\n" - + "\t\t\t\t\t\t\t\"script\":{\n" - + "\t\t\t\t\t\t\t\t\"runtime\":{\n" - + "\t\t\t\t\t\t\t\t\t\"command\":\"DIDE_SHELL\"\n" - + "\t\t\t\t\t\t\t\t}\n" - + "\t\t\t\t\t\t\t},\n" - + "\t\t\t\t\t\t\t\"trigger\":{\n" - + "\t\t\t\t\t\t\t\t\"type\":\"Scheduler\",\n" - + "\t\t\t\t\t\t\t\t\"cron\":\"00,00\",\n" - + "\t\t\t\t\t\t\t\t\"startTime\":\"1970-01-01 00:00:00\",\n" - + "\t\t\t\t\t\t\t\t\"endTime\":\"9999-01-01 16:03:47\"\n" - + "\t\t\t\t\t\t\t},\n" - + "\t\t\t\t\t\t\t\"runtimeResource\":{\n" - + "\t\t\t\t\t\t\t\t\"resourceGroup\":\"group_2\"\n" - + "\t\t\t\t\t\t\t},\n" - + "\t\t\t\t\t\t\t\"name\":\"echo_data\",\n" - + "\t\t\t\t\t\t\t\"owner\":\"068198\",\n" - + "\t\t\t\t\t\t\t\"inputs\":{\n" - + "\t\t\t\t\t\t\t\t\"nodeOutputs\":[\n" - + "\t\t\t\t\t\t\t\t\t{\n" - + "\t\t\t\t\t\t\t\t\t\t\"data\":\"dw_scheduler_pre.11195216_out\",\n" - + "\t\t\t\t\t\t\t\t\t\t\"artifactType\":\"NodeOutput\",\n" - + "\t\t\t\t\t\t\t\t\t\t\"refTableName\":\"dw_scheduler_pre.11195216_out\"\n" - + "\t\t\t\t\t\t\t\t\t}\n" - + "\t\t\t\t\t\t\t\t]\n" - + "\t\t\t\t\t\t\t},\n" - + "\t\t\t\t\t\t\t\"outputs\":{\n" - + "\t\t\t\t\t\t\t\t\"nodeOutputs\":[\n" - + "\t\t\t\t\t\t\t\t\t{\n" - + "\t\t\t\t\t\t\t\t\t\t\"data\":\"dw_scheduler_pre.11195231_out\",\n" - + "\t\t\t\t\t\t\t\t\t\t\"artifactType\":\"NodeOutput\"\n" - + "\t\t\t\t\t\t\t\t\t}\n" - + "\t\t\t\t\t\t\t\t]\n" - + 
"\t\t\t\t\t\t\t}\n" - + "\t\t\t\t\t\t},\n" - + "\t\t\t\t\t\t{\n" - + "\t\t\t\t\t\t\t\"recurrence\":\"Normal\",\n" - + "\t\t\t\t\t\t\t\"id\":\"11195217\",\n" - + "\t\t\t\t\t\t\t\"instanceMode\":\"T+1\",\n" - + "\t\t\t\t\t\t\t\"rerunMode\":\"Allowed\",\n" - + "\t\t\t\t\t\t\t\"rerunTimes\":0,\n" - + "\t\t\t\t\t\t\t\"rerunInterval\":0,\n" - + "\t\t\t\t\t\t\t\"script\":{\n" - + "\t\t\t\t\t\t\t\t\"runtime\":{\n" - + "\t\t\t\t\t\t\t\t\t\"command\":\"CONTROLLER_TRAVERSE_END\"\n" - + "\t\t\t\t\t\t\t\t}\n" - + "\t\t\t\t\t\t\t},\n" - + "\t\t\t\t\t\t\t\"trigger\":{\n" - + "\t\t\t\t\t\t\t\t\"type\":\"Manual\",\n" - + "\t\t\t\t\t\t\t\t\"startTime\":\"1970-01-01 00:00:00\",\n" - + "\t\t\t\t\t\t\t\t\"endTime\":\"9999-01-01 16:02:31\"\n" - + "\t\t\t\t\t\t\t},\n" - + "\t\t\t\t\t\t\t\"name\":\"end\",\n" - + "\t\t\t\t\t\t\t\"owner\":\"068198\",\n" - + "\t\t\t\t\t\t\t\"inputs\":{\n" - + "\t\t\t\t\t\t\t\t\"nodeOutputs\":[\n" - + "\t\t\t\t\t\t\t\t\t{\n" - + "\t\t\t\t\t\t\t\t\t\t\"data\":\"dw_scheduler_pre.11195231_out\",\n" - + "\t\t\t\t\t\t\t\t\t\t\"artifactType\":\"NodeOutput\",\n" - + "\t\t\t\t\t\t\t\t\t\t\"refTableName\":\"dw_scheduler_pre.11195231_out\"\n" - + "\t\t\t\t\t\t\t\t\t}\n" - + "\t\t\t\t\t\t\t\t]\n" - + "\t\t\t\t\t\t\t},\n" - + "\t\t\t\t\t\t\t\"outputs\":{\n" - + "\t\t\t\t\t\t\t\t\"nodeOutputs\":[\n" - + "\t\t\t\t\t\t\t\t\t{\n" - + "\t\t\t\t\t\t\t\t\t\t\"data\":\"dw_scheduler_pre.11195217_out\",\n" - + "\t\t\t\t\t\t\t\t\t\t\"artifactType\":\"NodeOutput\"\n" - + "\t\t\t\t\t\t\t\t\t}\n" - + "\t\t\t\t\t\t\t\t]\n" - + "\t\t\t\t\t\t\t}\n" - + "\t\t\t\t\t\t},\n" - + "\t\t\t\t\t\t{\n" - + "\t\t\t\t\t\t\t\"recurrence\":\"Normal\",\n" - + "\t\t\t\t\t\t\t\"id\":\"11195216\",\n" - + "\t\t\t\t\t\t\t\"instanceMode\":\"T+1\",\n" - + "\t\t\t\t\t\t\t\"rerunMode\":\"Allowed\",\n" - + "\t\t\t\t\t\t\t\"rerunTimes\":0,\n" - + "\t\t\t\t\t\t\t\"rerunInterval\":0,\n" - + "\t\t\t\t\t\t\t\"script\":{\n" - + "\t\t\t\t\t\t\t\t\"runtime\":{\n" - + "\t\t\t\t\t\t\t\t\t\"command\":\"CONTROLLER_TRAVERSE_START\"\n" - + "\t\t\t\t\t\t\t\t}\n" - + "\t\t\t\t\t\t\t},\n" - + "\t\t\t\t\t\t\t\"trigger\":{\n" - + "\t\t\t\t\t\t\t\t\"type\":\"Manual\",\n" - + "\t\t\t\t\t\t\t\t\"startTime\":\"1970-01-01 00:00:00\",\n" - + "\t\t\t\t\t\t\t\t\"endTime\":\"9999-01-01 16:02:31\"\n" - + "\t\t\t\t\t\t\t},\n" - + "\t\t\t\t\t\t\t\"name\":\"start\",\n" - + "\t\t\t\t\t\t\t\"owner\":\"068198\",\n" - + "\t\t\t\t\t\t\t\"inputs\":{\n" - + "\t\t\t\t\t\t\t\t\n" - + "\t\t\t\t\t\t\t},\n" - + "\t\t\t\t\t\t\t\"outputs\":{\n" - + "\t\t\t\t\t\t\t\t\"nodeOutputs\":[\n" - + "\t\t\t\t\t\t\t\t\t{\n" - + "\t\t\t\t\t\t\t\t\t\t\"data\":\"dw_scheduler_pre.11195216_out\",\n" - + "\t\t\t\t\t\t\t\t\t\t\"artifactType\":\"NodeOutput\"\n" - + "\t\t\t\t\t\t\t\t\t}\n" - + "\t\t\t\t\t\t\t\t]\n" - + "\t\t\t\t\t\t\t}\n" - + "\t\t\t\t\t\t}\n" - + "\t\t\t\t\t],\n" - + "\t\t\t\t\t\"flow\":[\n" - + "\t\t\t\t\t\t{\n" - + "\t\t\t\t\t\t\t\"nodeId\":\"11195231\",\n" - + "\t\t\t\t\t\t\t\"depends\":[\n" - + "\t\t\t\t\t\t\t\t{\n" - + "\t\t\t\t\t\t\t\t\t\"type\":\"Normal\",\n" - + "\t\t\t\t\t\t\t\t\t\"output\":\"dw_scheduler_pre.11195216_out\"\n" - + "\t\t\t\t\t\t\t\t}\n" - + "\t\t\t\t\t\t\t]\n" - + "\t\t\t\t\t\t},\n" - + "\t\t\t\t\t\t{\n" - + "\t\t\t\t\t\t\t\"nodeId\":\"11195217\",\n" - + "\t\t\t\t\t\t\t\"depends\":[\n" - + "\t\t\t\t\t\t\t\t{\n" - + "\t\t\t\t\t\t\t\t\t\"type\":\"Normal\",\n" - + "\t\t\t\t\t\t\t\t\t\"output\":\"dw_scheduler_pre.11195231_out\"\n" - + "\t\t\t\t\t\t\t\t}\n" - + "\t\t\t\t\t\t\t]\n" - + "\t\t\t\t\t\t},\n" - + "\t\t\t\t\t\t{\n" - + 
"\t\t\t\t\t\t\t\"nodeId\":\"11195216\",\n" - + "\t\t\t\t\t\t\t\"depends\":[\n" - + "\t\t\t\t\t\t\t\t\n" - + "\t\t\t\t\t\t\t]\n" - + "\t\t\t\t\t\t}\n" - + "\t\t\t\t\t],\n" - + "\t\t\t\t\t\"array\":{\n" - + "\t\t\t\t\t\t\"name\":\"loopDataArray\",\n" - + "\t\t\t\t\t\t\"artifactType\":\"Variable\",\n" - + "\t\t\t\t\t\t\"scope\":\"NodeContext\",\n" - + "\t\t\t\t\t\t\"type\":\"Constant\",\n" - + "\t\t\t\t\t\t\"node\":{\n" - + "\t\t\t\t\t\t\t\"nodeId\":\"11195215\"\n" - + "\t\t\t\t\t\t},\n" - + "\t\t\t\t\t\t\"referenceVariable\":{\n" - + "\t\t\t\t\t\t\t\"name\":\"outputs\",\n" - + "\t\t\t\t\t\t\t\"artifactType\":\"Variable\",\n" - + "\t\t\t\t\t\t\t\"scope\":\"NodeContext\",\n" - + "\t\t\t\t\t\t\t\"type\":\"NodeOutput\",\n" - + "\t\t\t\t\t\t\t\"node\":{\n" - + "\t\t\t\t\t\t\t\t\"nodeId\":\"11195181\",\n" - + "\t\t\t\t\t\t\t\t\"output\":\"dw_scheduler_pre.11195181_out\"\n" - + "\t\t\t\t\t\t\t}\n" - + "\t\t\t\t\t\t}\n" - + "\t\t\t\t\t}\n" - + "\t\t\t\t}\n" - + "\t\t\t}\n" - + "\t\t],\n" - + "\t\t\"flow\":[\n" - + "\t\t\t{\n" - + "\t\t\t\t\"nodeId\":\"11195215\",\n" - + "\t\t\t\t\"depends\":[\n" - + "\t\t\t\t\t{\n" - + "\t\t\t\t\t\t\"type\":\"Normal\",\n" - + "\t\t\t\t\t\t\"output\":\"dw_scheduler_pre.11195181_out\"\n" - + "\t\t\t\t\t}\n" - + "\t\t\t\t]\n" - + "\t\t\t}\n" - + "\t\t]\n" - + "\t},\n" - + "\t\"metadata\":{\n" - + "\t\t\"owner\":\"068198\"\n" - + "\t}\n" - + "}"; + + "\t\"version\":\"1.1.0\",\n" + + "\t\"kind\":\"CycleWorkflow\",\n" + + "\t\"spec\":{\n" + + "\t\t\"nodes\":[\n" + + "\t\t\t{\n" + + "\t\t\t\t\"recurrence\":\"Normal\",\n" + + "\t\t\t\t\"id\":\"11195215\",\n" + + "\t\t\t\t\"instanceMode\":\"T+1\",\n" + + "\t\t\t\t\"rerunMode\":\"Allowed\",\n" + + "\t\t\t\t\"rerunTimes\":0,\n" + + "\t\t\t\t\"rerunInterval\":0,\n" + + "\t\t\t\t\"script\":{\n" + + "\t\t\t\t\t\"path\":\"业务流程/预发回归case_请不要加东西/控制\",\n" + + "\t\t\t\t\t\"runtime\":{\n" + + "\t\t\t\t\t\t\"command\":\"CONTROLLER_TRAVERSE\"\n" + + "\t\t\t\t\t}\n" + + "\t\t\t\t},\n" + + "\t\t\t\t\"trigger\":{\n" + + "\t\t\t\t\t\"type\":\"Scheduler\",\n" + + "\t\t\t\t\t\"cron\":\"00 25 00 * * ?\",\n" + + "\t\t\t\t\t\"startTime\":\"1970-01-01 00:00:00\",\n" + + "\t\t\t\t\t\"endTime\":\"9999-01-01 00:00:00\"\n" + + "\t\t\t\t},\n" + + "\t\t\t\t\"runtimeResource\":{\n" + + "\t\t\t\t\t\"resourceGroup\":\"group_2\"\n" + + "\t\t\t\t},\n" + + "\t\t\t\t\"name\":\"foreach_regr\",\n" + + "\t\t\t\t\"owner\":\"068198\",\n" + + "\t\t\t\t\"inputs\":{\n" + + "\t\t\t\t\t\"nodeOutputs\":[\n" + + "\t\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\t\"data\":\"dw_scheduler_pre.11195181_out\",\n" + + "\t\t\t\t\t\t\t\"artifactType\":\"NodeOutput\",\n" + + "\t\t\t\t\t\t\t\"refTableName\":\"dw_scheduler_pre.11195181_out\"\n" + + "\t\t\t\t\t\t}\n" + + "\t\t\t\t\t]\n" + + "\t\t\t\t},\n" + + "\t\t\t\t\"outputs\":{\n" + + "\t\t\t\t\t\"nodeOutputs\":[\n" + + "\t\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\t\"data\":\"dw_scheduler_pre.11195215_out\",\n" + + "\t\t\t\t\t\t\t\"artifactType\":\"NodeOutput\"\n" + + "\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\t\"data\":\"dw_scheduler_pre.foreach_regr\",\n" + + "\t\t\t\t\t\t\t\"artifactType\":\"NodeOutput\"\n" + + "\t\t\t\t\t\t}\n" + + "\t\t\t\t\t]\n" + + "\t\t\t\t},\n" + + "\t\t\t\t\"for-each\":{\n" + + "\t\t\t\t\t\"nodes\":[\n" + + "\t\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\t\"recurrence\":\"Normal\",\n" + + "\t\t\t\t\t\t\t\"id\":\"11195231\",\n" + + "\t\t\t\t\t\t\t\"instanceMode\":\"T+1\",\n" + + "\t\t\t\t\t\t\t\"rerunMode\":\"Allowed\",\n" + + "\t\t\t\t\t\t\t\"rerunTimes\":0,\n" + + "\t\t\t\t\t\t\t\"rerunInterval\":0,\n" + + 
"\t\t\t\t\t\t\t\"script\":{\n" + + "\t\t\t\t\t\t\t\t\"runtime\":{\n" + + "\t\t\t\t\t\t\t\t\t\"command\":\"DIDE_SHELL\"\n" + + "\t\t\t\t\t\t\t\t}\n" + + "\t\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\t\"trigger\":{\n" + + "\t\t\t\t\t\t\t\t\"type\":\"Scheduler\",\n" + + "\t\t\t\t\t\t\t\t\"cron\":\"00,00\",\n" + + "\t\t\t\t\t\t\t\t\"startTime\":\"1970-01-01 00:00:00\",\n" + + "\t\t\t\t\t\t\t\t\"endTime\":\"9999-01-01 16:03:47\"\n" + + "\t\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\t\"runtimeResource\":{\n" + + "\t\t\t\t\t\t\t\t\"resourceGroup\":\"group_2\"\n" + + "\t\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\t\"name\":\"echo_data\",\n" + + "\t\t\t\t\t\t\t\"owner\":\"068198\",\n" + + "\t\t\t\t\t\t\t\"inputs\":{\n" + + "\t\t\t\t\t\t\t\t\"nodeOutputs\":[\n" + + "\t\t\t\t\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\t\t\t\t\"data\":\"dw_scheduler_pre.11195216_out\",\n" + + "\t\t\t\t\t\t\t\t\t\t\"artifactType\":\"NodeOutput\",\n" + + "\t\t\t\t\t\t\t\t\t\t\"refTableName\":\"dw_scheduler_pre.11195216_out\"\n" + + "\t\t\t\t\t\t\t\t\t}\n" + + "\t\t\t\t\t\t\t\t]\n" + + "\t\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\t\"outputs\":{\n" + + "\t\t\t\t\t\t\t\t\"nodeOutputs\":[\n" + + "\t\t\t\t\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\t\t\t\t\"data\":\"dw_scheduler_pre.11195231_out\",\n" + + "\t\t\t\t\t\t\t\t\t\t\"artifactType\":\"NodeOutput\"\n" + + "\t\t\t\t\t\t\t\t\t}\n" + + "\t\t\t\t\t\t\t\t]\n" + + "\t\t\t\t\t\t\t}\n" + + "\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\t\"recurrence\":\"Normal\",\n" + + "\t\t\t\t\t\t\t\"id\":\"11195217\",\n" + + "\t\t\t\t\t\t\t\"instanceMode\":\"T+1\",\n" + + "\t\t\t\t\t\t\t\"rerunMode\":\"Allowed\",\n" + + "\t\t\t\t\t\t\t\"rerunTimes\":0,\n" + + "\t\t\t\t\t\t\t\"rerunInterval\":0,\n" + + "\t\t\t\t\t\t\t\"script\":{\n" + + "\t\t\t\t\t\t\t\t\"runtime\":{\n" + + "\t\t\t\t\t\t\t\t\t\"command\":\"CONTROLLER_TRAVERSE_END\"\n" + + "\t\t\t\t\t\t\t\t}\n" + + "\t\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\t\"trigger\":{\n" + + "\t\t\t\t\t\t\t\t\"type\":\"Manual\",\n" + + "\t\t\t\t\t\t\t\t\"startTime\":\"1970-01-01 00:00:00\",\n" + + "\t\t\t\t\t\t\t\t\"endTime\":\"9999-01-01 16:02:31\"\n" + + "\t\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\t\"name\":\"end\",\n" + + "\t\t\t\t\t\t\t\"owner\":\"068198\",\n" + + "\t\t\t\t\t\t\t\"inputs\":{\n" + + "\t\t\t\t\t\t\t\t\"nodeOutputs\":[\n" + + "\t\t\t\t\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\t\t\t\t\"data\":\"dw_scheduler_pre.11195231_out\",\n" + + "\t\t\t\t\t\t\t\t\t\t\"artifactType\":\"NodeOutput\",\n" + + "\t\t\t\t\t\t\t\t\t\t\"refTableName\":\"dw_scheduler_pre.11195231_out\"\n" + + "\t\t\t\t\t\t\t\t\t}\n" + + "\t\t\t\t\t\t\t\t]\n" + + "\t\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\t\"outputs\":{\n" + + "\t\t\t\t\t\t\t\t\"nodeOutputs\":[\n" + + "\t\t\t\t\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\t\t\t\t\"data\":\"dw_scheduler_pre.11195217_out\",\n" + + "\t\t\t\t\t\t\t\t\t\t\"artifactType\":\"NodeOutput\"\n" + + "\t\t\t\t\t\t\t\t\t}\n" + + "\t\t\t\t\t\t\t\t]\n" + + "\t\t\t\t\t\t\t}\n" + + "\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\t\"recurrence\":\"Normal\",\n" + + "\t\t\t\t\t\t\t\"id\":\"11195216\",\n" + + "\t\t\t\t\t\t\t\"instanceMode\":\"T+1\",\n" + + "\t\t\t\t\t\t\t\"rerunMode\":\"Allowed\",\n" + + "\t\t\t\t\t\t\t\"rerunTimes\":0,\n" + + "\t\t\t\t\t\t\t\"rerunInterval\":0,\n" + + "\t\t\t\t\t\t\t\"script\":{\n" + + "\t\t\t\t\t\t\t\t\"runtime\":{\n" + + "\t\t\t\t\t\t\t\t\t\"command\":\"CONTROLLER_TRAVERSE_START\"\n" + + "\t\t\t\t\t\t\t\t}\n" + + "\t\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\t\"trigger\":{\n" + + "\t\t\t\t\t\t\t\t\"type\":\"Manual\",\n" + + "\t\t\t\t\t\t\t\t\"startTime\":\"1970-01-01 
00:00:00\",\n" + + "\t\t\t\t\t\t\t\t\"endTime\":\"9999-01-01 16:02:31\"\n" + + "\t\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\t\"name\":\"start\",\n" + + "\t\t\t\t\t\t\t\"owner\":\"068198\",\n" + + "\t\t\t\t\t\t\t\"inputs\":{\n" + + "\t\t\t\t\t\t\t\t\n" + + "\t\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\t\"outputs\":{\n" + + "\t\t\t\t\t\t\t\t\"nodeOutputs\":[\n" + + "\t\t\t\t\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\t\t\t\t\"data\":\"dw_scheduler_pre.11195216_out\",\n" + + "\t\t\t\t\t\t\t\t\t\t\"artifactType\":\"NodeOutput\"\n" + + "\t\t\t\t\t\t\t\t\t}\n" + + "\t\t\t\t\t\t\t\t]\n" + + "\t\t\t\t\t\t\t}\n" + + "\t\t\t\t\t\t}\n" + + "\t\t\t\t\t],\n" + + "\t\t\t\t\t\"flow\":[\n" + + "\t\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\t\"nodeId\":\"11195231\",\n" + + "\t\t\t\t\t\t\t\"depends\":[\n" + + "\t\t\t\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\t\t\t\"type\":\"Normal\",\n" + + "\t\t\t\t\t\t\t\t\t\"output\":\"dw_scheduler_pre.11195216_out\"\n" + + "\t\t\t\t\t\t\t\t}\n" + + "\t\t\t\t\t\t\t]\n" + + "\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\t\"nodeId\":\"11195217\",\n" + + "\t\t\t\t\t\t\t\"depends\":[\n" + + "\t\t\t\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\t\t\t\"type\":\"Normal\",\n" + + "\t\t\t\t\t\t\t\t\t\"output\":\"dw_scheduler_pre.11195231_out\"\n" + + "\t\t\t\t\t\t\t\t}\n" + + "\t\t\t\t\t\t\t]\n" + + "\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\t\"nodeId\":\"11195216\",\n" + + "\t\t\t\t\t\t\t\"depends\":[\n" + + "\t\t\t\t\t\t\t\t\n" + + "\t\t\t\t\t\t\t]\n" + + "\t\t\t\t\t\t}\n" + + "\t\t\t\t\t],\n" + + "\t\t\t\t\t\"array\":{\n" + + "\t\t\t\t\t\t\"name\":\"loopDataArray\",\n" + + "\t\t\t\t\t\t\"artifactType\":\"Variable\",\n" + + "\t\t\t\t\t\t\"scope\":\"NodeContext\",\n" + + "\t\t\t\t\t\t\"type\":\"Constant\",\n" + + "\t\t\t\t\t\t\"node\":{\n" + + "\t\t\t\t\t\t\t\"nodeId\":\"11195215\"\n" + + "\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\"referenceVariable\":{\n" + + "\t\t\t\t\t\t\t\"name\":\"outputs\",\n" + + "\t\t\t\t\t\t\t\"artifactType\":\"Variable\",\n" + + "\t\t\t\t\t\t\t\"scope\":\"NodeContext\",\n" + + "\t\t\t\t\t\t\t\"type\":\"NodeOutput\",\n" + + "\t\t\t\t\t\t\t\"node\":{\n" + + "\t\t\t\t\t\t\t\t\"nodeId\":\"11195181\",\n" + + "\t\t\t\t\t\t\t\t\"output\":\"dw_scheduler_pre.11195181_out\"\n" + + "\t\t\t\t\t\t\t}\n" + + "\t\t\t\t\t\t}\n" + + "\t\t\t\t\t}\n" + + "\t\t\t\t}\n" + + "\t\t\t}\n" + + "\t\t],\n" + + "\t\t\"flow\":[\n" + + "\t\t\t{\n" + + "\t\t\t\t\"nodeId\":\"11195215\",\n" + + "\t\t\t\t\"depends\":[\n" + + "\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\"type\":\"Normal\",\n" + + "\t\t\t\t\t\t\"output\":\"dw_scheduler_pre.11195181_out\"\n" + + "\t\t\t\t\t}\n" + + "\t\t\t\t]\n" + + "\t\t\t}\n" + + "\t\t]\n" + + "\t},\n" + + "\t\"metadata\":{\n" + + "\t\t\"owner\":\"068198\"\n" + + "\t}\n" + + "}"; Specification spec = SpecUtil.parseToDomain(s); log.info("{}", SpecUtil.writeToSpec(spec)); @@ -1116,159 +1126,159 @@ public void testNodeIdMissing() { SpecWriterContext context = new SpecWriterContext(); context.setVersion(spec.getVersion()); log.info("{}", - JSON.toJSONString(SpecUtil.write(((DataWorksWorkflowSpec) spec.getSpec()).getNodes().get(0), context), - Feature.PrettyFormat)); + JSON.toJSONString(SpecUtil.write(((DataWorksWorkflowSpec)spec.getSpec()).getNodes().get(0), context), + Feature.PrettyFormat)); } @Test public void testParseForeach() { String json = "{\n" - + " \"nodes\": [\n" - + " {\n" - + " \"id\": \"d0e36d269ed0414d9acd08149f360129\",\n" - + " \"recurrence\": \"Normal\",\n" - + " \"timeout\": 12,\n" - + " \"instanceMode\": \"T+1\",\n" - + " \"rerunMode\": \"Allowed\",\n" - + " \"rerunTimes\": 3,\n" - + 
" \"rerunInterval\": 18000,\n" - + " \"script\": {\n" - + " \"path\": \"/遍历节点0/traverse_start\",\n" - + " \"runtime\": {\n" - + " \"engine\": \"GENERAL\",\n" - + " \"command\": \"CONTROLLER_TRAVERSE_START\"\n" - + " },\n" - + " \"parameters\": []\n" - + " },\n" - + " \"trigger\": {\n" - + " \"type\": \"Scheduler\",\n" - + " \"cron\": \"00 00 00 * * ?\",\n" - + " \"startTime\": \"1970-01-01 00:00:00\",\n" - + " \"endTime\": \"9999-01-01 00:00:00\",\n" - + " \"timezone\": \"Asia/Shanghai\"\n" - + " },\n" - + " \"runtimeResource\": {\n" - + " \"resourceGroup\": \"res_group_1\"\n" - + " },\n" - + " \"name\": \"traverse_start\",\n" - + " \"owner\": \"WORKER_1482465063962\",\n" - + " \"inputs\": {},\n" - + " \"outputs\": {\n" - + " \"nodeOutputs\": [\n" - + " {\n" - + " \"artifactType\": \"NodeOutput\",\n" - + " \"data\": \"d0e36d269ed0414d9acd08149f360129\",\n" - + " \"refTableName\": \"traverse_start\"\n" - + " }\n" - + " ]\n" - + " },\n" - + " \"functions\": [],\n" - + " \"fileResources\": []\n" - + " },\n" - + " {\n" - + " \"id\": \"8401efef76224eacbf28cc284b11a788\",\n" - + " \"recurrence\": \"Normal\",\n" - + " \"timeout\": 12,\n" - + " \"instanceMode\": \"T+1\",\n" - + " \"rerunMode\": \"Allowed\",\n" - + " \"rerunTimes\": 3,\n" - + " \"rerunInterval\": 18000,\n" - + " \"script\": {\n" - + " \"path\": \"/遍历节点0/shell\",\n" - + " \"runtime\": {\n" - + " \"engine\": \"GENERAL\",\n" - + " \"command\": \"DIDE_SHELL\"\n" - + " },\n" - + " \"parameters\": []\n" - + " },\n" - + " \"trigger\": {\n" - + " \"type\": \"Scheduler\",\n" - + " \"cron\": \"00 00 00 * * ?\",\n" - + " \"startTime\": \"1970-01-01 00:00:00\",\n" - + " \"endTime\": \"9999-01-01 00:00:00\",\n" - + " \"timezone\": \"Asia/Shanghai\"\n" - + " },\n" - + " \"runtimeResource\": {\n" - + " \"resourceGroup\": \"res_group_1\"\n" - + " },\n" - + " \"name\": \"shell\",\n" - + " \"owner\": \"WORKER_1482465063962\",\n" - + " \"inputs\": {},\n" - + " \"outputs\": {\n" - + " \"nodeOutputs\": [\n" - + " {\n" - + " \"artifactType\": \"NodeOutput\",\n" - + " \"data\": \"8401efef76224eacbf28cc284b11a788\",\n" - + " \"refTableName\": \"shell\"\n" - + " }\n" - + " ]\n" - + " },\n" - + " \"functions\": [],\n" - + " \"fileResources\": []\n" - + " },\n" - + " {\n" - + " \"id\": \"227b06c3ab0549e3b77731b0c828dcec\",\n" - + " \"recurrence\": \"Normal\",\n" - + " \"timeout\": 12,\n" - + " \"instanceMode\": \"T+1\",\n" - + " \"rerunMode\": \"Allowed\",\n" - + " \"rerunTimes\": 3,\n" - + " \"rerunInterval\": 18000,\n" - + " \"script\": {\n" - + " \"path\": \"/遍历节点0/traverse_end\",\n" - + " \"runtime\": {\n" - + " \"engine\": \"GENERAL\",\n" - + " \"command\": \"CONTROLLER_TRAVERSE_END\"\n" - + " },\n" - + " \"parameters\": []\n" - + " },\n" - + " \"trigger\": {\n" - + " \"type\": \"Scheduler\",\n" - + " \"cron\": \"00 00 00 * * ?\",\n" - + " \"startTime\": \"1970-01-01 00:00:00\",\n" - + " \"endTime\": \"9999-01-01 00:00:00\",\n" - + " \"timezone\": \"Asia/Shanghai\"\n" - + " },\n" - + " \"runtimeResource\": {\n" - + " \"resourceGroup\": \"res_group_1\"\n" - + " },\n" - + " \"name\": \"traverse_end\",\n" - + " \"owner\": \"WORKER_1482465063962\",\n" - + " \"inputs\": {},\n" - + " \"outputs\": {\n" - + " \"nodeOutputs\": [\n" - + " {\n" - + " \"artifactType\": \"NodeOutput\",\n" - + " \"data\": \"227b06c3ab0549e3b77731b0c828dcec\",\n" - + " \"refTableName\": \"traverse_end\"\n" - + " }\n" - + " ]\n" - + " },\n" - + " \"functions\": [],\n" - + " \"fileResources\": []\n" - + " }\n" - + " ],\n" - + " \"flow\": [\n" - + " {\n" - + " \"nodeId\": 
\"8401efef76224eacbf28cc284b11a788\",\n" - + " \"depends\": [\n" - + " {\n" - + " \"nodeId\": \"d0e36d269ed0414d9acd08149f360129\",\n" - + " \"type\": \"Normal\"\n" - + " }\n" - + " ]\n" - + " },\n" - + " {\n" - + " \"nodeId\": \"227b06c3ab0549e3b77731b0c828dcec\",\n" - + " \"depends\": [\n" - + " {\n" - + " \"nodeId\": \"8401efef76224eacbf28cc284b11a788\",\n" - + " \"type\": \"Normal\"\n" - + " }\n" - + " ]\n" - + " }\n" - + " ]\n" - + " }"; + + " \"nodes\": [\n" + + " {\n" + + " \"id\": \"d0e36d269ed0414d9acd08149f360129\",\n" + + " \"recurrence\": \"Normal\",\n" + + " \"timeout\": 12,\n" + + " \"instanceMode\": \"T+1\",\n" + + " \"rerunMode\": \"Allowed\",\n" + + " \"rerunTimes\": 3,\n" + + " \"rerunInterval\": 18000,\n" + + " \"script\": {\n" + + " \"path\": \"/遍历节点0/traverse_start\",\n" + + " \"runtime\": {\n" + + " \"engine\": \"GENERAL\",\n" + + " \"command\": \"CONTROLLER_TRAVERSE_START\"\n" + + " },\n" + + " \"parameters\": []\n" + + " },\n" + + " \"trigger\": {\n" + + " \"type\": \"Scheduler\",\n" + + " \"cron\": \"00 00 00 * * ?\",\n" + + " \"startTime\": \"1970-01-01 00:00:00\",\n" + + " \"endTime\": \"9999-01-01 00:00:00\",\n" + + " \"timezone\": \"Asia/Shanghai\"\n" + + " },\n" + + " \"runtimeResource\": {\n" + + " \"resourceGroup\": \"res_group_1\"\n" + + " },\n" + + " \"name\": \"traverse_start\",\n" + + " \"owner\": \"WORKER_1482465063962\",\n" + + " \"inputs\": {},\n" + + " \"outputs\": {\n" + + " \"nodeOutputs\": [\n" + + " {\n" + + " \"artifactType\": \"NodeOutput\",\n" + + " \"data\": \"d0e36d269ed0414d9acd08149f360129\",\n" + + " \"refTableName\": \"traverse_start\"\n" + + " }\n" + + " ]\n" + + " },\n" + + " \"functions\": [],\n" + + " \"fileResources\": []\n" + + " },\n" + + " {\n" + + " \"id\": \"8401efef76224eacbf28cc284b11a788\",\n" + + " \"recurrence\": \"Normal\",\n" + + " \"timeout\": 12,\n" + + " \"instanceMode\": \"T+1\",\n" + + " \"rerunMode\": \"Allowed\",\n" + + " \"rerunTimes\": 3,\n" + + " \"rerunInterval\": 18000,\n" + + " \"script\": {\n" + + " \"path\": \"/遍历节点0/shell\",\n" + + " \"runtime\": {\n" + + " \"engine\": \"GENERAL\",\n" + + " \"command\": \"DIDE_SHELL\"\n" + + " },\n" + + " \"parameters\": []\n" + + " },\n" + + " \"trigger\": {\n" + + " \"type\": \"Scheduler\",\n" + + " \"cron\": \"00 00 00 * * ?\",\n" + + " \"startTime\": \"1970-01-01 00:00:00\",\n" + + " \"endTime\": \"9999-01-01 00:00:00\",\n" + + " \"timezone\": \"Asia/Shanghai\"\n" + + " },\n" + + " \"runtimeResource\": {\n" + + " \"resourceGroup\": \"res_group_1\"\n" + + " },\n" + + " \"name\": \"shell\",\n" + + " \"owner\": \"WORKER_1482465063962\",\n" + + " \"inputs\": {},\n" + + " \"outputs\": {\n" + + " \"nodeOutputs\": [\n" + + " {\n" + + " \"artifactType\": \"NodeOutput\",\n" + + " \"data\": \"8401efef76224eacbf28cc284b11a788\",\n" + + " \"refTableName\": \"shell\"\n" + + " }\n" + + " ]\n" + + " },\n" + + " \"functions\": [],\n" + + " \"fileResources\": []\n" + + " },\n" + + " {\n" + + " \"id\": \"227b06c3ab0549e3b77731b0c828dcec\",\n" + + " \"recurrence\": \"Normal\",\n" + + " \"timeout\": 12,\n" + + " \"instanceMode\": \"T+1\",\n" + + " \"rerunMode\": \"Allowed\",\n" + + " \"rerunTimes\": 3,\n" + + " \"rerunInterval\": 18000,\n" + + " \"script\": {\n" + + " \"path\": \"/遍历节点0/traverse_end\",\n" + + " \"runtime\": {\n" + + " \"engine\": \"GENERAL\",\n" + + " \"command\": \"CONTROLLER_TRAVERSE_END\"\n" + + " },\n" + + " \"parameters\": []\n" + + " },\n" + + " \"trigger\": {\n" + + " \"type\": \"Scheduler\",\n" + + " \"cron\": \"00 00 00 * * ?\",\n" + + " \"startTime\": 
\"1970-01-01 00:00:00\",\n" + + " \"endTime\": \"9999-01-01 00:00:00\",\n" + + " \"timezone\": \"Asia/Shanghai\"\n" + + " },\n" + + " \"runtimeResource\": {\n" + + " \"resourceGroup\": \"res_group_1\"\n" + + " },\n" + + " \"name\": \"traverse_end\",\n" + + " \"owner\": \"WORKER_1482465063962\",\n" + + " \"inputs\": {},\n" + + " \"outputs\": {\n" + + " \"nodeOutputs\": [\n" + + " {\n" + + " \"artifactType\": \"NodeOutput\",\n" + + " \"data\": \"227b06c3ab0549e3b77731b0c828dcec\",\n" + + " \"refTableName\": \"traverse_end\"\n" + + " }\n" + + " ]\n" + + " },\n" + + " \"functions\": [],\n" + + " \"fileResources\": []\n" + + " }\n" + + " ],\n" + + " \"flow\": [\n" + + " {\n" + + " \"nodeId\": \"8401efef76224eacbf28cc284b11a788\",\n" + + " \"depends\": [\n" + + " {\n" + + " \"nodeId\": \"d0e36d269ed0414d9acd08149f360129\",\n" + + " \"type\": \"Normal\"\n" + + " }\n" + + " ]\n" + + " },\n" + + " {\n" + + " \"nodeId\": \"227b06c3ab0549e3b77731b0c828dcec\",\n" + + " \"depends\": [\n" + + " {\n" + + " \"nodeId\": \"8401efef76224eacbf28cc284b11a788\",\n" + + " \"type\": \"Normal\"\n" + + " }\n" + + " ]\n" + + " }\n" + + " ]\n" + + " }"; SpecForEach foreach = SpecUtil.parse(json, SpecForEach.class, new SpecParserContext()); log.info("before: {}", json); } @@ -1276,356 +1286,774 @@ public void testParseForeach() { @Test public void testx() { String spec = "{\n" - + "\t\"version\":\"1.1.0\",\n" - + "\t\"kind\":\"TemporaryWorkflow\",\n" - + "\t\"spec\":{\n" - + "\t\t\"nodes\":[\n" - + "\t\t\t{\n" - + "\t\t\t\t\"recurrence\":\"Normal\",\n" - + "\t\t\t\t\"id\":\"5143110377713406119\",\n" - + "\t\t\t\t\"timeout\":0,\n" - + "\t\t\t\t\"instanceMode\":\"T+1\",\n" - + "\t\t\t\t\"rerunMode\":\"Allowed\",\n" - + "\t\t\t\t\"rerunTimes\":3,\n" - + "\t\t\t\t\"rerunInterval\":180000,\n" - + "\t\t\t\t\"script\":{\n" - + "\t\t\t\t\t\"path\":\"聿剑/flow/flow6/f_ge_shell1\",\n" - + "\t\t\t\t\t\"runtime\":{\n" - + "\t\t\t\t\t\t\"command\":\"DIDE_SHELL\"\n" - + "\t\t\t\t\t},\n" - + "\t\t\t\t\t\"content\":\"#!/bin/bash\\n#********************************************************************#\\n##author:聿剑\\n" - + "##create time:2024-04-09 16:05:37\\n#********************************************************************#\\necho $1\",\n" - + "\t\t\t\t\t\"id\":\"6138281211054878711\",\n" - + "\t\t\t\t\t\"parameters\":[\n" - + "\t\t\t\t\t\t{\n" - + "\t\t\t\t\t\t\t\"name\":\"flow_bizdate\",\n" - + "\t\t\t\t\t\t\t\"artifactType\":\"Variable\",\n" - + "\t\t\t\t\t\t\t\"scope\":\"NodeParameter\",\n" - + "\t\t\t\t\t\t\t\"type\":\"System\",\n" - + "\t\t\t\t\t\t\t\"value\":\"$[yyyymmdd-1]\",\n" - + "\t\t\t\t\t\t\t\"id\":\"6584333807719816392\"\n" - + "\t\t\t\t\t\t}\n" - + "\t\t\t\t\t]\n" - + "\t\t\t\t},\n" - + "\t\t\t\t\"trigger\":{\n" - + "\t\t\t\t\t\"type\":\"Scheduler\",\n" - + "\t\t\t\t\t\"id\":\"4752762997864777554\",\n" - + "\t\t\t\t\t\"cron\":\"00 00 00 * * ?\",\n" - + "\t\t\t\t\t\"startTime\":\"1970-01-01 00:00:00\",\n" - + "\t\t\t\t\t\"endTime\":\"9999-01-01 00:00:00\",\n" - + "\t\t\t\t\t\"timezone\":\"Asia/Shanghai\"\n" - + "\t\t\t\t},\n" - + "\t\t\t\t\"runtimeResource\":{\n" - + "\t\t\t\t\t\"resourceGroup\":\"group_2\",\n" - + "\t\t\t\t\t\"id\":\"5623679673296125496\",\n" - + "\t\t\t\t\t\"resourceGroupId\":\"2\"\n" - + "\t\t\t\t},\n" - + "\t\t\t\t\"name\":\"f_ge_shell1\",\n" - + "\t\t\t\t\"owner\":\"064152\",\n" - + "\t\t\t\t\"metadata\":{\n" - + "\t\t\t\t\t\"owner\":{\n" - + "\t\t\t\t\t\t\"userId\":\"064152\",\n" - + "\t\t\t\t\t\t\"userName\":\"聿剑\"\n" - + "\t\t\t\t\t},\n" - + 
"\t\t\t\t\t\"containerUuid\":\"8522335580915008505\"\n" - + "\t\t\t\t},\n" - + "\t\t\t\t\"inputs\":{\n" - + "\t\t\t\t\t\n" - + "\t\t\t\t},\n" - + "\t\t\t\t\"outputs\":{\n" - + "\t\t\t\t\t\"nodeOutputs\":[\n" - + "\t\t\t\t\t\t{\n" - + "\t\t\t\t\t\t\t\"data\":\"5143110377713406119\",\n" - + "\t\t\t\t\t\t\t\"artifactType\":\"NodeOutput\",\n" - + "\t\t\t\t\t\t\t\"refTableName\":\"f_ge_shell1\",\n" - + "\t\t\t\t\t\t\t\"isDefault\":true\n" - + "\t\t\t\t\t\t}\n" - + "\t\t\t\t\t]\n" - + "\t\t\t\t}\n" - + "\t\t\t},\n" - + "\t\t\t{\n" - + "\t\t\t\t\"recurrence\":\"Normal\",\n" - + "\t\t\t\t\"id\":\"7495526614688319692\",\n" - + "\t\t\t\t\"timeout\":0,\n" - + "\t\t\t\t\"instanceMode\":\"T+1\",\n" - + "\t\t\t\t\"rerunMode\":\"Allowed\",\n" - + "\t\t\t\t\"rerunTimes\":3,\n" - + "\t\t\t\t\"rerunInterval\":180000,\n" - + "\t\t\t\t\"datasource\":{\n" - + "\t\t\t\t\t\"name\":\"odps_first\",\n" - + "\t\t\t\t\t\"type\":\"odps\"\n" - + "\t\t\t\t},\n" - + "\t\t\t\t\"script\":{\n" - + "\t\t\t\t\t\"path\":\"聿剑/flow/flow6/f_mc_sql1\",\n" - + "\t\t\t\t\t\"runtime\":{\n" - + "\t\t\t\t\t\t\"command\":\"ODPS_SQL\"\n" - + "\t\t\t\t\t},\n" - + "\t\t\t\t\t\"content\":\"--MaxCompute SQL\\n--********************************************************************--\\n--author: " - + "聿剑\\n--create time: 2024-04-09 10:53:58\\n--********************************************************************--\\nSELECT " - + "'${flow_bizdate}';\\n\",\n" - + "\t\t\t\t\t\"id\":\"5724702094894912201\",\n" - + "\t\t\t\t\t\"parameters\":[\n" - + "\t\t\t\t\t\t{\n" - + "\t\t\t\t\t\t\t\"name\":\"flow_bizdate\",\n" - + "\t\t\t\t\t\t\t\"artifactType\":\"Variable\",\n" - + "\t\t\t\t\t\t\t\"scope\":\"NodeParameter\",\n" - + "\t\t\t\t\t\t\t\"type\":\"System\",\n" - + "\t\t\t\t\t\t\t\"value\":\"$[yyyymmdd-1]\",\n" - + "\t\t\t\t\t\t\t\"id\":\"6584333807719816392\"\n" - + "\t\t\t\t\t\t}\n" - + "\t\t\t\t\t]\n" - + "\t\t\t\t},\n" - + "\t\t\t\t\"trigger\":{\n" - + "\t\t\t\t\t\"type\":\"Scheduler\",\n" - + "\t\t\t\t\t\"id\":\"8888865284073976707\",\n" - + "\t\t\t\t\t\"cron\":\"00 00 00 * * ?\",\n" - + "\t\t\t\t\t\"startTime\":\"1970-01-01 00:00:00\",\n" - + "\t\t\t\t\t\"endTime\":\"9999-01-01 00:00:00\",\n" - + "\t\t\t\t\t\"timezone\":\"Asia/Shanghai\"\n" - + "\t\t\t\t},\n" - + "\t\t\t\t\"runtimeResource\":{\n" - + "\t\t\t\t\t\"resourceGroup\":\"group_2\",\n" - + "\t\t\t\t\t\"id\":\"5623679673296125496\",\n" - + "\t\t\t\t\t\"resourceGroupId\":\"2\"\n" - + "\t\t\t\t},\n" - + "\t\t\t\t\"name\":\"f_mc_sql1\",\n" - + "\t\t\t\t\"owner\":\"064152\",\n" - + "\t\t\t\t\"metadata\":{\n" - + "\t\t\t\t\t\"owner\":{\n" - + "\t\t\t\t\t\t\"userId\":\"064152\",\n" - + "\t\t\t\t\t\t\"userName\":\"聿剑\"\n" - + "\t\t\t\t\t},\n" - + "\t\t\t\t\t\"containerUuid\":\"8522335580915008505\"\n" - + "\t\t\t\t},\n" - + "\t\t\t\t\"inputs\":{\n" - + "\t\t\t\t\t\n" - + "\t\t\t\t},\n" - + "\t\t\t\t\"outputs\":{\n" - + "\t\t\t\t\t\"nodeOutputs\":[\n" - + "\t\t\t\t\t\t{\n" - + "\t\t\t\t\t\t\t\"data\":\"7495526614688319692\",\n" - + "\t\t\t\t\t\t\t\"artifactType\":\"NodeOutput\",\n" - + "\t\t\t\t\t\t\t\"refTableName\":\"f_mc_sql1\",\n" - + "\t\t\t\t\t\t\t\"isDefault\":true\n" - + "\t\t\t\t\t\t}\n" - + "\t\t\t\t\t]\n" - + "\t\t\t\t}\n" - + "\t\t\t}\n" - + "\t\t],\n" - + "\t\t\"flow\":[\n" - + "\t\t\t{\n" - + "\t\t\t\t\"nodeId\":\"5143110377713406119\",\n" - + "\t\t\t\t\"depends\":[\n" - + "\t\t\t\t\t{\n" - + "\t\t\t\t\t\t\"type\":\"Normal\",\n" - + "\t\t\t\t\t\t\"output\":\"7495526614688319692\"\n" - + "\t\t\t\t\t}\n" - + "\t\t\t\t]\n" - + "\t\t\t}\n" - + "\t\t],\n" - + "\t\t\"variables\":[\n" - 
+ "\t\t\t{\n" - + "\t\t\t\t\"name\":\"flow_bizdate\",\n" - + "\t\t\t\t\"artifactType\":\"Variable\",\n" - + "\t\t\t\t\"scope\":\"NodeParameter\",\n" - + "\t\t\t\t\"type\":\"System\",\n" - + "\t\t\t\t\"value\":\"$[yyyymmdd-1]\"\n" - + "\t\t\t}\n" - + "\t\t]\n" - + "\t},\n" - + "\t\"metadata\":{\n" - + "\t\t\"owner\":{\n" - + "\t\t\t\"userId\":\"064152\",\n" - + "\t\t\t\"userName\":\"聿剑\"\n" - + "\t\t},\n" - + "\t\t\"name\":\"fullflow2\",\n" - + "\t\t\"tenantId\":\"1\",\n" - + "\t\t\"type\":\"CycleWorkflow\",\n" - + "\t\t\"uuid\":\"8522335580915008505\",\n" - + "\t\t\"projectId\":\"23620\"\n" - + "\t}\n" - + "}"; + + "\t\"version\":\"1.1.0\",\n" + + "\t\"kind\":\"TemporaryWorkflow\",\n" + + "\t\"spec\":{\n" + + "\t\t\"nodes\":[\n" + + "\t\t\t{\n" + + "\t\t\t\t\"recurrence\":\"Normal\",\n" + + "\t\t\t\t\"id\":\"5143110377713406119\",\n" + + "\t\t\t\t\"timeout\":0,\n" + + "\t\t\t\t\"instanceMode\":\"T+1\",\n" + + "\t\t\t\t\"rerunMode\":\"Allowed\",\n" + + "\t\t\t\t\"rerunTimes\":3,\n" + + "\t\t\t\t\"rerunInterval\":180000,\n" + + "\t\t\t\t\"script\":{\n" + + "\t\t\t\t\t\"path\":\"聿剑/flow/flow6/f_ge_shell1\",\n" + + "\t\t\t\t\t\"runtime\":{\n" + + "\t\t\t\t\t\t\"command\":\"DIDE_SHELL\"\n" + + "\t\t\t\t\t},\n" + + "\t\t\t\t\t\"content\":\"#!/bin/bash\\n#********************************************************************#\\n##author:聿剑\\n" + + "##create time:2024-04-09 16:05:37\\n#********************************************************************#\\necho $1\",\n" + + "\t\t\t\t\t\"id\":\"6138281211054878711\",\n" + + "\t\t\t\t\t\"parameters\":[\n" + + "\t\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\t\"name\":\"flow_bizdate\",\n" + + "\t\t\t\t\t\t\t\"artifactType\":\"Variable\",\n" + + "\t\t\t\t\t\t\t\"scope\":\"NodeParameter\",\n" + + "\t\t\t\t\t\t\t\"type\":\"System\",\n" + + "\t\t\t\t\t\t\t\"value\":\"$[yyyymmdd-1]\",\n" + + "\t\t\t\t\t\t\t\"id\":\"6584333807719816392\"\n" + + "\t\t\t\t\t\t}\n" + + "\t\t\t\t\t]\n" + + "\t\t\t\t},\n" + + "\t\t\t\t\"trigger\":{\n" + + "\t\t\t\t\t\"type\":\"Scheduler\",\n" + + "\t\t\t\t\t\"id\":\"4752762997864777554\",\n" + + "\t\t\t\t\t\"cron\":\"00 00 00 * * ?\",\n" + + "\t\t\t\t\t\"startTime\":\"1970-01-01 00:00:00\",\n" + + "\t\t\t\t\t\"endTime\":\"9999-01-01 00:00:00\",\n" + + "\t\t\t\t\t\"timezone\":\"Asia/Shanghai\"\n" + + "\t\t\t\t},\n" + + "\t\t\t\t\"runtimeResource\":{\n" + + "\t\t\t\t\t\"resourceGroup\":\"group_2\",\n" + + "\t\t\t\t\t\"id\":\"5623679673296125496\",\n" + + "\t\t\t\t\t\"resourceGroupId\":\"2\"\n" + + "\t\t\t\t},\n" + + "\t\t\t\t\"name\":\"f_ge_shell1\",\n" + + "\t\t\t\t\"owner\":\"064152\",\n" + + "\t\t\t\t\"metadata\":{\n" + + "\t\t\t\t\t\"owner\":{\n" + + "\t\t\t\t\t\t\"userId\":\"064152\",\n" + + "\t\t\t\t\t\t\"userName\":\"聿剑\"\n" + + "\t\t\t\t\t},\n" + + "\t\t\t\t\t\"containerUuid\":\"8522335580915008505\"\n" + + "\t\t\t\t},\n" + + "\t\t\t\t\"inputs\":{\n" + + "\t\t\t\t\t\n" + + "\t\t\t\t},\n" + + "\t\t\t\t\"outputs\":{\n" + + "\t\t\t\t\t\"nodeOutputs\":[\n" + + "\t\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\t\"data\":\"5143110377713406119\",\n" + + "\t\t\t\t\t\t\t\"artifactType\":\"NodeOutput\",\n" + + "\t\t\t\t\t\t\t\"refTableName\":\"f_ge_shell1\",\n" + + "\t\t\t\t\t\t\t\"isDefault\":true\n" + + "\t\t\t\t\t\t}\n" + + "\t\t\t\t\t]\n" + + "\t\t\t\t}\n" + + "\t\t\t},\n" + + "\t\t\t{\n" + + "\t\t\t\t\"recurrence\":\"Normal\",\n" + + "\t\t\t\t\"id\":\"7495526614688319692\",\n" + + "\t\t\t\t\"timeout\":0,\n" + + "\t\t\t\t\"instanceMode\":\"T+1\",\n" + + "\t\t\t\t\"rerunMode\":\"Allowed\",\n" + + "\t\t\t\t\"rerunTimes\":3,\n" + + 
"\t\t\t\t\"rerunInterval\":180000,\n" + + "\t\t\t\t\"datasource\":{\n" + + "\t\t\t\t\t\"name\":\"odps_first\",\n" + + "\t\t\t\t\t\"type\":\"odps\"\n" + + "\t\t\t\t},\n" + + "\t\t\t\t\"script\":{\n" + + "\t\t\t\t\t\"path\":\"聿剑/flow/flow6/f_mc_sql1\",\n" + + "\t\t\t\t\t\"runtime\":{\n" + + "\t\t\t\t\t\t\"command\":\"ODPS_SQL\"\n" + + "\t\t\t\t\t},\n" + + "\t\t\t\t\t\"content\":\"--MaxCompute SQL\\n--********************************************************************--\\n--author: " + + "聿剑\\n--create time: 2024-04-09 10:53:58\\n--********************************************************************--\\nSELECT " + + "'${flow_bizdate}';\\n\",\n" + + "\t\t\t\t\t\"id\":\"5724702094894912201\",\n" + + "\t\t\t\t\t\"parameters\":[\n" + + "\t\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\t\"name\":\"flow_bizdate\",\n" + + "\t\t\t\t\t\t\t\"artifactType\":\"Variable\",\n" + + "\t\t\t\t\t\t\t\"scope\":\"NodeParameter\",\n" + + "\t\t\t\t\t\t\t\"type\":\"System\",\n" + + "\t\t\t\t\t\t\t\"value\":\"$[yyyymmdd-1]\",\n" + + "\t\t\t\t\t\t\t\"id\":\"6584333807719816392\"\n" + + "\t\t\t\t\t\t}\n" + + "\t\t\t\t\t]\n" + + "\t\t\t\t},\n" + + "\t\t\t\t\"trigger\":{\n" + + "\t\t\t\t\t\"type\":\"Scheduler\",\n" + + "\t\t\t\t\t\"id\":\"8888865284073976707\",\n" + + "\t\t\t\t\t\"cron\":\"00 00 00 * * ?\",\n" + + "\t\t\t\t\t\"startTime\":\"1970-01-01 00:00:00\",\n" + + "\t\t\t\t\t\"endTime\":\"9999-01-01 00:00:00\",\n" + + "\t\t\t\t\t\"timezone\":\"Asia/Shanghai\"\n" + + "\t\t\t\t},\n" + + "\t\t\t\t\"runtimeResource\":{\n" + + "\t\t\t\t\t\"resourceGroup\":\"group_2\",\n" + + "\t\t\t\t\t\"id\":\"5623679673296125496\",\n" + + "\t\t\t\t\t\"resourceGroupId\":\"2\"\n" + + "\t\t\t\t},\n" + + "\t\t\t\t\"name\":\"f_mc_sql1\",\n" + + "\t\t\t\t\"owner\":\"064152\",\n" + + "\t\t\t\t\"metadata\":{\n" + + "\t\t\t\t\t\"owner\":{\n" + + "\t\t\t\t\t\t\"userId\":\"064152\",\n" + + "\t\t\t\t\t\t\"userName\":\"聿剑\"\n" + + "\t\t\t\t\t},\n" + + "\t\t\t\t\t\"containerUuid\":\"8522335580915008505\"\n" + + "\t\t\t\t},\n" + + "\t\t\t\t\"inputs\":{\n" + + "\t\t\t\t\t\n" + + "\t\t\t\t},\n" + + "\t\t\t\t\"outputs\":{\n" + + "\t\t\t\t\t\"nodeOutputs\":[\n" + + "\t\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\t\"data\":\"7495526614688319692\",\n" + + "\t\t\t\t\t\t\t\"artifactType\":\"NodeOutput\",\n" + + "\t\t\t\t\t\t\t\"refTableName\":\"f_mc_sql1\",\n" + + "\t\t\t\t\t\t\t\"isDefault\":true\n" + + "\t\t\t\t\t\t}\n" + + "\t\t\t\t\t]\n" + + "\t\t\t\t}\n" + + "\t\t\t}\n" + + "\t\t],\n" + + "\t\t\"flow\":[\n" + + "\t\t\t{\n" + + "\t\t\t\t\"nodeId\":\"5143110377713406119\",\n" + + "\t\t\t\t\"depends\":[\n" + + "\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\"type\":\"Normal\",\n" + + "\t\t\t\t\t\t\"output\":\"7495526614688319692\"\n" + + "\t\t\t\t\t}\n" + + "\t\t\t\t]\n" + + "\t\t\t}\n" + + "\t\t],\n" + + "\t\t\"variables\":[\n" + + "\t\t\t{\n" + + "\t\t\t\t\"name\":\"flow_bizdate\",\n" + + "\t\t\t\t\"artifactType\":\"Variable\",\n" + + "\t\t\t\t\"scope\":\"NodeParameter\",\n" + + "\t\t\t\t\"type\":\"System\",\n" + + "\t\t\t\t\"value\":\"$[yyyymmdd-1]\"\n" + + "\t\t\t}\n" + + "\t\t]\n" + + "\t},\n" + + "\t\"metadata\":{\n" + + "\t\t\"owner\":{\n" + + "\t\t\t\"userId\":\"064152\",\n" + + "\t\t\t\"userName\":\"聿剑\"\n" + + "\t\t},\n" + + "\t\t\"name\":\"fullflow2\",\n" + + "\t\t\"tenantId\":\"1\",\n" + + "\t\t\"type\":\"CycleWorkflow\",\n" + + "\t\t\"uuid\":\"8522335580915008505\",\n" + + "\t\t\"projectId\":\"23620\"\n" + + "\t}\n" + + "}"; Specification resp = SpecUtil.parseToDomain(spec); log.info("resp: {}", resp); Assert.assertNotNull(resp); } + @Test + public void 
testParseNodeWithComponent() { + String spec = "{\n" + + " \"version\": \"1.1.0\",\n" + + " \"kind\": \"CycleWorkflow\",\n" + + " \"spec\": {\n" + + " \"nodes\": [\n" + + " {\n" + + " \"recurrence\": \"Normal\",\n" + + " \"id\": \"6289081068484952005\",\n" + + " \"timeout\": 0,\n" + + " \"instanceMode\": \"T+1\",\n" + + " \"rerunMode\": \"Allowed\",\n" + + " \"rerunTimes\": 3,\n" + + " \"rerunInterval\": 180000,\n" + + " \"datasource\": {\n" + + " \"name\": \"odps_first\",\n" + + " \"type\": \"odps\"\n" + + " },\n" + + " \"script\": {\n" + + " \"language\": \"odps-sql\",\n" + + " \"path\": \"昊祯/组件/c1\",\n" + + " \"runtime\": {\n" + + " \"command\": \"COMPONENT_SQL\",\n" + + " \"commandTypeId\": 1010\n" + + " },\n" + + " \"id\": \"6423534013528078585\"\n" + + " },\n" + + " \"trigger\": {\n" + + " \"type\": \"Scheduler\",\n" + + " \"id\": \"5065170306719262538\",\n" + + " \"cron\": \"00 00 00 * * ?\",\n" + + " \"startTime\": \"1970-01-01 00:00:00\",\n" + + " \"endTime\": \"9999-01-01 00:00:00\",\n" + + " \"timezone\": \"Asia/Shanghai\"\n" + + " },\n" + + " \"runtimeResource\": {\n" + + " \"resourceGroup\": \"wengzi_test\",\n" + + " \"id\": \"5700220827937093292\",\n" + + " \"resourceGroupId\": \"9527\"\n" + + " },\n" + + " \"name\": \"c1\",\n" + + " \"owner\": \"067848\",\n" + + " \"component\": {\n" + + " \"description\": \"11\",\n" + + " \"id\": \"6128718817130431653\",\n" + + " \"inputs\": [\n" + + " {\n" + + " \"name\": \"p1\"\n" + + " },\n" + + " {\n" + + " \"name\": \"p2\"\n" + + " }\n" + + " ],\n" + + " \"metadata\": {\n" + + " \"version\": \"3\"\n" + + " },\n" + + " \"name\": \"c1\",\n" + + " \"outputs\": [],\n" + + " \"owner\": \"067848\"\n" + + " },\n" + + " \"metadata\": {\n" + + " \"tenantId\": \"1\",\n" + + " \"projectId\": \"23620\"\n" + + " },\n" + + " \"inputs\": {\n" + + " \"nodeOutputs\": [\n" + + " {\n" + + " \"data\": \"dw_scheduler_pre_root\",\n" + + " \"artifactType\": \"NodeOutput\",\n" + + " \"isDefault\": false\n" + + " }\n" + + " ]\n" + + " },\n" + + " \"outputs\": {\n" + + " \"nodeOutputs\": [\n" + + " {\n" + + " \"data\": \"6289081068484952005\",\n" + + " \"artifactType\": \"NodeOutput\",\n" + + " \"refTableName\": \"c1\",\n" + + " \"isDefault\": true\n" + + " }\n" + + " ]\n" + + " }\n" + + " }\n" + + " ],\n" + + " \"flow\": [\n" + + " {\n" + + " \"nodeId\": \"6289081068484952005\",\n" + + " \"depends\": [\n" + + " {\n" + + " \"type\": \"Normal\",\n" + + " \"output\": \"dw_scheduler_pre_root\"\n" + + " }\n" + + " ]\n" + + " }\n" + + " ]\n" + + " },\n" + + " \"metadata\": {\n" + + " \"uuid\": \"6289081068484952005\"\n" + + " }\n" + + " }"; + + Specification specification = SpecUtil.parseToDomain(spec); + Optional component = Optional.ofNullable(specification) + .map(Specification::getSpec).map(DataWorksWorkflowSpec::getNodes) + .flatMap(l -> l.stream().map(SpecNode::getComponent).filter(Objects::nonNull).findAny()); + + Assert.assertTrue(component.isPresent()); + Assert.assertNotNull(component.get().getMetadata()); + Assert.assertNotNull(component.get().getId()); + Assert.assertNotNull(component.get().getInputs()); + Assert.assertNotNull(component.get().getOutputs()); + } + + @Test + public void testWorkflow() { + String spec = "{\n" + + " \"version\": \"1.1.0\",\n" + + " \"kind\": \"CycleWorkflow\",\n" + + " \"spec\": {\n" + + " \"workflows\": [\n" + + " {\n" + + " \"id\": \"flow_1\",\n" + + " \"name\": \"flow_1\",\n" + + " \"strategy\": {\n" + + " \"failureStrategy\": \"Continue\"\n" + + " },\n" + + " \"script\": {\n" + + " \"runtime\": {\n" + + " \"command\": 
\"XxxWorkflow\",\n" + + " \"commandTypeId\": 1111\n" + + " },\n" + + " \"parameters\": [\n" + + " {\n" + + " \"name\": \"p1\",\n" + + " \"type\": \"System\",\n" + + " \"scope\": \"Workflow\",\n" + + " \"value\": \"$[yyyymmdd]\"\n" + + " },\n" + + " {\n" + + " \"name\": \"p2\",\n" + + " \"type\": \"Constant\",\n" + + " \"scope\": \"Workflow\",\n" + + " \"value\": \"ppp2\"\n" + + " }\n" + + " ]\n" + + " },\n" + + " \"runtimeResource\": {\n" + + " \"resourceGroup\": \"group_xxx\"\n" + + " },\n" + + " \"trigger\": {\n" + + " \"type\": \"Scheduler\",\n" + + " \"cron\": \"00 00 00 * * ?\",\n" + + " \"delaySeconds\": 10\n" + + " },\n" + + " \"inputs\": {},\n" + + " \"outputs\": {\n" + + " \"nodeOutputs\": [\n" + + " {\n" + + " \"data\": \"autotest.workflow_1_xxx\"\n" + + " }\n" + + " ]\n" + + " },\n" + + " \"nodes\": [\n" + + " {\n" + + " \"id\": \"inner_node_1\",\n" + + " \"name\": \"inner_node_1\",\n" + + " \"script\": {\n" + + " \"runtime\": {\n" + + " \"command\": \"ODPS_SQL\"\n" + + " }\n" + + " },\n" + + " \"trigger\": {\n" + + " \"delay\": 10\n" + + " }\n" + + " }\n" + + " ],\n" + + " \"dependencies\": [\n" + + " {\n" + + " \"nodeId\": \"inner_node_1\",\n" + + " \"depends\": [\n" + + " {\n" + + " \"type\": \"Normal\",\n" + + " \"output\": \"autotest.inner_node_2_out\"\n" + + " }\n" + + " ]\n" + + " }\n" + + " ]\n" + + " }\n" + + " ],\n" + + " \"flow\": [\n" + + " {\n" + + " \"nodeId\": \"flow_1\",\n" + + " \"depends\": [\n" + + " {\n" + + " \"type\": \"Normal\",\n" + + " \"output\": \"autotest.node_1_xxx\"\n" + + " }\n" + + " ]\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"; + log.info("spec: {}", spec); + Specification specification = SpecUtil.parseToDomain(spec); + List workflows = specification.getSpec().getWorkflows(); + Assert.assertNotNull(workflows); + Assert.assertEquals(1, workflows.size()); + + log.info("workflows: {}", workflows); + log.info("write spec: {}", SpecUtil.writeToSpec(specification)); + SpecWorkflow specWorkflow = workflows.get(0); + Assert.assertNotNull(specWorkflow.getTrigger()); + Assert.assertNotNull(specWorkflow.getStrategy()); + Assert.assertNotNull(specWorkflow.getNodes()); + Assert.assertNotNull(specWorkflow.getDependencies()); + Assert.assertNotNull(specification.getSpec().getFlow()); + Assert.assertNotNull(specWorkflow.getStrategy().getFailureStrategy()); + } + + @Test + public void testParseDepends() { + String json = "[\n" + + " {\n" + + " \"nodeId\": \"123\"\n" + + " },\n" + + " {\n" + + " \"output\": \"autotest.1234_out\"\n" + + " }\n" + + "]"; + List depends = ListUtils.emptyIfNull(JSONArray.parseArray(json)).stream().map(obj -> { + SpecDepend dep = SpecUtil.parse(JSON.toJSONString(obj), SpecDepend.class, new SpecParserContext()); + log.info("dep: nodeId: {}, output: {}", Optional.ofNullable(dep.getNodeId()).map(SpecRefEntity::getId).orElse(null), dep.getOutput()); + return dep; + }).collect(Collectors.toList()); + + Assert.assertEquals(2, CollectionUtils.size(depends)); + Assert.assertEquals("123", depends.get(0).getNodeId().getId()); + Assert.assertEquals("autotest.1234_out", depends.get(1).getOutput().getData()); + } + + @Test + public void testSpecWorkflow() { + Specification specification = new Specification<>(); + specification.setVersion(SpecVersion.V_1_1_0.getLabel()); + specification.setKind(SpecKind.CYCLE_WORKFLOW.getLabel()); + DataWorksWorkflowSpec spec = new DataWorksWorkflowSpec(); + specification.setSpec(spec); + SpecWorkflow specWorkflow = new SpecWorkflow(); + specWorkflow.setId("12"); + specWorkflow.setName("test"); + 
spec.setWorkflows(Collections.singletonList(specWorkflow)); + log.info("{}", SpecUtil.writeToSpec(specification)); + + Specification parsed = SpecUtil.parseToDomain(SpecUtil.writeToSpec(specification)); + Assert.assertNotNull(parsed); + Assert.assertNotNull(parsed.getSpec()); + Assert.assertNotNull(parsed.getSpec().getWorkflows()); + Assert.assertTrue(CollectionUtils.isNotEmpty(parsed.getSpec().getWorkflows())); + SpecWorkflow s = parsed.getSpec().getWorkflows().get(0); + Assert.assertNotNull(s); + Assert.assertNotNull(s.getNodes()); + Assert.assertNotNull(s.getDependencies()); + } + + @Test + public void testSpecSchedulerStrategy() { + SpecScheduleStrategy scheduleStrategy = new SpecScheduleStrategy(); + scheduleStrategy.setFailureStrategy(FailureStrategy.CONTINUE); + JSONObject js = (JSONObject)SpecUtil.write(scheduleStrategy, new SpecWriterContext()); + log.info("js: {}", js.toJSONString()); + Assert.assertEquals("Continue", js.getString("failureStrategy")); + } + @Test public void testSingleNode() { String spec = "{\n" - + " \"version\":\"1.1.0\",\n" - + " \"kind\":\"Node\",\n" - + " \"spec\":{\n" - + " \"node\": {\n" - + " \"id\": \"4744170535163410393\",\n" - + " \"recurrence\":\"Normal\",\n" - + " \"timeout\":0,\n" - + " \"instanceMode\":\"T+1\",\n" - + " \"rerunMode\":\"Allowed\",\n" - + " \"rerunTimes\":3,\n" - + " \"rerunInterval\":180000,\n" - + " \"owner\": \"064152\",\n" - + " \"script\":{\n" - + " \"path\":\"聿剑/dep1\",\n" - + " \"language\":\"odps-sql\",\n" - + " \"runtime\":{\n" - + " \"command\":\"ODPS_SQL\"\n" - + " }\n" - + " },\n" - + " \"trigger\":{\n" - + " \"type\":\"Scheduler\",\n" - + " \"cron\":\"00 00 00 * * ?\",\n" - + " \"startTime\":\"1970-01-01 00:00:00\",\n" - + " \"endTime\":\"9999-01-01 00:00:00\",\n" - + " \"timezone\":\"Asia/Shanghai\"\n" - + " },\n" - + " \"runtimeResource\":{\n" - + " \"resourceGroup\":\"group_2\",\n" - + " \"resourceGroupId\":\"2\"\n" - + " },\n" - + " \"name\":\"dep1\"\n" - + " } , \n" - + " \"flow\":\n" - + " { \n" - + " \"nodeId\":\"1\",\n" - + " \"depends\":[\n" - + " {\n" - + " \"type\":\"Normal\",\n" - + " \"output\":\"4744170535163410393\",\n" - + " \"refTableName\":\"branch_2_pyodps\"\n" - + " },\n" - + " {\n" - + " \"type\":\"Normal\",\n" - + " \"output\":\"5910844902278897501\",\n" - + " \"refTableName\":\"branch_1_odpssql\"\n" - + " }\n" - + " ]\n" - + " }\n" - + " \n" - + " }\n" - + " }"; + + " \"version\":\"1.1.0\",\n" + + " \"kind\":\"Node\",\n" + + " \"spec\":{\n" + + " \"nodes\": [{\n" + + " \"id\": \"4744170535163410393\",\n" + + " \"recurrence\":\"Normal\",\n" + + " \"timeout\":0,\n" + + " \"instanceMode\":\"T+1\",\n" + + " \"rerunMode\":\"Allowed\",\n" + + " \"rerunTimes\":3,\n" + + " \"rerunInterval\":180000,\n" + + " \"owner\": \"064152\",\n" + + " \"script\":{\n" + + " \"path\":\"聿剑/dep1\",\n" + + " \"language\":\"odps-sql\",\n" + + " \"runtime\":{\n" + + " \"command\":\"ODPS_SQL\"\n" + + " }\n" + + " },\n" + + " \"trigger\":{\n" + + " \"type\":\"Scheduler\",\n" + + " \"cron\":\"00 00 00 * * ?\",\n" + + " \"startTime\":\"1970-01-01 00:00:00\",\n" + + " \"endTime\":\"9999-01-01 00:00:00\",\n" + + " \"timezone\":\"Asia/Shanghai\"\n" + + " },\n" + + " \"runtimeResource\":{\n" + + " \"resourceGroup\":\"group_2\",\n" + + " \"resourceGroupId\":\"2\"\n" + + " },\n" + + " \"name\":\"dep1\"\n" + + " }] , \n" + + " \"flow\":\n" + + " [{ \n" + + " \"nodeId\":\"1\",\n" + + " \"depends\":[\n" + + " {\n" + + " \"type\":\"Normal\",\n" + + " \"output\":\"4744170535163410393\",\n" + + " \"refTableName\":\"branch_2_pyodps\"\n" + + 
" },\n" + + " {\n" + + " \"type\":\"Normal\",\n" + + " \"output\":\"5910844902278897501\",\n" + + " \"refTableName\":\"branch_1_odpssql\"\n" + + " }\n" + + " ]\n" + + " }]\n" + + " \n" + + " }\n" + + " }"; Specification specObj = SpecUtil.parseToDomain(spec); SpecWriterContext context = new SpecWriterContext(); context.setVersion("1.1.0"); log.info("write spec: {}", SpecUtil.writeToSpec(specObj)); Assert.assertNotNull(specObj); - Assert.assertTrue(specObj.getSpec() instanceof DataWorksNodeSpec); - Assert.assertNotNull(((DataWorksNodeSpec) specObj.getSpec()).getNode()); - Assert.assertNotNull(((DataWorksNodeSpec) specObj.getSpec()).getFlow()); + Assert.assertTrue(CollectionUtils.isNotEmpty(((DataWorksWorkflowSpec)specObj.getSpec()).getNodes())); + Assert.assertTrue(CollectionUtils.isNotEmpty(((DataWorksWorkflowSpec)specObj.getSpec()).getFlow())); } @Test - public void testParseNodeWithComponent() { + public void testWorkflows() { String spec = "{\n" - + " \"version\": \"1.1.0\",\n" - + " \"kind\": \"CycleWorkflow\",\n" - + " \"spec\": {\n" - + " \"nodes\": [\n" - + " {\n" - + " \"recurrence\": \"Normal\",\n" - + " \"id\": \"6289081068484952005\",\n" - + " \"timeout\": 0,\n" - + " \"instanceMode\": \"T+1\",\n" - + " \"rerunMode\": \"Allowed\",\n" - + " \"rerunTimes\": 3,\n" - + " \"rerunInterval\": 180000,\n" - + " \"datasource\": {\n" - + " \"name\": \"odps_first\",\n" - + " \"type\": \"odps\"\n" - + " },\n" - + " \"script\": {\n" - + " \"language\": \"odps-sql\",\n" - + " \"path\": \"昊祯/组件/c1\",\n" - + " \"runtime\": {\n" - + " \"command\": \"COMPONENT_SQL\",\n" - + " \"commandTypeId\": 1010\n" - + " },\n" - + " \"id\": \"6423534013528078585\"\n" - + " },\n" - + " \"trigger\": {\n" - + " \"type\": \"Scheduler\",\n" - + " \"id\": \"5065170306719262538\",\n" - + " \"cron\": \"00 00 00 * * ?\",\n" - + " \"startTime\": \"1970-01-01 00:00:00\",\n" - + " \"endTime\": \"9999-01-01 00:00:00\",\n" - + " \"timezone\": \"Asia/Shanghai\"\n" - + " },\n" - + " \"runtimeResource\": {\n" - + " \"resourceGroup\": \"wengzi_test\",\n" - + " \"id\": \"5700220827937093292\",\n" - + " \"resourceGroupId\": \"9527\"\n" - + " },\n" - + " \"name\": \"c1\",\n" - + " \"owner\": \"067848\",\n" - + " \"component\": {\n" - + " \"description\": \"11\",\n" - + " \"id\": \"6128718817130431653\",\n" - + " \"inputs\": [\n" - + " {\n" - + " \"name\": \"p1\"\n" - + " },\n" - + " {\n" - + " \"name\": \"p2\"\n" - + " }\n" - + " ],\n" - + " \"metadata\": {\n" - + " \"version\": \"3\"\n" - + " },\n" - + " \"name\": \"c1\",\n" - + " \"outputs\": [],\n" - + " \"owner\": \"067848\"\n" - + " },\n" - + " \"metadata\": {\n" - + " \"tenantId\": \"1\",\n" - + " \"projectId\": \"23620\"\n" - + " },\n" - + " \"inputs\": {\n" - + " \"nodeOutputs\": [\n" - + " {\n" - + " \"data\": \"dw_scheduler_pre_root\",\n" - + " \"artifactType\": \"NodeOutput\",\n" - + " \"isDefault\": false\n" - + " }\n" - + " ]\n" - + " },\n" - + " \"outputs\": {\n" - + " \"nodeOutputs\": [\n" - + " {\n" - + " \"data\": \"6289081068484952005\",\n" - + " \"artifactType\": \"NodeOutput\",\n" - + " \"refTableName\": \"c1\",\n" - + " \"isDefault\": true\n" - + " }\n" - + " ]\n" - + " }\n" - + " }\n" - + " ],\n" - + " \"flow\": [\n" - + " {\n" - + " \"nodeId\": \"6289081068484952005\",\n" - + " \"depends\": [\n" - + " {\n" - + " \"type\": \"Normal\",\n" - + " \"output\": \"dw_scheduler_pre_root\"\n" - + " }\n" - + " ]\n" - + " }\n" - + " ]\n" - + " },\n" - + " \"metadata\": {\n" - + " \"uuid\": \"6289081068484952005\"\n" - + " }\n" - + " }"; - + + 
"\t\"version\":\"1.1.0\",\n" + + "\t\"kind\":\"CycleWorkflow\",\n" + + "\t\"spec\":{\n" + + "\t\t\"name\":\"testflow0730_deploy_01\",\n" + + "\t\t\"id\":\"8620630926993095479\",\n" + + "\t\t\"type\":\"CycleWorkflow\",\n" + + "\t\t\"owner\":\"1107550004253538\",\n" + + "\t\t\"workflows\":[\n" + + "\t\t\t{\n" + + "\t\t\t\t\"script\":{\n" + + "\t\t\t\t\t\"path\":\"聿剑/testflow0730_deploy_01\",\n" + + "\t\t\t\t\t\"runtime\":{\n" + + "\t\t\t\t\t\t\"command\":\"WORKFLOW\"\n" + + "\t\t\t\t\t},\n" + + "\t\t\t\t\t\"id\":\"5162322698918001755\"\n" + + "\t\t\t\t},\n" + + "\t\t\t\t\"id\":\"8620630926993095479\",\n" + + "\t\t\t\t\"trigger\":{\n" + + "\t\t\t\t\t\"type\":\"Scheduler\",\n" + + "\t\t\t\t\t\"id\":\"5971686020768809793\",\n" + + "\t\t\t\t\t\"cron\":\"00 00 00 * * ?\",\n" + + "\t\t\t\t\t\"startTime\":\"1970-01-01 00:00:00\",\n" + + "\t\t\t\t\t\"endTime\":\"9999-01-01 00:00:00\",\n" + + "\t\t\t\t\t\"timezone\":\"Asia/Shanghai\"\n" + + "\t\t\t\t},\n" + + "\t\t\t\t\"strategy\":{\n" + + "\t\t\t\t\t\"timeout\":0,\n" + + "\t\t\t\t\t\"instanceMode\":\"T+1\",\n" + + "\t\t\t\t\t\"rerunMode\":\"Allowed\",\n" + + "\t\t\t\t\t\"rerunTimes\":3,\n" + + "\t\t\t\t\t\"rerunInterval\":180000,\n" + + "\t\t\t\t\t\"failureStrategy\":\"Break\"\n" + + "\t\t\t\t},\n" + + "\t\t\t\t\"name\":\"testflow0730_deploy_01\",\n" + + "\t\t\t\t\"owner\":\"1107550004253538\",\n" + + "\t\t\t\t\"inputs\":{\n" + + "\t\t\t\t\t\n" + + "\t\t\t\t},\n" + + "\t\t\t\t\"outputs\":{\n" + + "\t\t\t\t\t\"nodeOutputs\":[\n" + + "\t\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\t\"data\":\"8620630926993095479\",\n" + + "\t\t\t\t\t\t\t\"artifactType\":\"NodeOutput\",\n" + + "\t\t\t\t\t\t\t\"refTableName\":\"testflow0730_deploy_01\",\n" + + "\t\t\t\t\t\t\t\"isDefault\":true\n" + + "\t\t\t\t\t\t}\n" + + "\t\t\t\t\t]\n" + + "\t\t\t\t},\n" + + "\t\t\t\t\"nodes\":[\n" + + "\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\"recurrence\":\"Normal\",\n" + + "\t\t\t\t\t\t\"id\":\"7751009343504221738\",\n" + + "\t\t\t\t\t\t\"timeout\":0,\n" + + "\t\t\t\t\t\t\"instanceMode\":\"T+1\",\n" + + "\t\t\t\t\t\t\"rerunMode\":\"Allowed\",\n" + + "\t\t\t\t\t\t\"rerunTimes\":3,\n" + + "\t\t\t\t\t\t\"rerunInterval\":180000,\n" + + "\t\t\t\t\t\t\"datasource\":{\n" + + "\t\t\t\t\t\t\t\"name\":\"odps_first\",\n" + + "\t\t\t\t\t\t\t\"type\":\"\"\n" + + "\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\"script\":{\n" + + "\t\t\t\t\t\t\t\"path\":\"聿剑/testflow0730_deploy_01/mcsql_inner_02\",\n" + + "\t\t\t\t\t\t\t\"runtime\":{\n" + + "\t\t\t\t\t\t\t\t\"command\":\"ODPS_SQL\",\n" + + "\t\t\t\t\t\t\t\t\"commandTypeId\":10\n" + + "\t\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\t\"id\":\"4646522489197098297\"\n" + + "\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\"trigger\":{\n" + + "\t\t\t\t\t\t\t\"type\":\"Scheduler\",\n" + + "\t\t\t\t\t\t\t\"id\":\"6882497775385480901\",\n" + + "\t\t\t\t\t\t\t\"cron\":\"00 00 00 * * ?\",\n" + + "\t\t\t\t\t\t\t\"startTime\":\"1970-01-01 00:00:00\",\n" + + "\t\t\t\t\t\t\t\"endTime\":\"9999-01-01 00:00:00\",\n" + + "\t\t\t\t\t\t\t\"timezone\":\"Asia/Shanghai\",\n" + + "\t\t\t\t\t\t\t\"delaySeconds\":0\n" + + "\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\"runtimeResource\":{\n" + + "\t\t\t\t\t\t\t\"resourceGroup\":\"S_res_group_524257424564736_1681266742041\",\n" + + "\t\t\t\t\t\t\t\"id\":\"8099134362653965803\",\n" + + "\t\t\t\t\t\t\t\"resourceGroupId\":\"57214326\"\n" + + "\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\"name\":\"mcsql_inner_02\",\n" + + "\t\t\t\t\t\t\"owner\":\"1107550004253538\",\n" + + "\t\t\t\t\t\t\"metadata\":{\n" + + "\t\t\t\t\t\t\t\"container\":{\n" + + "\t\t\t\t\t\t\t\t\"type\":\"Flow\",\n" + + 
"\t\t\t\t\t\t\t\t\"uuid\":\"8620630926993095479\"\n" + + "\t\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\t\"tenantId\":\"524257424564736\",\n" + + "\t\t\t\t\t\t\t\"projectId\":\"295425\"\n" + + "\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\"inputs\":{\n" + + "\t\t\t\t\t\t\t\n" + + "\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\"outputs\":{\n" + + "\t\t\t\t\t\t\t\"nodeOutputs\":[\n" + + "\t\t\t\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\t\t\t\"data\":\"7751009343504221738\",\n" + + "\t\t\t\t\t\t\t\t\t\"artifactType\":\"NodeOutput\",\n" + + "\t\t\t\t\t\t\t\t\t\"refTableName\":\"mcsql_inner_02\",\n" + + "\t\t\t\t\t\t\t\t\t\"isDefault\":true\n" + + "\t\t\t\t\t\t\t\t}\n" + + "\t\t\t\t\t\t\t]\n" + + "\t\t\t\t\t\t}\n" + + "\t\t\t\t\t},\n" + + "\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\"recurrence\":\"Normal\",\n" + + "\t\t\t\t\t\t\"id\":\"5641599010392971076\",\n" + + "\t\t\t\t\t\t\"timeout\":0,\n" + + "\t\t\t\t\t\t\"instanceMode\":\"T+1\",\n" + + "\t\t\t\t\t\t\"rerunMode\":\"Allowed\",\n" + + "\t\t\t\t\t\t\"rerunTimes\":3,\n" + + "\t\t\t\t\t\t\"rerunInterval\":180000,\n" + + "\t\t\t\t\t\t\"datasource\":{\n" + + "\t\t\t\t\t\t\t\"name\":\"odps_first\",\n" + + "\t\t\t\t\t\t\t\"type\":\"\"\n" + + "\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\"script\":{\n" + + "\t\t\t\t\t\t\t\"path\":\"聿剑/testflow0730_deploy_01/mcsql_inner_01\",\n" + + "\t\t\t\t\t\t\t\"runtime\":{\n" + + "\t\t\t\t\t\t\t\t\"command\":\"ODPS_SQL\",\n" + + "\t\t\t\t\t\t\t\t\"commandTypeId\":10\n" + + "\t\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\t\"id\":\"7359544718446803942\"\n" + + "\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\"trigger\":{\n" + + "\t\t\t\t\t\t\t\"type\":\"Scheduler\",\n" + + "\t\t\t\t\t\t\t\"id\":\"5396511791028633183\",\n" + + "\t\t\t\t\t\t\t\"cron\":\"00 00 00 * * ?\",\n" + + "\t\t\t\t\t\t\t\"startTime\":\"1970-01-01 00:00:00\",\n" + + "\t\t\t\t\t\t\t\"endTime\":\"9999-01-01 00:00:00\",\n" + + "\t\t\t\t\t\t\t\"timezone\":\"Asia/Shanghai\",\n" + + "\t\t\t\t\t\t\t\"delaySeconds\":0\n" + + "\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\"runtimeResource\":{\n" + + "\t\t\t\t\t\t\t\"resourceGroup\":\"S_res_group_524257424564736_1681266742041\",\n" + + "\t\t\t\t\t\t\t\"id\":\"8099134362653965803\",\n" + + "\t\t\t\t\t\t\t\"resourceGroupId\":\"57214326\"\n" + + "\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\"name\":\"mcsql_inner_01\",\n" + + "\t\t\t\t\t\t\"owner\":\"1107550004253538\",\n" + + "\t\t\t\t\t\t\"metadata\":{\n" + + "\t\t\t\t\t\t\t\"container\":{\n" + + "\t\t\t\t\t\t\t\t\"type\":\"Flow\",\n" + + "\t\t\t\t\t\t\t\t\"uuid\":\"8620630926993095479\"\n" + + "\t\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\t\"tenantId\":\"524257424564736\",\n" + + "\t\t\t\t\t\t\t\"projectId\":\"295425\"\n" + + "\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\"inputs\":{\n" + + "\t\t\t\t\t\t\t\n" + + "\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\"outputs\":{\n" + + "\t\t\t\t\t\t\t\"nodeOutputs\":[\n" + + "\t\t\t\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\t\t\t\"data\":\"5641599010392971076\",\n" + + "\t\t\t\t\t\t\t\t\t\"artifactType\":\"NodeOutput\",\n" + + "\t\t\t\t\t\t\t\t\t\"refTableName\":\"mcsql_inner_01\",\n" + + "\t\t\t\t\t\t\t\t\t\"isDefault\":true\n" + + "\t\t\t\t\t\t\t\t}\n" + + "\t\t\t\t\t\t\t]\n" + + "\t\t\t\t\t\t}\n" + + "\t\t\t\t\t}\n" + + "\t\t\t\t],\n" + + "\t\t\t\t\"dependencies\":[\n" + + "\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\"nodeId\":\"7751009343504221738\",\n" + + "\t\t\t\t\t\t\"depends\":[\n" + + "\t\t\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\t\t\"type\":\"Normal\",\n" + + "\t\t\t\t\t\t\t\t\"output\":\"5641599010392971076\",\n" + + "\t\t\t\t\t\t\t\t\"refTableName\":\"mcsql_inner_01\"\n" + + "\t\t\t\t\t\t\t}\n" + + "\t\t\t\t\t\t]\n" + + "\t\t\t\t\t},\n" + + 
"\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\"nodeId\":\"5641599010392971076\",\n" + + "\t\t\t\t\t\t\"depends\":[\n" + + "\t\t\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\t\t\"type\":\"Normal\",\n" + + "\t\t\t\t\t\t\t\t\"output\":\"lwt_test_dd.504470094_out\"\n" + + "\t\t\t\t\t\t\t}\n" + + "\t\t\t\t\t\t]\n" + + "\t\t\t\t\t}\n" + + "\t\t\t\t]\n" + + "\t\t\t}\n" + + "\t\t],\n" + + "\t\t\"flow\":[\n" + + "\t\t\t{\n" + + "\t\t\t\t\"nodeId\":\"8620630926993095479\",\n" + + "\t\t\t\t\"depends\":[\n" + + "\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\"type\":\"Normal\",\n" + + "\t\t\t\t\t\t\"output\":\"lwt_test_dd_root\"\n" + + "\t\t\t\t\t}\n" + + "\t\t\t\t]\n" + + "\t\t\t}\n" + + "\t\t]\n" + + "\t},\n" + + "\t\"metadata\":{\n" + + "\t\t\"innerVersion\":{\n" + + "\t\t\t\"7751009343504221738\":1,\n" + + "\t\t\t\"5641599010392971076\":4\n" + + "\t\t},\n" + + "\t\t\"gmtModified\":1722320416000,\n" + + "\t\t\"tenantId\":\"524257424564736\",\n" + + "\t\t\"projectId\":\"295425\",\n" + + "\t\t\"uuid\":\"8620630926993095479\"\n" + + "\t}\n" + + "}"; Specification specification = SpecUtil.parseToDomain(spec); - Optional component = Optional.ofNullable(specification) - .map(Specification::getSpec).map(DataWorksWorkflowSpec::getNodes) - .flatMap(l -> l.stream().map(SpecNode::getComponent).filter(Objects::nonNull).findAny()); + DataWorksNodeAdapter adapter = new DataWorksNodeAdapter(specification, specification.getSpec().getWorkflows().get(0)); + log.info("workflow inputs: {}", adapter.getInputs()); + Assert.assertTrue(CollectionUtils.isNotEmpty(adapter.getInputs())); + + List nodes = specification.getSpec().getWorkflows().get(0).getNodes(); + for (SpecNode node : nodes) { + DataWorksNodeAdapter nodeAdapter = new DataWorksNodeAdapter(specification, node); + log.info("node: {}", nodeAdapter.getInputs()); + Assert.assertTrue(CollectionUtils.isNotEmpty(nodeAdapter.getInputs())); + } + } - Assert.assertTrue(component.isPresent()); - Assert.assertNotNull(component.get().getMetadata()); - Assert.assertNotNull(component.get().getId()); - Assert.assertNotNull(component.get().getInputs()); - Assert.assertNotNull(component.get().getOutputs()); + @Test + public void testInnerNodes() { + Specification sp = new Specification<>(); + sp.setKind(SpecKind.CYCLE_WORKFLOW.getLabel()); + sp.setVersion(SpecVersion.V_1_2_0.getLabel()); + DataWorksWorkflowSpec spec = new DataWorksWorkflowSpec(); + SpecNode node = new SpecNode(); + SpecSubFlow subflow = new SpecSubFlow(); + SpecNode subnode = new SpecNode(); + subnode.setId("subnode1"); + subnode.setName("subnode1"); + subflow.setNodes(Collections.singletonList(subnode)); + node.setSubflow(subflow); + spec.setNodes(Collections.singletonList(node)); + sp.setSpec(spec); + JSONObject json = JSONObject.parseObject(SpecUtil.writeToSpec(sp)); + log.info("spec json: {}", json.toJSONString(Feature.PrettyFormat)); + Assert.assertNotNull(json); + Assert.assertEquals(subnode.getId(), json.getByPath("$.spec.nodes[0].subflow.nodes[0].id")); + Assert.assertEquals(subnode.getName(), json.getByPath("$.spec.nodes[0].subflow.nodes[0].name")); + + Specification parsed = SpecUtil.parseToDomain(json.toJSONString()); + Assert.assertNotNull(parsed); + Assert.assertNotNull(parsed.getSpec()); + Assert.assertNotNull(parsed.getSpec().getNodes()); + Assert.assertNotNull(parsed.getSpec().getNodes().get(0).getSubflow()); + Assert.assertNotNull(parsed.getSpec().getNodes().get(0).getSubflow().getNodes()); + Assert.assertEquals(subnode.getId(), parsed.getSpec().getNodes().get(0).getSubflow().getNodes().get(0).getId()); + 
Assert.assertEquals(subnode.getName(), parsed.getSpec().getNodes().get(0).getSubflow().getNodes().get(0).getName()); } } \ No newline at end of file diff --git a/spec/src/test/java/com/aliyun/dataworks/common/spec/SpecWriterUtilTest.java b/spec/src/test/java/com/aliyun/dataworks/common/spec/SpecWriterUtilTest.java index 19dd4d5..7e66cca 100644 --- a/spec/src/test/java/com/aliyun/dataworks/common/spec/SpecWriterUtilTest.java +++ b/spec/src/test/java/com/aliyun/dataworks/common/spec/SpecWriterUtilTest.java @@ -118,6 +118,7 @@ public void testWriterFactory() { container.setEnv(Lists.newArrayList(envVar1, envVar2)); runtime.setContainer(container); + runtime.setCu("0.5"); CodeModel hive = CodeModelFactory.getCodeModel("EMR_HIVE", "{}"); EmrCode emrCode = (EmrCode)hive.getCodeModel(); diff --git a/spec/src/test/java/com/aliyun/dataworks/common/spec/domain/dw/codemodel/CodeModelFactoryTest.java b/spec/src/test/java/com/aliyun/dataworks/common/spec/domain/dw/codemodel/CodeModelFactoryTest.java index d939bf2..da7fe04 100644 --- a/spec/src/test/java/com/aliyun/dataworks/common/spec/domain/dw/codemodel/CodeModelFactoryTest.java +++ b/spec/src/test/java/com/aliyun/dataworks/common/spec/domain/dw/codemodel/CodeModelFactoryTest.java @@ -21,7 +21,6 @@ import com.aliyun.dataworks.common.spec.domain.dw.types.CodeProgramType; import com.aliyun.dataworks.common.spec.utils.GsonUtils; - import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; import org.junit.Assert; @@ -40,7 +39,7 @@ public void testGetCodeModel() { odpsSparkCode.getCodeModel().getResourceReferences().add("test_res.jar"); System.out.println("code content: " + odpsSparkCode.getCodeModel().getContent()); Assert.assertTrue(StringUtils.indexOf( - odpsSparkCode.getContent(), "resourceReferences") >= 0); + odpsSparkCode.getContent(), "##@resource_reference{\"test_res.jar\"}") >= 0); Assert.assertNotNull(odpsSparkCode.getCodeModel()); Assert.assertNotNull(odpsSparkCode.getCodeModel().getSparkJson()); @@ -53,12 +52,12 @@ public void testGetCodeModel() { CodeModel joinCode = CodeModelFactory.getCodeModel("CONTROLLER_JOIN", null); joinCode.getCodeModel() - .setBranchList(Collections.singletonList( - new ControllerJoinCode.Branch() - .setLogic(0) - .setNode("branch_node") - .setRunStatus(Arrays.asList("1", "2")))) - .setResultStatus("2"); + .setBranchList(Collections.singletonList( + new ControllerJoinCode.Branch() + .setLogic(0) + .setNode("branch_node") + .setRunStatus(Arrays.asList("1", "2")))) + .setResultStatus("2"); System.out.println("join code: " + joinCode.getCodeModel().getContent()); Assert.assertNotNull(joinCode.getCodeModel()); Assert.assertTrue(StringUtils.indexOf(joinCode.getContent(), "branch_node") >= 0); @@ -72,7 +71,7 @@ public void testGetCodeModel() { CodeModel emrHive = CodeModelFactory.getCodeModel("EMR_HIVE", null); System.out.println("emr hive code: " + emrHive.getContent()); - EmrCode code = (EmrCode) emrHive.getCodeModel(); + EmrCode code = (EmrCode)emrHive.getCodeModel(); System.out.println("emr code mode: {}" + GsonUtils.toJsonString(code)); } @@ -83,9 +82,9 @@ public void testMultiLanguageCode() { m.getCodeModel().setLanguage("odps"); CodeModel m2 = CodeModelFactory.getCodeModel("CONTROLLER_CYCLE_END", "{\n" - + " \"language\": \"odps\",\n" - + " \"content\": \"select 1\"\n" - + "}"); + + " \"language\": \"odps\",\n" + + " \"content\": \"select 1\"\n" + + "}"); System.out.println("assignment content: " + m.getContent()); System.out.println("assignment source code: " + m.getSourceCode()); 
Assert.assertEquals("select 1", m.getSourceCode()); @@ -103,41 +102,41 @@ public void testMultiLanguageCode() { @Test public void testDefaultJsonFormCode() { String code = "{\n" - + " \"content\": \"IMPORT FOREIGN SCHEMA shanghai_onlineTest_simple LIMIT TO (wq_test_dataworks_pt_001) from SERVER " - + "odps_server INTO public OPTIONS(prefix 'tmp_foreign_', suffix 'xozi4mmb', if_table_exist 'error',if_unsupported_type 'error');" - + "\\nDROP TABLE IF EXISTS \\\"public\\\".tmp_holo_8gwvxopb_wqtest;\\nBEGIN;\\nCREATE TABLE IF NOT EXISTS \\\"public\\\"" - + ".tmp_holo_8gwvxopb_wqtest (\\n \\\"f1\\\" text NOT NULL,\\n \\\"f2\\\" text NOT NULL,\\n \\\"f4\\\" text NOT NULL,\\n \\\"f5\\\" " - + "text NOT NULL,\\n \\\"f3\\\" text NOT NULL,\\n \\\"f6\\\" text NOT NULL,\\n \\\"f7\\\" text NOT NULL,\\n \\\"f10\\\" text NOT NULL," - + "\\n \\\"ds\\\" bigint NOT NULL,\\n \\\"pt\\\" text NOT NULL\\n);\\nCALL SET_TABLE_PROPERTY('\\\"public\\\"" - + ".tmp_holo_8gwvxopb_wqtest', 'orientation', 'column');\\ncomment on column \\\"public\\\".tmp_holo_8gwvxopb_wqtest.pt is '分区字段';" - + "\\nCOMMIT;\\nINSERT INTO \\\"public\\\".tmp_holo_8gwvxopb_wqtest\\nSELECT \\n CAST(\\\"f1\\\" as text),\\n CAST(\\\"f2\\\" as " - + "text),\\n CAST(\\\"f4\\\" as text),\\n CAST(\\\"f5\\\" as text),\\n CAST(\\\"f3\\\" as text),\\n CAST(\\\"f6\\\" as " - + "text),\\n CAST(\\\"f7\\\" as text),\\n CAST(\\\"f10\\\" as text),\\n CAST(\\\"ds\\\" as bigint),\\n CAST(\\\"pt\\\" as " - + "text)\\nFROM \\\"public\\\".tmp_foreign_wq_test_dataworks_pt_001xozi4mmb\\nWHERE pt='${bizdate}';\\nDROP FOREIGN TABLE IF EXISTS " - + "\\\"public\\\".tmp_foreign_wq_test_dataworks_pt_001xozi4mmb;BEGIN;\\nDROP TABLE IF EXISTS \\\"public\\\".wqtest;\\nALTER TABLE " - + "\\\"public\\\".tmp_holo_8gwvxopb_wqtest RENAME TO wqtest;\\nCOMMIT;\\n\",\n" - + " \"extraContent\": \"{\\\"connId\\\":\\\"yongxunqa_holo_shanghai\\\",\\\"dbName\\\":\\\"yongxunqa_hologres_db\\\"," - + "\\\"syncType\\\":1,\\\"extendProjectName\\\":\\\"shanghai_onlineTest_simple\\\",\\\"schemaName\\\":\\\"public\\\"," - + "\\\"tableName\\\":\\\"wqtest\\\",\\\"partitionColumn\\\":\\\"\\\",\\\"orientation\\\":\\\"column\\\"," - + "\\\"columns\\\":[{\\\"name\\\":\\\"f1\\\",\\\"comment\\\":\\\"\\\",\\\"type\\\":\\\"STRING\\\",\\\"allowNull\\\":false," - + "\\\"holoName\\\":\\\"f1\\\",\\\"holoType\\\":\\\"text\\\"},{\\\"name\\\":\\\"f2\\\",\\\"comment\\\":\\\"\\\"," - + "\\\"type\\\":\\\"STRING\\\",\\\"allowNull\\\":false,\\\"holoName\\\":\\\"f2\\\",\\\"holoType\\\":\\\"text\\\"}," - + "{\\\"name\\\":\\\"f4\\\",\\\"comment\\\":\\\"\\\",\\\"type\\\":\\\"STRING\\\",\\\"allowNull\\\":false,\\\"holoName\\\":\\\"f4\\\"," - + "\\\"holoType\\\":\\\"text\\\"},{\\\"name\\\":\\\"f5\\\",\\\"comment\\\":\\\"\\\",\\\"type\\\":\\\"STRING\\\"," - + "\\\"allowNull\\\":false,\\\"holoName\\\":\\\"f5\\\",\\\"holoType\\\":\\\"text\\\"},{\\\"name\\\":\\\"f3\\\"," - + "\\\"comment\\\":\\\"\\\",\\\"type\\\":\\\"STRING\\\",\\\"allowNull\\\":false,\\\"holoName\\\":\\\"f3\\\"," - + "\\\"holoType\\\":\\\"text\\\"},{\\\"name\\\":\\\"f6\\\",\\\"comment\\\":\\\"\\\",\\\"type\\\":\\\"STRING\\\"," - + "\\\"allowNull\\\":false,\\\"holoName\\\":\\\"f6\\\",\\\"holoType\\\":\\\"text\\\"},{\\\"name\\\":\\\"f7\\\"," - + "\\\"comment\\\":\\\"\\\",\\\"type\\\":\\\"STRING\\\",\\\"allowNull\\\":false,\\\"holoName\\\":\\\"f7\\\"," - + "\\\"holoType\\\":\\\"text\\\"},{\\\"name\\\":\\\"f10\\\",\\\"comment\\\":\\\"\\\",\\\"type\\\":\\\"STRING\\\"," - + 
"\\\"allowNull\\\":false,\\\"holoName\\\":\\\"f10\\\",\\\"holoType\\\":\\\"text\\\"},{\\\"name\\\":\\\"ds\\\"," - + "\\\"comment\\\":\\\"\\\",\\\"type\\\":\\\"BIGINT\\\",\\\"allowNull\\\":false,\\\"holoName\\\":\\\"ds\\\"," - + "\\\"holoType\\\":\\\"bigint\\\"},{\\\"name\\\":\\\"pt\\\",\\\"comment\\\":\\\"分区字段\\\",\\\"type\\\":\\\"STRING\\\"," - + "\\\"allowNull\\\":false,\\\"holoName\\\":\\\"pt\\\",\\\"holoType\\\":\\\"text\\\"}],\\\"serverName\\\":\\\"odps_server\\\"," - + "\\\"extendTableName\\\":\\\"wq_test_dataworks_pt_001\\\",\\\"foreignSchemaName\\\":\\\"public\\\",\\\"foreignTableName\\\":\\\"\\\"," - + "\\\"instanceId\\\":\\\"yongxunqa_holo_shanghai\\\",\\\"engineType\\\":\\\"Hologres\\\",\\\"clusteringKey\\\":[]," - + "\\\"bitmapIndexKey\\\":[],\\\"segmentKey\\\":[],\\\"dictionaryEncoding\\\":[]}\"\n" - + " }"; + + " \"content\": \"IMPORT FOREIGN SCHEMA shanghai_onlineTest_simple LIMIT TO (wq_test_dataworks_pt_001) from SERVER " + + "odps_server INTO public OPTIONS(prefix 'tmp_foreign_', suffix 'xozi4mmb', if_table_exist 'error',if_unsupported_type 'error');" + + "\\nDROP TABLE IF EXISTS \\\"public\\\".tmp_holo_8gwvxopb_wqtest;\\nBEGIN;\\nCREATE TABLE IF NOT EXISTS \\\"public\\\"" + + ".tmp_holo_8gwvxopb_wqtest (\\n \\\"f1\\\" text NOT NULL,\\n \\\"f2\\\" text NOT NULL,\\n \\\"f4\\\" text NOT NULL,\\n \\\"f5\\\" " + + "text NOT NULL,\\n \\\"f3\\\" text NOT NULL,\\n \\\"f6\\\" text NOT NULL,\\n \\\"f7\\\" text NOT NULL,\\n \\\"f10\\\" text NOT NULL," + + "\\n \\\"ds\\\" bigint NOT NULL,\\n \\\"pt\\\" text NOT NULL\\n);\\nCALL SET_TABLE_PROPERTY('\\\"public\\\"" + + ".tmp_holo_8gwvxopb_wqtest', 'orientation', 'column');\\ncomment on column \\\"public\\\".tmp_holo_8gwvxopb_wqtest.pt is '分区字段';" + + "\\nCOMMIT;\\nINSERT INTO \\\"public\\\".tmp_holo_8gwvxopb_wqtest\\nSELECT \\n CAST(\\\"f1\\\" as text),\\n CAST(\\\"f2\\\" as " + + "text),\\n CAST(\\\"f4\\\" as text),\\n CAST(\\\"f5\\\" as text),\\n CAST(\\\"f3\\\" as text),\\n CAST(\\\"f6\\\" as " + + "text),\\n CAST(\\\"f7\\\" as text),\\n CAST(\\\"f10\\\" as text),\\n CAST(\\\"ds\\\" as bigint),\\n CAST(\\\"pt\\\" as " + + "text)\\nFROM \\\"public\\\".tmp_foreign_wq_test_dataworks_pt_001xozi4mmb\\nWHERE pt='${bizdate}';\\nDROP FOREIGN TABLE IF EXISTS " + + "\\\"public\\\".tmp_foreign_wq_test_dataworks_pt_001xozi4mmb;BEGIN;\\nDROP TABLE IF EXISTS \\\"public\\\".wqtest;\\nALTER TABLE " + + "\\\"public\\\".tmp_holo_8gwvxopb_wqtest RENAME TO wqtest;\\nCOMMIT;\\n\",\n" + + " \"extraContent\": \"{\\\"connId\\\":\\\"yongxunqa_holo_shanghai\\\",\\\"dbName\\\":\\\"yongxunqa_hologres_db\\\"," + + "\\\"syncType\\\":1,\\\"extendProjectName\\\":\\\"shanghai_onlineTest_simple\\\",\\\"schemaName\\\":\\\"public\\\"," + + "\\\"tableName\\\":\\\"wqtest\\\",\\\"partitionColumn\\\":\\\"\\\",\\\"orientation\\\":\\\"column\\\"," + + "\\\"columns\\\":[{\\\"name\\\":\\\"f1\\\",\\\"comment\\\":\\\"\\\",\\\"type\\\":\\\"STRING\\\",\\\"allowNull\\\":false," + + "\\\"holoName\\\":\\\"f1\\\",\\\"holoType\\\":\\\"text\\\"},{\\\"name\\\":\\\"f2\\\",\\\"comment\\\":\\\"\\\"," + + "\\\"type\\\":\\\"STRING\\\",\\\"allowNull\\\":false,\\\"holoName\\\":\\\"f2\\\",\\\"holoType\\\":\\\"text\\\"}," + + "{\\\"name\\\":\\\"f4\\\",\\\"comment\\\":\\\"\\\",\\\"type\\\":\\\"STRING\\\",\\\"allowNull\\\":false,\\\"holoName\\\":\\\"f4\\\"," + + "\\\"holoType\\\":\\\"text\\\"},{\\\"name\\\":\\\"f5\\\",\\\"comment\\\":\\\"\\\",\\\"type\\\":\\\"STRING\\\"," + + "\\\"allowNull\\\":false,\\\"holoName\\\":\\\"f5\\\",\\\"holoType\\\":\\\"text\\\"},{\\\"name\\\":\\\"f3\\\"," + + 
"\\\"comment\\\":\\\"\\\",\\\"type\\\":\\\"STRING\\\",\\\"allowNull\\\":false,\\\"holoName\\\":\\\"f3\\\"," + + "\\\"holoType\\\":\\\"text\\\"},{\\\"name\\\":\\\"f6\\\",\\\"comment\\\":\\\"\\\",\\\"type\\\":\\\"STRING\\\"," + + "\\\"allowNull\\\":false,\\\"holoName\\\":\\\"f6\\\",\\\"holoType\\\":\\\"text\\\"},{\\\"name\\\":\\\"f7\\\"," + + "\\\"comment\\\":\\\"\\\",\\\"type\\\":\\\"STRING\\\",\\\"allowNull\\\":false,\\\"holoName\\\":\\\"f7\\\"," + + "\\\"holoType\\\":\\\"text\\\"},{\\\"name\\\":\\\"f10\\\",\\\"comment\\\":\\\"\\\",\\\"type\\\":\\\"STRING\\\"," + + "\\\"allowNull\\\":false,\\\"holoName\\\":\\\"f10\\\",\\\"holoType\\\":\\\"text\\\"},{\\\"name\\\":\\\"ds\\\"," + + "\\\"comment\\\":\\\"\\\",\\\"type\\\":\\\"BIGINT\\\",\\\"allowNull\\\":false,\\\"holoName\\\":\\\"ds\\\"," + + "\\\"holoType\\\":\\\"bigint\\\"},{\\\"name\\\":\\\"pt\\\",\\\"comment\\\":\\\"分区字段\\\",\\\"type\\\":\\\"STRING\\\"," + + "\\\"allowNull\\\":false,\\\"holoName\\\":\\\"pt\\\",\\\"holoType\\\":\\\"text\\\"}],\\\"serverName\\\":\\\"odps_server\\\"," + + "\\\"extendTableName\\\":\\\"wq_test_dataworks_pt_001\\\",\\\"foreignSchemaName\\\":\\\"public\\\",\\\"foreignTableName\\\":\\\"\\\"," + + "\\\"instanceId\\\":\\\"yongxunqa_holo_shanghai\\\",\\\"engineType\\\":\\\"Hologres\\\",\\\"clusteringKey\\\":[]," + + "\\\"bitmapIndexKey\\\":[],\\\"segmentKey\\\":[],\\\"dictionaryEncoding\\\":[]}\"\n" + + " }"; CodeModel codeModel = CodeModelFactory.getCodeModel(CodeProgramType.HOLOGRES_SYNC_DATA.name(), code); log.info("content: {}", codeModel.getCodeModel().getContent()); log.info("extraContent: {}", codeModel.getCodeModel().getExtraContent()); @@ -162,19 +161,19 @@ public void testEmrCodeJobTypeDefault() { Assert.assertEquals(EmrJobType.HIVE_SQL, emr.getCodeModel().getType()); String originalCode = "{\n" - + " \"type\": \"TRINO_SQL\",\n" - + " \"launcher\": {\n" - + " \"allocationSpec\": {}\n" - + " },\n" - + " \"properties\": {\n" - + " \"envs\": {},\n" - + " \"arguments\": [\n" - + " \"hive -e\"\n" - + " ],\n" - + " \"tags\": []\n" - + " },\n" - + " \"programType\": \"EMR_HIVE\"\n" - + "}"; + + " \"type\": \"TRINO_SQL\",\n" + + " \"launcher\": {\n" + + " \"allocationSpec\": {}\n" + + " },\n" + + " \"properties\": {\n" + + " \"envs\": {},\n" + + " \"arguments\": [\n" + + " \"hive -e\"\n" + + " ],\n" + + " \"tags\": []\n" + + " },\n" + + " \"programType\": \"EMR_HIVE\"\n" + + "}"; // if emr job type is set in code json, it will be used by ignoring the CodeProgramType emr = CodeModelFactory.getCodeModel("EMR_HIVE", originalCode); @@ -186,8 +185,8 @@ public void testEmrCodeJobTypeDefault() { public void testEscapeHtml() { CodeModel code = CodeModelFactory.getCodeModel(CodeProgramType.EMR_SPARK.name(), null); code.getCodeModel().setSourceCode( - "spark-submit --deploy-mode cluster --class org.apache.spark.examples.SparkPi http://schedule@{env}inside.cheetah.alibaba-inc" - + ".com/scheduler/res?id=282842366"); + "spark-submit --deploy-mode cluster --class org.apache.spark.examples.SparkPi http://schedule@{env}inside.cheetah.alibaba-inc" + + ".com/scheduler/res?id=282842366"); log.info("content: {}", code.getCodeModel().getContent()); diff --git a/spec/src/test/java/com/aliyun/dataworks/common/spec/domain/dw/codemodel/EmrCodeTest.java b/spec/src/test/java/com/aliyun/dataworks/common/spec/domain/dw/codemodel/EmrCodeTest.java index b2c065c..91d093e 100644 --- a/spec/src/test/java/com/aliyun/dataworks/common/spec/domain/dw/codemodel/EmrCodeTest.java +++ 
b/spec/src/test/java/com/aliyun/dataworks/common/spec/domain/dw/codemodel/EmrCodeTest.java @@ -17,7 +17,6 @@ import java.util.Collections; -import com.google.gson.JsonSyntaxException; import org.apache.commons.lang3.StringUtils; import org.junit.Assert; import org.junit.Test; @@ -92,11 +91,12 @@ public void testEmrCodeParse() { Assert.assertTrue(StringUtils.indexOf(emr.getContent(), "hive -e") > 0); } - @Test(expected = JsonSyntaxException.class) + @Test public void testEmrCodeParseException() { CodeModel emr = CodeModelFactory.getCodeModel("EMR_HIVE", "select 1"); Assert.assertNotNull(emr); Assert.assertNotNull(emr.getCodeModel()); + Assert.assertEquals(EmrJobType.HIVE_SQL, emr.getCodeModel().getType()); } @Test diff --git a/spec/src/test/java/com/aliyun/dataworks/common/spec/domain/dw/nodemodel/DataWorksNodeAdapterTest.java b/spec/src/test/java/com/aliyun/dataworks/common/spec/domain/dw/nodemodel/DataWorksNodeAdapterTest.java index 60de3cc..726217a 100644 --- a/spec/src/test/java/com/aliyun/dataworks/common/spec/domain/dw/nodemodel/DataWorksNodeAdapterTest.java +++ b/spec/src/test/java/com/aliyun/dataworks/common/spec/domain/dw/nodemodel/DataWorksNodeAdapterTest.java @@ -18,6 +18,7 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.Collections; +import java.util.HashMap; import java.util.Map; import java.util.Objects; import java.util.stream.Collectors; @@ -34,6 +35,7 @@ import com.aliyun.dataworks.common.spec.domain.ref.SpecNode; import com.aliyun.dataworks.common.spec.domain.ref.SpecNodeOutput; import com.aliyun.dataworks.common.spec.domain.ref.SpecScript; +import com.aliyun.dataworks.common.spec.domain.ref.SpecWorkflow; import com.aliyun.dataworks.common.spec.domain.ref.component.SpecComponent; import com.aliyun.dataworks.common.spec.domain.ref.component.SpecComponentParameter; import com.aliyun.dataworks.common.spec.domain.ref.runtime.SpecScriptRuntime; @@ -174,7 +176,7 @@ public void testForeach() throws IOException { Assert.assertNotNull(foreach.getForeach()); Assert.assertNotNull(foreach.getForeach().getNodes()); Assert.assertEquals(3, CollectionUtils.size(foreach.getInnerNodes())); - Assert.assertNotNull(foreach.getInnerFlow()); + Assert.assertNotNull(foreach.getInnerDependencies()); ListUtils.emptyIfNull(specObj.getSpec().getNodes().get(0).getInnerNodes()).forEach(inner -> { DataWorksNodeAdapter adapter = new DataWorksNodeAdapter(specObj, inner); @@ -270,6 +272,175 @@ public void testGetDependentTypeWithOutputs() { Assert.assertEquals(2, (int)dataWorksNodeAdapter.getNodeType()); } + @Test + public void testGetDependentTypeForWorkflowWithOutputs() { + String specStr = "{\n" + + "\t\"version\":\"1.1.0\",\n" + + "\t\"kind\":\"CycleWorkflow\",\n" + + "\t\"spec\":{\n" + + "\t\t\"name\":\"工作流跨周期小时依赖内部节点依赖外部\",\n" + + "\t\t\"id\":\"5992975956233455901\",\n" + + "\t\t\"type\":\"CycleWorkflow\",\n" + + "\t\t\"owner\":\"206561090452322657\",\n" + + "\t\t\"workflows\":[\n" + + "\t\t\t{\n" + + "\t\t\t\t\"script\":{\n" + + "\t\t\t\t\t\"path\":\"李文涛测试工作流/工作流调度依赖跨周期场景/工作流跨周期小时依赖内部节点依赖外部\",\n" + + "\t\t\t\t\t\"runtime\":{\n" + + "\t\t\t\t\t\t\"command\":\"WORKFLOW\"\n" + + "\t\t\t\t\t},\n" + + "\t\t\t\t\t\"id\":\"4928814091066534424\"\n" + + "\t\t\t\t},\n" + + "\t\t\t\t\"id\":\"5992975956233455901\",\n" + + "\t\t\t\t\"trigger\":{\n" + + "\t\t\t\t\t\"type\":\"Scheduler\",\n" + + "\t\t\t\t\t\"id\":\"7019189558026663217\",\n" + + "\t\t\t\t\t\"cron\":\"00 13 00 * * ?\",\n" + + "\t\t\t\t\t\"startTime\":\"1970-01-01 00:00:00\",\n" + + 
"\t\t\t\t\t\"endTime\":\"9999-01-01 00:00:00\",\n" + + "\t\t\t\t\t\"timezone\":\"Asia/Shanghai\",\n" + + "\t\t\t\t\t\"delaySeconds\":0\n" + + "\t\t\t\t},\n" + + "\t\t\t\t\"strategy\":{\n" + + "\t\t\t\t\t\"timeout\":0,\n" + + "\t\t\t\t\t\"rerunTimes\":3,\n" + + "\t\t\t\t\t\"rerunInterval\":180000,\n" + + "\t\t\t\t\t\"failureStrategy\":\"Break\"\n" + + "\t\t\t\t},\n" + + "\t\t\t\t\"name\":\"工作流跨周期小时依赖内部节点依赖外部\",\n" + + "\t\t\t\t\"owner\":\"206561090452322657\",\n" + + "\t\t\t\t\"metadata\":{\n" + + "\t\t\t\t\t\"owner\":\"206561090452322657\",\n" + + "\t\t\t\t\t\"tenantId\":\"524257424564736\",\n" + + "\t\t\t\t\t\"project\":{\n" + + "\t\t\t\t\t\t\"mode\":\"SIMPLE\",\n" + + "\t\t\t\t\t\t\"projectOwnerId\":\"1107550004253538\",\n" + + "\t\t\t\t\t\t\"tenantId\":\"524257424564736\",\n" + + "\t\t\t\t\t\t\"simple\":true,\n" + + "\t\t\t\t\t\t\"projectIdentifier\":\"lwt_test_newIde\",\n" + + "\t\t\t\t\t\t\"projectName\":\"李文涛测试新版ide\",\n" + + "\t\t\t\t\t\t\"projectId\":\"528891\"\n" + + "\t\t\t\t\t},\n" + + "\t\t\t\t\t\"ownerName\":\"lwttest04\",\n" + + "\t\t\t\t\t\"projectId\":\"528891\",\n" + + "\t\t\t\t\t\"schedulerNodeId\":700006657376\n" + + "\t\t\t\t},\n" + + "\t\t\t\t\"inputs\":{\n" + + "\n" + + "\t\t\t\t},\n" + + "\t\t\t\t\"outputs\":{\n" + + "\t\t\t\t\t\"nodeOutputs\":[\n" + + "\t\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\t\"data\":\"5992975956233455901\",\n" + + "\t\t\t\t\t\t\t\"artifactType\":\"NodeOutput\",\n" + + "\t\t\t\t\t\t\t\"refTableName\":\"工作流跨周期小时依赖内部节点依赖外部\",\n" + + "\t\t\t\t\t\t\t\"isDefault\":true\n" + + "\t\t\t\t\t\t}\n" + + "\t\t\t\t\t]\n" + + "\t\t\t\t},\n" + + "\t\t\t\t\"nodes\":[\n" + + "\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\"recurrence\":\"Normal\",\n" + + "\t\t\t\t\t\t\"id\":\"6203019234746940306\",\n" + + "\t\t\t\t\t\t\"timeout\":0,\n" + + "\t\t\t\t\t\t\"instanceMode\":\"T+1\",\n" + + "\t\t\t\t\t\t\"rerunMode\":\"Allowed\",\n" + + "\t\t\t\t\t\t\"rerunTimes\":3,\n" + + "\t\t\t\t\t\t\"rerunInterval\":180000,\n" + + "\t\t\t\t\t\t\"script\":{\n" + + "\t\t\t\t\t\t\t\"path\":\"李文涛测试工作流/工作流调度依赖跨周期场景/工作流跨周期小时依赖内部节点依赖外部/工作流跨周期小时依赖内部节点依赖外部_内部节点\",\n" + + "\t\t\t\t\t\t\t\"runtime\":{\n" + + "\t\t\t\t\t\t\t\t\"command\":\"DIDE_SHELL\",\n" + + "\t\t\t\t\t\t\t\t\"commandTypeId\":6\n" + + "\t\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\t\"content\":\"#!/bin/bash\\n#********************************************************************#\\n##author" + + ":lwttest04\\n##create time:2024-08-19 18:41:26\\n#********************************************************************#\",\n" + + "\t\t\t\t\t\t\t\"id\":\"8296284929718477332\"\n" + + "\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\"trigger\":{\n" + + "\t\t\t\t\t\t\t\"type\":\"Scheduler\",\n" + + "\t\t\t\t\t\t\t\"id\":\"4696080677296779430\",\n" + + "\t\t\t\t\t\t\t\"cron\":\"00 06 00 * * ?\",\n" + + "\t\t\t\t\t\t\t\"startTime\":\"1970-01-01 00:00:00\",\n" + + "\t\t\t\t\t\t\t\"endTime\":\"9999-01-01 00:00:00\",\n" + + "\t\t\t\t\t\t\t\"timezone\":\"Asia/Shanghai\",\n" + + "\t\t\t\t\t\t\t\"delaySeconds\":0\n" + + "\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\"runtimeResource\":{\n" + + "\t\t\t\t\t\t\t\"resourceGroup\":\"S_res_group_524257424564736_1722829742200\",\n" + + "\t\t\t\t\t\t\t\"id\":\"7011860292150347087\",\n" + + "\t\t\t\t\t\t\t\"resourceGroupId\":\"72014319\"\n" + + "\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\"name\":\"工作流跨周期小时依赖内部节点依赖外部_内部节点\",\n" + + "\t\t\t\t\t\t\"owner\":\"206561090452322657\",\n" + + "\t\t\t\t\t\t\"metadata\":{\n" + + "\t\t\t\t\t\t\t\"owner\":\"206561090452322657\",\n" + + "\t\t\t\t\t\t\t\"container\":{\n" + + "\t\t\t\t\t\t\t\t\"type\":\"Flow\",\n" + + 
"\t\t\t\t\t\t\t\t\"uuid\":\"5992975956233455901\"\n" + + "\t\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\t\"ownerName\":\"lwttest04\",\n" + + "\t\t\t\t\t\t\t\"schedulerNodeId\":700006657377,\n" + + "\t\t\t\t\t\t\t\"tenantId\":\"524257424564736\",\n" + + "\t\t\t\t\t\t\t\"project\":{\n" + + "\t\t\t\t\t\t\t\t\"mode\":\"SIMPLE\",\n" + + "\t\t\t\t\t\t\t\t\"projectOwnerId\":\"1107550004253538\",\n" + + "\t\t\t\t\t\t\t\t\"tenantId\":\"524257424564736\",\n" + + "\t\t\t\t\t\t\t\t\"simple\":true,\n" + + "\t\t\t\t\t\t\t\t\"projectIdentifier\":\"lwt_test_newIde\",\n" + + "\t\t\t\t\t\t\t\t\"projectName\":\"李文涛测试新版ide\",\n" + + "\t\t\t\t\t\t\t\t\"projectId\":\"528891\"\n" + + "\t\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\t\"projectId\":\"528891\"\n" + + "\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\"inputs\":{\n" + + "\n" + + "\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\"outputs\":{\n" + + "\t\t\t\t\t\t\t\"nodeOutputs\":[\n" + + "\t\t\t\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\t\t\t\"data\":\"6203019234746940306\",\n" + + "\t\t\t\t\t\t\t\t\t\"artifactType\":\"NodeOutput\",\n" + + "\t\t\t\t\t\t\t\t\t\"refTableName\":\"工作流跨周期小时依赖内部节点依赖外部_内部节点\",\n" + + "\t\t\t\t\t\t\t\t\t\"isDefault\":true\n" + + "\t\t\t\t\t\t\t\t}\n" + + "\t\t\t\t\t\t\t]\n" + + "\t\t\t\t\t\t}\n" + + "\t\t\t\t\t}\n" + + "\t\t\t\t],\n" + + "\t\t\t\t\"dependencies\":[\n" + + "\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\"nodeId\":\"6203019234746940306\",\n" + + "\t\t\t\t\t\t\"depends\":[\n" + + "\t\t\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\t\t\"type\":\"CrossCycleDependsOnOtherNode\",\n" + + "\t\t\t\t\t\t\t\t\"output\":\"7922382126549470808\",\n" + + "\t\t\t\t\t\t\t\t\"refTableName\":\"工作流跨周期依赖上游\"\n" + + "\t\t\t\t\t\t\t}\n" + + "\t\t\t\t\t\t]\n" + + "\t\t\t\t\t}\n" + + "\t\t\t\t]\n" + + "\t\t\t}\n" + + "\t\t]\n" + + "\t},\n" + + "\t\"metadata\":{\n" + + "\t\t\"uuid\":\"6203019234746940306\",\n" + + "\t\t\"gmtModified\":1724064225000\n" + + "\t}\n" + + "}"; + Specification spec = SpecUtil.parseToDomain(specStr); + SpecWorkflow workflow = spec.getSpec().getWorkflows().get(0); + DataWorksNodeAdapter dataWorksNodeAdapter = new DataWorksNodeAdapter(spec, workflow.getNodes().get(0)); + DwNodeDependentTypeInfo info = dataWorksNodeAdapter.getDependentType(outputs -> Collections.singletonList(123456789L)); + Assert.assertNotNull(info); + Assert.assertEquals(info.getDependentType(), DwNodeDependentTypeInfo.USER_DEFINE); + Assert.assertNotNull(info.getDependentNodeOutputList()); + Assert.assertTrue(info.getDependentNodeOutputList().contains("7922382126549470808")); + Assert.assertTrue(info.getDependentNodeIdList().contains(123456789L)); + + Assert.assertEquals(0, (int)dataWorksNodeAdapter.getNodeType()); + } + @Test public void test() { String spec = "{\n" @@ -780,4 +951,55 @@ public void testComponentSqlCode() { log.info("code: {}", code); Assert.assertEquals("select 'var1', '@@{p2}', '@@{p3}';", code); } + + @Test + public void testWorkflow() { + Specification specification = new Specification<>(); + DataWorksWorkflowSpec spec = new DataWorksWorkflowSpec(); + SpecWorkflow specWorkflow = new SpecWorkflow(); + SpecNodeOutput input = new SpecNodeOutput(); + input.setData("autotest.123_out"); + specWorkflow.setInputs(Collections.singletonList(input)); + SpecNodeOutput output = new SpecNodeOutput(); + output.setData("autotest.456_out"); + specWorkflow.setOutputs(Collections.singletonList(output)); + + SpecScript script = new SpecScript(); + SpecScriptRuntime runtime = new SpecScriptRuntime(); + runtime.setCommand("WORKFLOW"); + script.setRuntime(runtime); + specWorkflow.setScript(script); + 
spec.setWorkflows(Collections.singletonList(specWorkflow)); + + DataWorksNodeAdapter adapter = new DataWorksNodeAdapter(specification, specWorkflow); + + Assert.assertNotNull(adapter.getInputs()); + Assert.assertEquals(1, adapter.getInputs().size()); + Assert.assertNotNull(adapter.getOutputs()); + Assert.assertEquals(1, adapter.getOutputs().size()); + Assert.assertEquals(123, (int)adapter.getPrgType(cmd -> 123)); + } + + @Test + public void testStreamLaunchMode() { + Specification sp = new Specification<>(); + DataWorksWorkflowSpec spec = new DataWorksWorkflowSpec(); + SpecNode node = new SpecNode(); + node.setId("11"); + node.setName("sparkstreaming1"); + SpecScript script = new SpecScript(); + SpecScriptRuntime runtime = new SpecScriptRuntime(); + Map config = new HashMap<>(); + config.put(DataWorksNodeAdapter.STREAM_LAUNCH_MODE, 1); + runtime.setStreamJobConfig(config); + script.setRuntime(runtime); + node.setScript(script); + spec.setNodes(Collections.singletonList(node)); + sp.setSpec(spec); + + DataWorksNodeAdapter dataWorksNodeAdapter = new DataWorksNodeAdapter(sp, node); + Map extraConf = dataWorksNodeAdapter.getExtConfig(); + Assert.assertNotNull(extraConf); + Assert.assertEquals(1, extraConf.get(DataWorksNodeAdapter.STREAM_LAUNCH_MODE)); + } } diff --git a/spec/src/test/java/com/aliyun/dataworks/common/spec/domain/dw/nodemodel/DataWorksNodeCodeAdapterTest.java b/spec/src/test/java/com/aliyun/dataworks/common/spec/domain/dw/nodemodel/DataWorksNodeCodeAdapterTest.java index 43d5ac8..38e8e05 100644 --- a/spec/src/test/java/com/aliyun/dataworks/common/spec/domain/dw/nodemodel/DataWorksNodeCodeAdapterTest.java +++ b/spec/src/test/java/com/aliyun/dataworks/common/spec/domain/dw/nodemodel/DataWorksNodeCodeAdapterTest.java @@ -49,7 +49,6 @@ import com.aliyun.dataworks.common.spec.domain.ref.runtime.emr.EmrJobExecuteMode; import com.aliyun.dataworks.common.spec.domain.ref.runtime.emr.EmrJobSubmitMode; import com.aliyun.dataworks.common.spec.utils.JSONUtils; - import lombok.extern.slf4j.Slf4j; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; @@ -164,23 +163,82 @@ public void testJoin() { @Test public void testEmr() { - SpecNode emrNode = new SpecNode(); - SpecScript script = new SpecScript(); - script.setLanguage("hive-sql"); - script.setContent("select 'true';"); - SpecScriptRuntime runtime = new SpecScriptRuntime(); - runtime.setCommand(CodeProgramType.EMR_HIVE.name()); - EmrJobConfig emrJobConfig = new EmrJobConfig(); - emrJobConfig.setExecuteMode(EmrJobExecuteMode.SINGLE); - emrJobConfig.setSubmitMode(EmrJobSubmitMode.YARN); - emrJobConfig.setQueue("ods"); - emrJobConfig.setPriority(5); - emrJobConfig.setSessionEnabled(true); - runtime.setEmrJobConfig(emrJobConfig.toMap()); - script.setRuntime(runtime); - emrNode.setScript(script); + String spec = "{\n" + + "\t\"version\":\"1.1.0\",\n" + + "\t\"kind\":\"CycleWorkflow\",\n" + + "\t\"spec\":{\n" + + "\t\t\"nodes\":[\n" + + "\t\t\t{\n" + + "\t\t\t\t\"recurrence\":\"Normal\",\n" + + "\t\t\t\t\"id\":\"7261439383042556772\",\n" + + "\t\t\t\t\"timeout\":0,\n" + + "\t\t\t\t\"instanceMode\":\"T+1\",\n" + + "\t\t\t\t\"rerunMode\":\"Allowed\",\n" + + "\t\t\t\t\"rerunTimes\":3,\n" + + "\t\t\t\t\"rerunInterval\":180000,\n" + + "\t\t\t\t\"datasource\":{\n" + + "\t\t\t\t\t\"name\":\"dt_spark_cluster_ea120_01\",\n" + + "\t\t\t\t\t\"type\":\"emr\"\n" + + "\t\t\t\t},\n" + + "\t\t\t\t\"script\":{\n" + + "\t\t\t\t\t\"language\":\"spark-sql\",\n" + + "\t\t\t\t\t\"path\":\"个秋/onefall_test_spark_sql_1\",\n" 
+ + "\t\t\t\t\t\"runtime\":{\n" + + "\t\t\t\t\t\t\"command\":\"EMR_SPARK_SQL\",\n" + + "\t\t\t\t\t\t\"commandTypeId\":229,\n" + + "\t\t\t\t\t\t\"emrJobConfig\":{\n" + + "\t\t\t\t\t\t\t\n" + + "\t\t\t\t\t\t}\n" + + "\t\t\t\t\t},\n" + + "\t\t\t\t\t\"content\":\"select * from paimon.dt_spark_test_db5.students;\\n\",\n" + + "\t\t\t\t\t\"id\":\"5862898736117902935\"\n" + + "\t\t\t\t},\n" + + "\t\t\t\t\"trigger\":{\n" + + "\t\t\t\t\t\"type\":\"Scheduler\",\n" + + "\t\t\t\t\t\"id\":\"8461963194104597781\",\n" + + "\t\t\t\t\t\"cron\":\"00 00 00 * * ?\",\n" + + "\t\t\t\t\t\"startTime\":\"1970-01-01 00:00:00\",\n" + + "\t\t\t\t\t\"endTime\":\"9999-01-01 00:00:00\",\n" + + "\t\t\t\t\t\"timezone\":\"Asia/Shanghai\"\n" + + "\t\t\t\t},\n" + + "\t\t\t\t\"runtimeResource\":{\n" + + "\t\t\t\t\t\"resourceGroup\":\"emr_poc_serverless_spark\",\n" + + "\t\t\t\t\t\"id\":\"8212072828324694797\",\n" + + "\t\t\t\t\t\"resourceGroupId\":\"394152227\"\n" + + "\t\t\t\t},\n" + + "\t\t\t\t\"name\":\"onefall_test_spark_sql_1\",\n" + + "\t\t\t\t\"owner\":\"171389\",\n" + + "\t\t\t\t\"metadata\":{\n" + + "\t\t\t\t\t\"tenantId\":\"1\",\n" + + "\t\t\t\t\t\"projectId\":\"33293\"\n" + + "\t\t\t\t},\n" + + "\t\t\t\t\"inputs\":{\n" + + "\t\t\t\t\t\n" + + "\t\t\t\t},\n" + + "\t\t\t\t\"outputs\":{\n" + + "\t\t\t\t\t\n" + + "\t\t\t\t}\n" + + "\t\t\t}\n" + + "\t\t],\n" + + "\t\t\"flow\":[\n" + + "\t\t\t{\n" + + "\t\t\t\t\"nodeId\":\"7261439383042556772\",\n" + + "\t\t\t\t\"depends\":[\n" + + "\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\"type\":\"Normal\",\n" + + "\t\t\t\t\t\t\"output\":\"serverless_spark_root\"\n" + + "\t\t\t\t\t}\n" + + "\t\t\t\t]\n" + + "\t\t\t}\n" + + "\t\t]\n" + + "\t},\n" + + "\t\"metadata\":{\n" + + "\t\t\"uuid\":\"7261439383042556772\"\n" + + "\t}\n" + + "}"; - DataWorksNodeCodeAdapter adapter = new DataWorksNodeCodeAdapter(emrNode); + Specification sp = SpecUtil.parseToDomain(spec); + DataWorksNodeCodeAdapter adapter = new DataWorksNodeCodeAdapter(sp.getSpec().getNodes().get(0)); System.out.println(adapter.getCode()); } @@ -211,85 +269,85 @@ public void testEmrCompatible() { @Test public void testEmrSpecNodeAdapter() { String spec = - " {\n" - + " \"version\": \"1.1.0\",\n" - + " \"kind\": \"CycleWorkflow\",\n" - + " \"spec\": {\n" - + " \"nodes\": [\n" - + " {\n" - + " \"recurrence\": \"Normal\",\n" - + " \"id\": \"5452198562404448810\",\n" - + " \"timeout\": 12,\n" - + " \"instanceMode\": \"T+1\",\n" - + " \"rerunMode\": \"Allowed\",\n" - + " \"rerunTimes\": 3,\n" - + " \"rerunInterval\": 18000,\n" - + " \"datasource\": {\n" - + " \"name\": \"test_current_account_hadoop\",\n" - + " \"type\": \"emr\"\n" - + " },\n" - + " \"script\": {\n" - + " \"path\": \"createNode/emr_hive_test_0\",\n" - + " \"language\": \"odps\",\n" - + " \"content\": \"show databases\",\n" - + " \"runtime\": {\n" - + " \"engine\": \"EMR\",\n" - + " \"command\": \"EMR_HIVE\",\n" - + " \"emrJobConfig\": {\n" - + " \"cores\": 1,\n" - + " \"executeMode\": \"SINGLE\",\n" - + " \"memory\": 1024,\n" - + " \"priority\": 1,\n" - + " \"queue\": \"default\",\n" - + " \"submitMode\": \"LOCAL\",\n" - + " \"submitter\": \"root\"\n" - + " },\n" - + " \"sparkConf\": {\n" - + " \"spark.executor.memory\": \"1024m\",\n" - + " \"spark.executor.cores\": 1,\n" - + " \"spark.executor.instances\": 1,\n" - + " \"spark.yarn.maxAppAttempts\": 1,\n" - + " \"spark.yarn.queue\": \"default\",\n" - + " \"spark.yarn.maxExecutorRetries\": 1\n" - + " }\n" - + " },\n" - + " \"parameters\": [\n" - + " {\n" - + " \"name\": \"bizdate\",\n" - + " \"artifactType\": \"Variable\",\n" - + 
" \"scope\": \"NodeParameter\",\n" - + " \"type\": \"System\",\n" - + " \"value\": \"${yyyymmdd}\"\n" - + " }\n" - + " ]\n" - + " },\n" - + " \"trigger\": {\n" - + " \"type\": \"Scheduler\",\n" - + " \"cron\": \"00 00 00 * * ?\",\n" - + " \"startTime\": \"1970-01-01 00:00:00\",\n" - + " \"endTime\": \"9999-01-01 00:00:00\",\n" - + " \"timezone\": \"Asia/Shanghai\"\n" - + " },\n" - + " \"runtimeResource\": {\n" - + " \"resourceGroup\": \"res_group_1\",\n" - + " \"resourceGroupId\": \"1\"\n" - + " },\n" - + " \"name\": \"emr_hive_test_0\",\n" - + " \"owner\": \"WORKER_1482465063962\",\n" - + " \"inputs\": {},\n" - + " \"outputs\": {\n" - + " \"nodeOutputs\": [\n" - + " {\n" - + " \"data\": \"5452198562404448810\",\n" - + " \"artifactType\": \"NodeOutput\",\n" - + " \"refTableName\": \"emr_hive_test_0\"\n" - + " }\n" - + " ]\n" - + " }\n" - + " }\n" - + " ]\n" - + " }\n" - + " }"; + " {\n" + + " \"version\": \"1.1.0\",\n" + + " \"kind\": \"CycleWorkflow\",\n" + + " \"spec\": {\n" + + " \"nodes\": [\n" + + " {\n" + + " \"recurrence\": \"Normal\",\n" + + " \"id\": \"5452198562404448810\",\n" + + " \"timeout\": 12,\n" + + " \"instanceMode\": \"T+1\",\n" + + " \"rerunMode\": \"Allowed\",\n" + + " \"rerunTimes\": 3,\n" + + " \"rerunInterval\": 18000,\n" + + " \"datasource\": {\n" + + " \"name\": \"test_current_account_hadoop\",\n" + + " \"type\": \"emr\"\n" + + " },\n" + + " \"script\": {\n" + + " \"path\": \"createNode/emr_hive_test_0\",\n" + + " \"language\": \"odps\",\n" + + " \"content\": \"show databases\",\n" + + " \"runtime\": {\n" + + " \"engine\": \"EMR\",\n" + + " \"command\": \"EMR_HIVE\",\n" + + " \"emrJobConfig\": {\n" + + " \"cores\": 1,\n" + + " \"executeMode\": \"SINGLE\",\n" + + " \"memory\": 1024,\n" + + " \"priority\": 1,\n" + + " \"queue\": \"default\",\n" + + " \"submitMode\": \"LOCAL\",\n" + + " \"submitter\": \"root\"\n" + + " },\n" + + " \"sparkConf\": {\n" + + " \"spark.executor.memory\": \"1024m\",\n" + + " \"spark.executor.cores\": 1,\n" + + " \"spark.executor.instances\": 1,\n" + + " \"spark.yarn.maxAppAttempts\": 1,\n" + + " \"spark.yarn.queue\": \"default\",\n" + + " \"spark.yarn.maxExecutorRetries\": 1\n" + + " }\n" + + " },\n" + + " \"parameters\": [\n" + + " {\n" + + " \"name\": \"bizdate\",\n" + + " \"artifactType\": \"Variable\",\n" + + " \"scope\": \"NodeParameter\",\n" + + " \"type\": \"System\",\n" + + " \"value\": \"${yyyymmdd}\"\n" + + " }\n" + + " ]\n" + + " },\n" + + " \"trigger\": {\n" + + " \"type\": \"Scheduler\",\n" + + " \"cron\": \"00 00 00 * * ?\",\n" + + " \"startTime\": \"1970-01-01 00:00:00\",\n" + + " \"endTime\": \"9999-01-01 00:00:00\",\n" + + " \"timezone\": \"Asia/Shanghai\"\n" + + " },\n" + + " \"runtimeResource\": {\n" + + " \"resourceGroup\": \"res_group_1\",\n" + + " \"resourceGroupId\": \"1\"\n" + + " },\n" + + " \"name\": \"emr_hive_test_0\",\n" + + " \"owner\": \"WORKER_1482465063962\",\n" + + " \"inputs\": {},\n" + + " \"outputs\": {\n" + + " \"nodeOutputs\": [\n" + + " {\n" + + " \"data\": \"5452198562404448810\",\n" + + " \"artifactType\": \"NodeOutput\",\n" + + " \"refTableName\": \"emr_hive_test_0\"\n" + + " }\n" + + " ]\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + " }"; Specification sp = SpecUtil.parseToDomain(spec); SpecNode specNode = sp.getSpec().getNodes().get(0); @@ -313,41 +371,41 @@ public void testEmrSpecNodeAdapter() { @Test public void testHologresDataSyncNode() { String code = "{\n" - + " \"content\": \"IMPORT FOREIGN SCHEMA shanghai_onlineTest_simple LIMIT TO (wq_test_dataworks_pt_001) from SERVER " - + 
"odps_server INTO public OPTIONS(prefix 'tmp_foreign_', suffix 'xozi4mmb', if_table_exist 'error',if_unsupported_type 'error');" - + "\\nDROP TABLE IF EXISTS \\\"public\\\".tmp_holo_8gwvxopb_wqtest;\\nBEGIN;\\nCREATE TABLE IF NOT EXISTS \\\"public\\\"" - + ".tmp_holo_8gwvxopb_wqtest (\\n \\\"f1\\\" text NOT NULL,\\n \\\"f2\\\" text NOT NULL,\\n \\\"f4\\\" text NOT NULL,\\n \\\"f5\\\" " - + "text NOT NULL,\\n \\\"f3\\\" text NOT NULL,\\n \\\"f6\\\" text NOT NULL,\\n \\\"f7\\\" text NOT NULL,\\n \\\"f10\\\" text NOT NULL," - + "\\n \\\"ds\\\" bigint NOT NULL,\\n \\\"pt\\\" text NOT NULL\\n);\\nCALL SET_TABLE_PROPERTY('\\\"public\\\"" - + ".tmp_holo_8gwvxopb_wqtest', 'orientation', 'column');\\ncomment on column \\\"public\\\".tmp_holo_8gwvxopb_wqtest.pt is '分区字段';" - + "\\nCOMMIT;\\nINSERT INTO \\\"public\\\".tmp_holo_8gwvxopb_wqtest\\nSELECT \\n CAST(\\\"f1\\\" as text),\\n CAST(\\\"f2\\\" as " - + "text),\\n CAST(\\\"f4\\\" as text),\\n CAST(\\\"f5\\\" as text),\\n CAST(\\\"f3\\\" as text),\\n CAST(\\\"f6\\\" as " - + "text),\\n CAST(\\\"f7\\\" as text),\\n CAST(\\\"f10\\\" as text),\\n CAST(\\\"ds\\\" as bigint),\\n CAST(\\\"pt\\\" as " - + "text)\\nFROM \\\"public\\\".tmp_foreign_wq_test_dataworks_pt_001xozi4mmb\\nWHERE pt='${bizdate}';\\nDROP FOREIGN TABLE IF EXISTS " - + "\\\"public\\\".tmp_foreign_wq_test_dataworks_pt_001xozi4mmb;BEGIN;\\nDROP TABLE IF EXISTS \\\"public\\\".wqtest;\\nALTER TABLE " - + "\\\"public\\\".tmp_holo_8gwvxopb_wqtest RENAME TO wqtest;\\nCOMMIT;\\n\",\n" - + " \"extraContent\": \"{\\\"connId\\\":\\\"yongxunqa_holo_shanghai\\\",\\\"dbName\\\":\\\"yongxunqa_hologres_db\\\"," - + "\\\"syncType\\\":1,\\\"extendProjectName\\\":\\\"shanghai_onlineTest_simple\\\",\\\"schemaName\\\":\\\"public\\\"," - + "\\\"tableName\\\":\\\"wqtest\\\",\\\"partitionColumn\\\":\\\"\\\",\\\"orientation\\\":\\\"column\\\"," - + "\\\"columns\\\":[{\\\"name\\\":\\\"f1\\\",\\\"comment\\\":\\\"\\\",\\\"type\\\":\\\"STRING\\\",\\\"allowNull\\\":false," - + "\\\"holoName\\\":\\\"f1\\\",\\\"holoType\\\":\\\"text\\\"},{\\\"name\\\":\\\"f2\\\",\\\"comment\\\":\\\"\\\"," - + "\\\"type\\\":\\\"STRING\\\",\\\"allowNull\\\":false,\\\"holoName\\\":\\\"f2\\\",\\\"holoType\\\":\\\"text\\\"}," - + "{\\\"name\\\":\\\"f4\\\",\\\"comment\\\":\\\"\\\",\\\"type\\\":\\\"STRING\\\",\\\"allowNull\\\":false,\\\"holoName\\\":\\\"f4\\\"," - + "\\\"holoType\\\":\\\"text\\\"},{\\\"name\\\":\\\"f5\\\",\\\"comment\\\":\\\"\\\",\\\"type\\\":\\\"STRING\\\"," - + "\\\"allowNull\\\":false,\\\"holoName\\\":\\\"f5\\\",\\\"holoType\\\":\\\"text\\\"},{\\\"name\\\":\\\"f3\\\"," - + "\\\"comment\\\":\\\"\\\",\\\"type\\\":\\\"STRING\\\",\\\"allowNull\\\":false,\\\"holoName\\\":\\\"f3\\\"," - + "\\\"holoType\\\":\\\"text\\\"},{\\\"name\\\":\\\"f6\\\",\\\"comment\\\":\\\"\\\",\\\"type\\\":\\\"STRING\\\"," - + "\\\"allowNull\\\":false,\\\"holoName\\\":\\\"f6\\\",\\\"holoType\\\":\\\"text\\\"},{\\\"name\\\":\\\"f7\\\"," - + "\\\"comment\\\":\\\"\\\",\\\"type\\\":\\\"STRING\\\",\\\"allowNull\\\":false,\\\"holoName\\\":\\\"f7\\\"," - + "\\\"holoType\\\":\\\"text\\\"},{\\\"name\\\":\\\"f10\\\",\\\"comment\\\":\\\"\\\",\\\"type\\\":\\\"STRING\\\"," - + "\\\"allowNull\\\":false,\\\"holoName\\\":\\\"f10\\\",\\\"holoType\\\":\\\"text\\\"},{\\\"name\\\":\\\"ds\\\"," - + "\\\"comment\\\":\\\"\\\",\\\"type\\\":\\\"BIGINT\\\",\\\"allowNull\\\":false,\\\"holoName\\\":\\\"ds\\\"," - + "\\\"holoType\\\":\\\"bigint\\\"},{\\\"name\\\":\\\"pt\\\",\\\"comment\\\":\\\"分区字段\\\",\\\"type\\\":\\\"STRING\\\"," - + 
"\\\"allowNull\\\":false,\\\"holoName\\\":\\\"pt\\\",\\\"holoType\\\":\\\"text\\\"}],\\\"serverName\\\":\\\"odps_server\\\"," - + "\\\"extendTableName\\\":\\\"wq_test_dataworks_pt_001\\\",\\\"foreignSchemaName\\\":\\\"public\\\",\\\"foreignTableName\\\":\\\"\\\"," - + "\\\"instanceId\\\":\\\"yongxunqa_holo_shanghai\\\",\\\"engineType\\\":\\\"Hologres\\\",\\\"clusteringKey\\\":[]," - + "\\\"bitmapIndexKey\\\":[],\\\"segmentKey\\\":[],\\\"dictionaryEncoding\\\":[]}\"\n" - + " }"; + + " \"content\": \"IMPORT FOREIGN SCHEMA shanghai_onlineTest_simple LIMIT TO (wq_test_dataworks_pt_001) from SERVER " + + "odps_server INTO public OPTIONS(prefix 'tmp_foreign_', suffix 'xozi4mmb', if_table_exist 'error',if_unsupported_type 'error');" + + "\\nDROP TABLE IF EXISTS \\\"public\\\".tmp_holo_8gwvxopb_wqtest;\\nBEGIN;\\nCREATE TABLE IF NOT EXISTS \\\"public\\\"" + + ".tmp_holo_8gwvxopb_wqtest (\\n \\\"f1\\\" text NOT NULL,\\n \\\"f2\\\" text NOT NULL,\\n \\\"f4\\\" text NOT NULL,\\n \\\"f5\\\" " + + "text NOT NULL,\\n \\\"f3\\\" text NOT NULL,\\n \\\"f6\\\" text NOT NULL,\\n \\\"f7\\\" text NOT NULL,\\n \\\"f10\\\" text NOT NULL," + + "\\n \\\"ds\\\" bigint NOT NULL,\\n \\\"pt\\\" text NOT NULL\\n);\\nCALL SET_TABLE_PROPERTY('\\\"public\\\"" + + ".tmp_holo_8gwvxopb_wqtest', 'orientation', 'column');\\ncomment on column \\\"public\\\".tmp_holo_8gwvxopb_wqtest.pt is '分区字段';" + + "\\nCOMMIT;\\nINSERT INTO \\\"public\\\".tmp_holo_8gwvxopb_wqtest\\nSELECT \\n CAST(\\\"f1\\\" as text),\\n CAST(\\\"f2\\\" as " + + "text),\\n CAST(\\\"f4\\\" as text),\\n CAST(\\\"f5\\\" as text),\\n CAST(\\\"f3\\\" as text),\\n CAST(\\\"f6\\\" as " + + "text),\\n CAST(\\\"f7\\\" as text),\\n CAST(\\\"f10\\\" as text),\\n CAST(\\\"ds\\\" as bigint),\\n CAST(\\\"pt\\\" as " + + "text)\\nFROM \\\"public\\\".tmp_foreign_wq_test_dataworks_pt_001xozi4mmb\\nWHERE pt='${bizdate}';\\nDROP FOREIGN TABLE IF EXISTS " + + "\\\"public\\\".tmp_foreign_wq_test_dataworks_pt_001xozi4mmb;BEGIN;\\nDROP TABLE IF EXISTS \\\"public\\\".wqtest;\\nALTER TABLE " + + "\\\"public\\\".tmp_holo_8gwvxopb_wqtest RENAME TO wqtest;\\nCOMMIT;\\n\",\n" + + " \"extraContent\": \"{\\\"connId\\\":\\\"yongxunqa_holo_shanghai\\\",\\\"dbName\\\":\\\"yongxunqa_hologres_db\\\"," + + "\\\"syncType\\\":1,\\\"extendProjectName\\\":\\\"shanghai_onlineTest_simple\\\",\\\"schemaName\\\":\\\"public\\\"," + + "\\\"tableName\\\":\\\"wqtest\\\",\\\"partitionColumn\\\":\\\"\\\",\\\"orientation\\\":\\\"column\\\"," + + "\\\"columns\\\":[{\\\"name\\\":\\\"f1\\\",\\\"comment\\\":\\\"\\\",\\\"type\\\":\\\"STRING\\\",\\\"allowNull\\\":false," + + "\\\"holoName\\\":\\\"f1\\\",\\\"holoType\\\":\\\"text\\\"},{\\\"name\\\":\\\"f2\\\",\\\"comment\\\":\\\"\\\"," + + "\\\"type\\\":\\\"STRING\\\",\\\"allowNull\\\":false,\\\"holoName\\\":\\\"f2\\\",\\\"holoType\\\":\\\"text\\\"}," + + "{\\\"name\\\":\\\"f4\\\",\\\"comment\\\":\\\"\\\",\\\"type\\\":\\\"STRING\\\",\\\"allowNull\\\":false,\\\"holoName\\\":\\\"f4\\\"," + + "\\\"holoType\\\":\\\"text\\\"},{\\\"name\\\":\\\"f5\\\",\\\"comment\\\":\\\"\\\",\\\"type\\\":\\\"STRING\\\"," + + "\\\"allowNull\\\":false,\\\"holoName\\\":\\\"f5\\\",\\\"holoType\\\":\\\"text\\\"},{\\\"name\\\":\\\"f3\\\"," + + "\\\"comment\\\":\\\"\\\",\\\"type\\\":\\\"STRING\\\",\\\"allowNull\\\":false,\\\"holoName\\\":\\\"f3\\\"," + + "\\\"holoType\\\":\\\"text\\\"},{\\\"name\\\":\\\"f6\\\",\\\"comment\\\":\\\"\\\",\\\"type\\\":\\\"STRING\\\"," + + "\\\"allowNull\\\":false,\\\"holoName\\\":\\\"f6\\\",\\\"holoType\\\":\\\"text\\\"},{\\\"name\\\":\\\"f7\\\"," + + 
"\\\"comment\\\":\\\"\\\",\\\"type\\\":\\\"STRING\\\",\\\"allowNull\\\":false,\\\"holoName\\\":\\\"f7\\\"," + + "\\\"holoType\\\":\\\"text\\\"},{\\\"name\\\":\\\"f10\\\",\\\"comment\\\":\\\"\\\",\\\"type\\\":\\\"STRING\\\"," + + "\\\"allowNull\\\":false,\\\"holoName\\\":\\\"f10\\\",\\\"holoType\\\":\\\"text\\\"},{\\\"name\\\":\\\"ds\\\"," + + "\\\"comment\\\":\\\"\\\",\\\"type\\\":\\\"BIGINT\\\",\\\"allowNull\\\":false,\\\"holoName\\\":\\\"ds\\\"," + + "\\\"holoType\\\":\\\"bigint\\\"},{\\\"name\\\":\\\"pt\\\",\\\"comment\\\":\\\"分区字段\\\",\\\"type\\\":\\\"STRING\\\"," + + "\\\"allowNull\\\":false,\\\"holoName\\\":\\\"pt\\\",\\\"holoType\\\":\\\"text\\\"}],\\\"serverName\\\":\\\"odps_server\\\"," + + "\\\"extendTableName\\\":\\\"wq_test_dataworks_pt_001\\\",\\\"foreignSchemaName\\\":\\\"public\\\",\\\"foreignTableName\\\":\\\"\\\"," + + "\\\"instanceId\\\":\\\"yongxunqa_holo_shanghai\\\",\\\"engineType\\\":\\\"Hologres\\\",\\\"clusteringKey\\\":[]," + + "\\\"bitmapIndexKey\\\":[],\\\"segmentKey\\\":[],\\\"dictionaryEncoding\\\":[]}\"\n" + + " }"; SpecNode node = new SpecNode(); SpecScript scr = new SpecScript(); SpecScriptRuntime rt = new SpecScriptRuntime(); diff --git a/spec/src/test/java/com/aliyun/dataworks/common/spec/domain/dw/nodemodel/DataWorksNodeInputOutputAdapterTest.java b/spec/src/test/java/com/aliyun/dataworks/common/spec/domain/dw/nodemodel/DataWorksNodeInputOutputAdapterTest.java new file mode 100644 index 0000000..7db5bb3 --- /dev/null +++ b/spec/src/test/java/com/aliyun/dataworks/common/spec/domain/dw/nodemodel/DataWorksNodeInputOutputAdapterTest.java @@ -0,0 +1,251 @@ +/* + * Copyright (c) 2024, Alibaba Cloud; + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package com.aliyun.dataworks.common.spec.domain.dw.nodemodel;
+
+import java.util.Collections;
+import java.util.List;
+
+import com.aliyun.dataworks.common.spec.SpecUtil;
+import com.aliyun.dataworks.common.spec.domain.DataWorksWorkflowSpec;
+import com.aliyun.dataworks.common.spec.domain.Specification;
+import com.aliyun.dataworks.common.spec.domain.enums.DependencyType;
+import com.aliyun.dataworks.common.spec.domain.enums.SpecKind;
+import com.aliyun.dataworks.common.spec.domain.enums.SpecVersion;
+import com.aliyun.dataworks.common.spec.domain.interfaces.Input;
+import com.aliyun.dataworks.common.spec.domain.noref.SpecDepend;
+import com.aliyun.dataworks.common.spec.domain.noref.SpecDoWhile;
+import com.aliyun.dataworks.common.spec.domain.noref.SpecFlowDepend;
+import com.aliyun.dataworks.common.spec.domain.ref.SpecNode;
+import com.aliyun.dataworks.common.spec.domain.ref.SpecNodeOutput;
+import com.aliyun.dataworks.common.spec.domain.ref.SpecWorkflow;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.collections4.CollectionUtils;
+import org.junit.Assert;
+import org.junit.Test;
+
+/**
+ * @author 聿剑
+ * @date 2024/8/22
+ */
+@Slf4j
+public class DataWorksNodeInputOutputAdapterTest {
+    @Test
+    public void testGetInputsOfInnerNodeOfWorkflowNode() {
+        Specification<DataWorksWorkflowSpec> spec = new Specification<>();
+        spec.setVersion(SpecVersion.V_1_2_0.getLabel());
+        spec.setKind(SpecKind.CYCLE_WORKFLOW.getLabel());
+        DataWorksWorkflowSpec dataworksWorkflowSpec = new DataWorksWorkflowSpec();
+        SpecWorkflow workflow = new SpecWorkflow();
+        workflow.setId("workflow-id-1");
+
+        SpecNode dowhile = new SpecNode();
+        dowhile.setId("workflow-dowhile-id-1");
+        SpecDoWhile specDowhile = new SpecDoWhile();
+        SpecNode specNodeWhile = new SpecNode();
+        specNodeWhile.setId("workflow-dowhile-while-id-1");
+        specDowhile.setSpecWhile(specNodeWhile);
+        SpecFlowDepend whileDep = new SpecFlowDepend();
+        whileDep.setNodeId(specNodeWhile);
+        SpecDepend dep = new SpecDepend();
+        SpecNodeOutput output = new SpecNodeOutput();
+        output.setData("autotest.while_dep_1");
+        dep.setOutput(output);
+        dep.setType(DependencyType.NORMAL);
+        whileDep.setDepends(Collections.singletonList(dep));
+        specDowhile.setFlow(Collections.singletonList(whileDep));
+        dowhile.setDoWhile(specDowhile);
+
+        workflow.setNodes(Collections.singletonList(dowhile));
+        dataworksWorkflowSpec.setWorkflows(Collections.singletonList(workflow));
+        spec.setSpec(dataworksWorkflowSpec);
+        DataWorksNodeInputOutputAdapter adapter = new DataWorksNodeInputOutputAdapter(spec, specNodeWhile);
+        List<Input> inputs = adapter.getInputs();
+        log.info("inputs: {}", inputs);
+        Assert.assertNotNull(inputs);
+        Assert.assertEquals(1, CollectionUtils.size(inputs));
+        Assert.assertEquals(output.getData(), ((SpecNodeOutput)inputs.get(0)).getData());
+    }
+
+    @Test
+    public void testForeachInputContext() {
+        String specJson = "{\n" +
+            "\t\t\t\t\"metadata\": {\n" +
+            "\t\t\t\t\t\"gmtModified\": 1724643085000,\n" +
+            "\t\t\t\t\t\"uuid\": \"5055902565472511966\"\n" +
+            "\t\t\t\t},\n" +
+            "\t\t\t\t\"kind\": \"CycleWorkflow\",\n" +
+            "\t\t\t\t\"version\": \"1.1.0\",\n" +
+            "\t\t\t\t\"spec\": {\n" +
+            "\t\t\t\t\t\"owner\": \"1107550004253538\",\n" +
+            "\t\t\t\t\t\"name\": \"工作流挂载跨周期一级子节点依赖之后再挂载自依赖失败\",\n" +
+            "\t\t\t\t\t\"id\": \"6611006631241735927\",\n" +
+            "\t\t\t\t\t\"workflows\": [\n" +
+            "\t\t\t\t\t\t{\n" +
+            "\t\t\t\t\t\t\t\"owner\": \"1107550004253538\",\n" +
+            "\t\t\t\t\t\t\t\"outputs\": {\n" +
+            "\t\t\t\t\t\t\t\t\"nodeOutputs\": [\n" +
+            "\t\t\t\t\t\t\t\t\t{\n" +
"\t\t\t\t\t\t\t\t\t\t\"artifactType\": \"NodeOutput\",\n" + + "\t\t\t\t\t\t\t\t\t\t\"isDefault\": true,\n" + + "\t\t\t\t\t\t\t\t\t\t\"data\": \"6611006631241735927\",\n" + + "\t\t\t\t\t\t\t\t\t\t\"refTableName\": \"工作流挂载跨周期一级子节点依赖之后再挂载自依赖失败\"\n" + + "\t\t\t\t\t\t\t\t\t}\n" + + "\t\t\t\t\t\t\t\t]\n" + + "\t\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\t\"metadata\": {\n" + + "\t\t\t\t\t\t\t\t\"owner\": \"1107550004253538\",\n" + + "\t\t\t\t\t\t\t\t\"ownerName\": \"dw_on_emr_qa3@test.aliyunid.com\",\n" + + "\t\t\t\t\t\t\t\t\"schedulerNodeId\": 700006666312,\n" + + "\t\t\t\t\t\t\t\t\"tenantId\": \"524257424564736\",\n" + + "\t\t\t\t\t\t\t\t\"project\": {\n" + + "\t\t\t\t\t\t\t\t\t\"mode\": \"SIMPLE\",\n" + + "\t\t\t\t\t\t\t\t\t\"projectOwnerId\": \"1107550004253538\",\n" + + "\t\t\t\t\t\t\t\t\t\"tenantId\": \"524257424564736\",\n" + + "\t\t\t\t\t\t\t\t\t\"simple\": true,\n" + + "\t\t\t\t\t\t\t\t\t\"projectIdentifier\": \"lwt_test_newIde\",\n" + + "\t\t\t\t\t\t\t\t\t\"projectName\": \"李文涛测试新版ide\",\n" + + "\t\t\t\t\t\t\t\t\t\"projectId\": \"528891\"\n" + + "\t\t\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\t\t\"projectId\": \"528891\"\n" + + "\t\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\t\"nodes\": [\n" + + "\t\t\t\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\t\t\t\"owner\": \"1107550004253538\",\n" + + "\t\t\t\t\t\t\t\t\t\"outputs\": {\n" + + "\t\t\t\t\t\t\t\t\t\t\"nodeOutputs\": [\n" + + "\t\t\t\t\t\t\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\t\t\t\t\t\t\"artifactType\": \"NodeOutput\",\n" + + "\t\t\t\t\t\t\t\t\t\t\t\t\"isDefault\": true,\n" + + "\t\t\t\t\t\t\t\t\t\t\t\t\"data\": \"5055902565472511966\",\n" + + "\t\t\t\t\t\t\t\t\t\t\t\t\"refTableName\": \"ggg\"\n" + + "\t\t\t\t\t\t\t\t\t\t\t}\n" + + "\t\t\t\t\t\t\t\t\t\t]\n" + + "\t\t\t\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\t\t\t\"metadata\": {\n" + + "\t\t\t\t\t\t\t\t\t\t\"owner\": \"1107550004253538\",\n" + + "\t\t\t\t\t\t\t\t\t\t\"container\": {\n" + + "\t\t\t\t\t\t\t\t\t\t\t\"type\": \"Flow\",\n" + + "\t\t\t\t\t\t\t\t\t\t\t\"uuid\": \"6611006631241735927\"\n" + + "\t\t\t\t\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\t\t\t\t\"ownerName\": \"dw_on_emr_qa3@test.aliyunid.com\",\n" + + "\t\t\t\t\t\t\t\t\t\t\"schedulerNodeId\": 700006666313,\n" + + "\t\t\t\t\t\t\t\t\t\t\"tenantId\": \"524257424564736\",\n" + + "\t\t\t\t\t\t\t\t\t\t\"project\": {\n" + + "\t\t\t\t\t\t\t\t\t\t\t\"mode\": \"SIMPLE\",\n" + + "\t\t\t\t\t\t\t\t\t\t\t\"projectOwnerId\": \"1107550004253538\",\n" + + "\t\t\t\t\t\t\t\t\t\t\t\"tenantId\": \"524257424564736\",\n" + + "\t\t\t\t\t\t\t\t\t\t\t\"simple\": true,\n" + + "\t\t\t\t\t\t\t\t\t\t\t\"projectIdentifier\": \"lwt_test_newIde\",\n" + + "\t\t\t\t\t\t\t\t\t\t\t\"projectName\": \"李文涛测试新版ide\",\n" + + "\t\t\t\t\t\t\t\t\t\t\t\"projectId\": \"528891\"\n" + + "\t\t\t\t\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\t\t\t\t\"projectId\": \"528891\"\n" + + "\t\t\t\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\t\t\t\"rerunInterval\": 180000,\n" + + "\t\t\t\t\t\t\t\t\t\"inputs\": {},\n" + + "\t\t\t\t\t\t\t\t\t\"rerunMode\": \"Allowed\",\n" + + "\t\t\t\t\t\t\t\t\t\"trigger\": {\n" + + "\t\t\t\t\t\t\t\t\t\t\"cron\": \"00 02 00 * * ?\",\n" + + "\t\t\t\t\t\t\t\t\t\t\"delaySeconds\": 0,\n" + + "\t\t\t\t\t\t\t\t\t\t\"timezone\": \"Asia/Shanghai\",\n" + + "\t\t\t\t\t\t\t\t\t\t\"startTime\": \"1970-01-01 00:00:00\",\n" + + "\t\t\t\t\t\t\t\t\t\t\"id\": \"9087147597187371929\",\n" + + "\t\t\t\t\t\t\t\t\t\t\"endTime\": \"9999-01-01 00:00:00\",\n" + + "\t\t\t\t\t\t\t\t\t\t\"type\": \"Scheduler\"\n" + + "\t\t\t\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\t\t\t\"timeout\": 0,\n" + + "\t\t\t\t\t\t\t\t\t\"script\": {\n" + + 
"\t\t\t\t\t\t\t\t\t\t\"path\": \"李文涛测试工作流/缺陷验证/工作流挂载跨周期一级子节点依赖之后再挂载自依赖失败/ggg\",\n" + + "\t\t\t\t\t\t\t\t\t\t\"runtime\": {\n" + + "\t\t\t\t\t\t\t\t\t\t\t\"commandTypeId\": 6,\n" + + "\t\t\t\t\t\t\t\t\t\t\t\"command\": \"DIDE_SHELL\"\n" + + "\t\t\t\t\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\t\t\t\t\"id\": \"8209655082050446613\",\n" + + "\t\t\t\t\t\t\t\t\t\t\"content\": \"#!/bin/bash\\n#********************************************************************#\\n##author" + + ":dw_on_emr_qa3@test.aliyunid.com\\n##create time:2024-08-26 " + + "11:31:26\\n#********************************************************************#\"\n" + + "\t\t\t\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\t\t\t\"recurrence\": \"Normal\",\n" + + "\t\t\t\t\t\t\t\t\t\"runtimeResource\": {\n" + + "\t\t\t\t\t\t\t\t\t\t\"resourceGroup\": \"group_524257424564736\",\n" + + "\t\t\t\t\t\t\t\t\t\t\"resourceGroupId\": \"50414322\",\n" + + "\t\t\t\t\t\t\t\t\t\t\"id\": \"4780972265711308253\"\n" + + "\t\t\t\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\t\t\t\"rerunTimes\": 3,\n" + + "\t\t\t\t\t\t\t\t\t\"name\": \"ggg\",\n" + + "\t\t\t\t\t\t\t\t\t\"id\": \"5055902565472511966\",\n" + + "\t\t\t\t\t\t\t\t\t\"instanceMode\": \"T+1\"\n" + + "\t\t\t\t\t\t\t\t}\n" + + "\t\t\t\t\t\t\t],\n" + + "\t\t\t\t\t\t\t\"inputs\": {},\n" + + "\t\t\t\t\t\t\t\"name\": \"工作流挂载跨周期一级子节点依赖之后再挂载自依赖失败\",\n" + + "\t\t\t\t\t\t\t\"id\": \"6611006631241735927\",\n" + + "\t\t\t\t\t\t\t\"trigger\": {\n" + + "\t\t\t\t\t\t\t\t\"cron\": \"00 01 00 * * ?\",\n" + + "\t\t\t\t\t\t\t\t\"delaySeconds\": 0,\n" + + "\t\t\t\t\t\t\t\t\"timezone\": \"Asia/Shanghai\",\n" + + "\t\t\t\t\t\t\t\t\"startTime\": \"1970-01-01 00:00:00\",\n" + + "\t\t\t\t\t\t\t\t\"id\": \"6461813501878498656\",\n" + + "\t\t\t\t\t\t\t\t\"endTime\": \"9999-01-01 00:00:00\",\n" + + "\t\t\t\t\t\t\t\t\"type\": \"Scheduler\"\n" + + "\t\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\t\"strategy\": {\n" + + "\t\t\t\t\t\t\t\t\"rerunInterval\": 180000,\n" + + "\t\t\t\t\t\t\t\t\"failureStrategy\": \"Break\",\n" + + "\t\t\t\t\t\t\t\t\"rerunTimes\": 3,\n" + + "\t\t\t\t\t\t\t\t\"rerunMode\": \"Allowed\",\n" + + "\t\t\t\t\t\t\t\t\"instanceMode\": \"T+1\",\n" + + "\t\t\t\t\t\t\t\t\"timeout\": 0\n" + + "\t\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\t\"script\": {\n" + + "\t\t\t\t\t\t\t\t\"path\": \"李文涛测试工作流/缺陷验证/工作流挂载跨周期一级子节点依赖之后再挂载自依赖失败\",\n" + + "\t\t\t\t\t\t\t\t\"runtime\": {\n" + + "\t\t\t\t\t\t\t\t\t\"command\": \"WORKFLOW\"\n" + + "\t\t\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\t\t\"id\": \"7170058299331901324\"\n" + + "\t\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\t\"dependencies\": []\n" + + "\t\t\t\t\t\t}\n" + + "\t\t\t\t\t],\n" + + "\t\t\t\t\t\"type\": \"CycleWorkflow\",\n" + + "\t\t\t\t\t\"flow\": [\n" + + "\t\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\t\"depends\": [\n" + + "\t\t\t\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\t\t\t\"output\": \"6611006631241735927\",\n" + + "\t\t\t\t\t\t\t\t\t\"refTableName\": \"工作流挂载跨周期一级子节点依赖之后再挂载自依赖失败\",\n" + + "\t\t\t\t\t\t\t\t\t\"type\": \"CrossCycleDependsOnSelf\"\n" + + "\t\t\t\t\t\t\t\t},\n" + + "\t\t\t\t\t\t\t\t{\n" + + "\t\t\t\t\t\t\t\t\t\"output\": \"6611006631241735927\",\n" + + "\t\t\t\t\t\t\t\t\t\"refTableName\": \"工作流挂载跨周期一级子节点依赖之后再挂载自依赖失败\",\n" + + "\t\t\t\t\t\t\t\t\t\"type\": \"CrossCycleDependsOnChildren\"\n" + + "\t\t\t\t\t\t\t\t}\n" + + "\t\t\t\t\t\t\t],\n" + + "\t\t\t\t\t\t\t\"nodeId\": \"6611006631241735927\"\n" + + "\t\t\t\t\t\t}\n" + + "\t\t\t\t\t]\n" + + "\t\t\t\t}\n" + + "\t\t\t}"; + + Specification spec = SpecUtil.parseToDomain(specJson); + DataWorksNodeAdapter adapter = new DataWorksNodeAdapter(spec, 
spec.getSpec().getWorkflows().get(0)); + DwNodeDependentTypeInfo dep = adapter.getDependentType(n -> null); + log.info("inputs: {}", dep); + Assert.assertNotNull(dep); + Assert.assertEquals(DwNodeDependentTypeInfo.CHILD_AND_SELF, dep.getDependentType()); + } +} diff --git a/spec/src/test/java/com/aliyun/dataworks/common/spec/domain/ref/SpecNodeTest.java b/spec/src/test/java/com/aliyun/dataworks/common/spec/domain/ref/SpecNodeTest.java new file mode 100644 index 0000000..73595df --- /dev/null +++ b/spec/src/test/java/com/aliyun/dataworks/common/spec/domain/ref/SpecNodeTest.java @@ -0,0 +1,91 @@ +/* + * Copyright (c) 2024, Alibaba Cloud; + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.aliyun.dataworks.common.spec.domain.ref; + +import java.util.Collections; + +import org.junit.Assert; +import org.junit.Test; + +/** + * @author 聿剑 + * @date 2024/7/29 + */ +public class SpecNodeTest { + @Test + public void testEquals() { + SpecNode n1 = new SpecNode(); + n1.setId("n1"); + n1.setName("n1"); + SpecDatasource ds1 = new SpecDatasource(); + ds1.setName("ds1"); + ds1.setType("odps"); + n1.setDatasource(ds1); + + SpecNode n2 = new SpecNode(); + n2.setId("n1"); + n2.setName("n1"); + SpecDatasource ds2 = new SpecDatasource(); + ds2.setName("ds2"); + ds2.setType("odps"); + n2.setDatasource(ds2); + + Assert.assertNotEquals(n1, n2); + + ds2.setName("ds1"); + Assert.assertEquals(ds1, ds2); + Assert.assertEquals(n1, n2); + + n2.setName("n2"); + Assert.assertNotEquals(n1, n2); + } + + @Test + public void testEqualsOutputInput() { + SpecNode n1 = new SpecNode(); + n1.setId("n1"); + n1.setName("n1"); + SpecDatasource ds1 = new SpecDatasource(); + ds1.setName("ds1"); + ds1.setType("odps"); + n1.setDatasource(ds1); + + SpecNode n2 = new SpecNode(); + n2.setId("n1"); + n2.setName("n1"); + SpecDatasource ds2 = new SpecDatasource(); + ds2.setName("ds2"); + ds2.setType("odps"); + n2.setDatasource(ds2); + + Assert.assertNotEquals(n1, n2); + + ds2.setName("ds1"); + Assert.assertEquals(ds1, ds2); + Assert.assertEquals(n1, n2); + + SpecNodeOutput in1 = new SpecNodeOutput(); + in1.setData("output1"); + in1.setRefTableName("refTable1"); + n1.setInputs(Collections.singletonList(in1)); + + SpecNodeOutput in2 = new SpecNodeOutput(); + in2.setData("output1"); + in2.setRefTableName("refTable1"); + n2.setInputs(Collections.singletonList(in2)); + Assert.assertEquals(n1, n2); + } +} diff --git a/spec/src/test/java/com/aliyun/dataworks/common/spec/parser/impl/SpecScriptRuntimeParserTest.java b/spec/src/test/java/com/aliyun/dataworks/common/spec/parser/impl/SpecScriptRuntimeParserTest.java index 0ef0051..f416333 100644 --- a/spec/src/test/java/com/aliyun/dataworks/common/spec/parser/impl/SpecScriptRuntimeParserTest.java +++ b/spec/src/test/java/com/aliyun/dataworks/common/spec/parser/impl/SpecScriptRuntimeParserTest.java @@ -42,6 +42,7 @@ public void testParse() throws Exception { specContainer.setImageId("image_id"); param.setContainer(specContainer); param.setCommand("PYTHON"); + param.setCu("0.5"); Map 
map = JSON.parseObject(JSON.toJSONString(param), Map.class);