Skip to content

Commit

Permalink
[WIP] compatible with both flink yarn/k8s modes task submission/termination (#261)

Browse files Browse the repository at this point in the history

* [feature] flink k8s native mode support

* [feature] flink k8s native mode support

* [issue#220] refactoring SubmitRequest, SubmitResponse to adapt k8s submit operations

* [issue#220] refactoring SubmitRequest, SubmitResponse to adapt k8s submit operations

* [issue#220] New dto object for flink stop action parameter transfer process

* [issue#220] refactor: move the parameters of the flink stop method to a dedicated dto object

* modify configuration constants of workspace(#251)

* typo(#251)

* add isAnyBank method(#251)

* add unified fs operator defined(#251)

* register FsOperator to SpringBoot Bean(#251)

* remove unnecessary import(#251)

* extend the signature of method upload, copy, copyDir(#251)

* Separate workspace storage type into configuration(#251)

* Separate workspace storage type into configuration(#251)

* add fileMd5 method(#251)

* replace the code reference of HdfsUtils to FsOperator(#251)

* change the bean injection behavior of FsOperator(#251)

* change the config key of streamx.workspace(#251)

* fix stack overflow bug

* LfsOperator.upload support dir source

* Update ConfigConst.scala

* Update HdfsOperator.scala

* Update LfsOperator.scala

* Update UnifiledFsOperator.scala

* Update Utils.scala

* compatible with flink k8s submit

* compatible with flink k8s submit

Co-authored-by: benjobs <benjobs@qq.com>
  • Loading branch information
Al-assad and wolfboys committed Jul 26, 2021
1 parent 87022d2 commit 7ae4d15
Show file tree
Hide file tree
Showing 30 changed files with 1,077 additions and 218 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,8 @@
*/
package com.streamxhub.streamx.common.conf

import com.streamxhub.streamx.common.enums.StorageType

object ConfigConst {
/**
*
Expand Down Expand Up @@ -281,37 +283,47 @@ object ConfigConst {

val KEY_ES_CLIENT_TRANSPORT_SNIFF = "client.transport.sniff"

val STREAMX_HDFS_WORKSPACE_DEFAULT = "/streamx"
val KEY_STREAMX_WORKSPACE = "streamx.workspace.path"

val STREAMX_WORKSPACE_DEFAULT = "/streamx"

val KEY_STREAMX_HDFS_WORKSPACE = "streamx.hdfs.workspace"
val KEY_STREAMX_WORKSPACE_TYPE = "streamx.workspace.type"

lazy val HDFS_WORKSPACE: String = {
val workspace = System.getProperties.getProperty(KEY_STREAMX_HDFS_WORKSPACE, STREAMX_HDFS_WORKSPACE_DEFAULT)
require(!workspace.startsWith("hdfs://"))
workspace
}
/**
* optional value from {@link StorageType}
*/
val STREAMX_WORKSPACE_TYPE_DEFAULT = "lfs"

lazy val APP_PLUGINS = s"$HDFS_WORKSPACE/plugins"
lazy val WORKSPACE: String = System.getProperties.getProperty(KEY_STREAMX_WORKSPACE, STREAMX_WORKSPACE_DEFAULT)

lazy val WORKSPACE_TYPE: StorageType = StorageType.of(
System.getProperties.getProperty(
KEY_STREAMX_WORKSPACE_TYPE,
STREAMX_WORKSPACE_TYPE_DEFAULT
)
)

lazy val APP_PLUGINS = s"$WORKSPACE/plugins"

/**
* 存放不同版本flink相关的jar
*/
lazy val APP_SHIMS = s"$HDFS_WORKSPACE/shims"
lazy val APP_SHIMS = s"$WORKSPACE/shims"

lazy val APP_UPLOADS = s"$HDFS_WORKSPACE/uploads"
lazy val APP_UPLOADS = s"$WORKSPACE/uploads"

lazy val APP_WORKSPACE = s"$HDFS_WORKSPACE/workspace"
lazy val APP_WORKSPACE = s"$WORKSPACE/workspace"

lazy val APP_FLINK = s"$HDFS_WORKSPACE/flink"
lazy val APP_FLINK = s"$WORKSPACE/flink"

lazy val APP_BACKUPS = s"$HDFS_WORKSPACE/backups"
lazy val APP_BACKUPS = s"$WORKSPACE/backups"

lazy val APP_SAVEPOINTS = s"$HDFS_WORKSPACE/savepoints"
lazy val APP_SAVEPOINTS = s"$WORKSPACE/savepoints"

/**
* 存放全局公共的jar
*/
lazy val APP_JARS = s"$HDFS_WORKSPACE/jars"
lazy val APP_JARS = s"$WORKSPACE/jars"

val LOGO =
"""
Expand Down Expand Up @@ -360,6 +372,7 @@ object ConfigConst {
|
|""".stripMargin


}


Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,9 @@ public enum ExecutionMode {
YARN_PRE_JOB(2, "yarn-pre-job"),
YARN_SESSION(3, "yarn-session"),
APPLICATION(4, "yarn-application"),
KUBERNETES(5, "kubernetes");
KUBERNETES_NATIVE_SESSION(5, "kubernetes-session"),
KUBERNETES_NATIVE_APPLICATION(6,"kubernetes-application");


private Integer mode;
private String name;
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
/*
* Copyright (c) 2021 The StreamX Project
* <p>
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package com.streamxhub.streamx.common.enums;

import org.apache.commons.lang3.StringUtils;

public enum StorageType {

    /** Hadoop Distributed File System backed storage. */
    HDFS("hdfs"),

    /** Local File System backed storage (the default). */
    LFS("lfs");

    /** Short textual id used in configuration values (e.g. the workspace-type setting). */
    private final String identifier;

    StorageType(String identifier) {
        this.identifier = identifier;
    }

    public String getIdentifier() {
        return identifier;
    }

    /**
     * Resolves a storage type from its configuration identifier.
     * Falls back to {@link #LFS} when the identifier is null, empty or unknown.
     *
     * @param identifier configuration value such as "hdfs" or "lfs"; may be null
     * @return the matching StorageType, or LFS as the default
     */
    public static StorageType of(String identifier) {
        // Plain null/empty check instead of StringUtils.isEmpty: same semantics,
        // no third-party dependency needed for this trivial test.
        if (identifier == null || identifier.isEmpty()) {
            return LFS;
        }
        for (StorageType type : values()) {
            if (type.identifier.equals(identifier)) {
                return type;
            }
        }
        // Unknown identifiers deliberately fall back to LFS rather than failing.
        return LFS;
    }

}
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
/*
* Copyright (c) 2021 The StreamX Project
* <p>
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package com.streamxhub.streamx.common.fs

/**
 * Unified file-system operation contract, implemented once per storage
 * backend (e.g. local FS, HDFS).
 */
trait FsOperator {

  /** Returns true when the given path exists on the backing file system. */
  def exists(path: String): Boolean

  /** Creates the directory at the given path, including missing parents. */
  def mkdirs(path: String): Unit

  /** Removes the file or directory at the given path. */
  def delete(path: String): Unit

  /** Upload with the default policy: keep the source, overwrite the target. */
  def upload(srcPath: String, dstPath: String): Unit =
    upload(srcPath, dstPath, delSrc = false, overwrite = true)

  /** Copy a single file with the default policy: keep the source, overwrite the target. */
  def copy(srcPath: String, dstPath: String): Unit =
    copy(srcPath, dstPath, delSrc = false, overwrite = true)

  /** Copy a directory with the default policy: keep the source, overwrite the target. */
  def copyDir(srcPath: String, dstPath: String): Unit =
    copyDir(srcPath, dstPath, delSrc = false, overwrite = true)

  /**
   * Uploads srcPath to dstPath.
   *
   * @param delSrc    delete the source after a successful transfer
   * @param overwrite replace an already-existing target
   */
  def upload(srcPath: String, dstPath: String, delSrc: Boolean = false, overwrite: Boolean = true): Unit

  /** Copies a single file; delSrc/overwrite as documented on upload. */
  def copy(srcPath: String, dstPath: String, delSrc: Boolean = false, overwrite: Boolean = true): Unit

  /** Copies a directory tree; delSrc/overwrite as documented on upload. */
  def copyDir(srcPath: String, dstPath: String, delSrc: Boolean = false, overwrite: Boolean = true): Unit

  /** Moves (renames) srcPath to dstPath. */
  def move(srcPath: String, dstPath: String): Unit

  /** Returns the hex MD5 digest of the file content at the given path. */
  def fileMd5(path: String): String

}
Original file line number Diff line number Diff line change
@@ -0,0 +1,59 @@
/*
* Copyright (c) 2021 The StreamX Project
* <p>
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package com.streamxhub.streamx.common.fs

import com.streamxhub.streamx.common.util.{HdfsUtils, Logger}

/**
* Hadoop File System (aka HDFS) Operator
*/
/**
 * Hadoop File System (aka HDFS) Operator.
 *
 * Every operation delegates to HdfsUtils after normalizing the incoming
 * path to a fully-qualified hdfs:// URI via toHdfsPath.
 */
object HdfsOperator extends FsOperator with Logger {

  override def exists(path: String): Boolean = HdfsUtils.exists(toHdfsPath(path))

  override def mkdirs(path: String): Unit = HdfsUtils.mkdirs(toHdfsPath(path))

  override def delete(path: String): Unit = HdfsUtils.delete(toHdfsPath(path))

  override def move(srcPath: String, dstPath: String): Unit =
    HdfsUtils.move(toHdfsPath(srcPath), toHdfsPath(dstPath))

  override def upload(srcPath: String, dstPath: String, delSrc: Boolean, overwrite: Boolean): Unit =
    HdfsUtils.upload(toHdfsPath(srcPath), toHdfsPath(dstPath), delSrc = delSrc, overwrite = overwrite)

  override def copy(srcPath: String, dstPath: String, delSrc: Boolean, overwrite: Boolean): Unit =
    HdfsUtils.copyHdfs(toHdfsPath(srcPath), toHdfsPath(dstPath), delSrc = delSrc, overwrite = overwrite)

  override def copyDir(srcPath: String, dstPath: String, delSrc: Boolean, overwrite: Boolean): Unit =
    HdfsUtils.copyHdfsDir(toHdfsPath(srcPath), toHdfsPath(dstPath), delSrc = delSrc, overwrite = overwrite)

  override def fileMd5(path: String): String = HdfsUtils.fileMd5(toHdfsPath(path))

  /** Prefixes the cluster's default FS scheme unless the path is already a full hdfs:// URI. */
  private def toHdfsPath(path: String): String =
    if (path.startsWith("hdfs://")) path else HdfsUtils.getDefaultFS.concat(path)

}


Original file line number Diff line number Diff line change
@@ -0,0 +1,110 @@
/*
* Copyright (c) 2021 The StreamX Project
* <p>
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package com.streamxhub.streamx.common.fs

import com.streamxhub.streamx.common.util.Logger
import com.streamxhub.streamx.common.util.Utils.{isAnyBank, notEmpty}
import org.apache.commons.codec.digest.DigestUtils
import org.apache.commons.io.{FileUtils, IOUtils}
import org.apache.commons.lang.StringUtils

import java.io.{File, FileInputStream}

/**
* Local File System (aka LFS) Operator
*/
//noinspection DuplicatedCode
object LfsOperator extends FsOperator with Logger {

  /** Returns true when the path is non-blank and exists on the local file system. */
  override def exists(path: String): Boolean = {
    StringUtils.isNotBlank(path) && new File(path).exists()
  }

  /** Creates the directory (and missing parents); no-op for blank paths. */
  override def mkdirs(path: String): Unit = {
    if (!isAnyBank(path)) {
      FileUtils.forceMkdir(new File(path))
    }
  }

  /** Deletes the file or directory; only warns when the target does not exist. */
  override def delete(path: String): Unit = {
    if (notEmpty(path)) {
      val file = new File(path)
      if (!file.exists()) {
        logWarn(s"delete file: file is no exists, ${path}")
      } else {
        FileUtils.forceDelete(file)
      }
    }
  }

  /**
   * Moves srcPath to dstPath. When dstPath is an existing directory the
   * source keeps its own name inside that directory. Moving a path onto
   * itself is a no-op.
   */
  override def move(srcPath: String, dstPath: String): Unit = {
    if (!isAnyBank(srcPath, dstPath)) {
      val srcFile = new File(srcPath)
      var dstFile = new File(dstPath)
      if (dstFile.isDirectory) {
        dstFile = new File(dstFile.getAbsolutePath.concat("/").concat(srcFile.getName))
      }
      if (srcFile.getCanonicalPath != dstFile.getCanonicalPath) {
        FileUtils.moveFile(srcFile, dstFile)
      }
    }
  }

  /** Uploads a file or a whole directory, dispatching on the source type. */
  override def upload(srcPath: String, dstPath: String, delSrc: Boolean, overwrite: Boolean): Unit = {
    if (new File(srcPath).isDirectory) {
      copyDir(srcPath, dstPath, delSrc, overwrite)
    } else {
      copy(srcPath, dstPath, delSrc, overwrite)
    }
  }

  /**
   * Copies a single file.
   *
   * Fix: the previous guard (`overwrite && !dstFile.exists()`) never replaced
   * an existing target even with overwrite = true, and never copied at all
   * with overwrite = false. The copy now happens when the target is absent OR
   * overwrite is requested. The delSrc flag — previously ignored — now
   * removes the source after a successful copy.
   */
  override def copy(srcPath: String, dstPath: String, delSrc: Boolean, overwrite: Boolean): Unit = {
    if (!isAnyBank(srcPath, dstPath)) {
      val srcFile = new File(srcPath)
      var dstFile = new File(dstPath)
      if (dstFile.isDirectory) {
        // Copying into a directory: keep the source file's name.
        dstFile = new File(dstFile.getAbsolutePath.concat("/").concat(srcFile.getName))
      }
      if (srcFile.getCanonicalPath != dstFile.getCanonicalPath && (overwrite || !dstFile.exists())) {
        FileUtils.copyFile(srcFile, dstFile)
        if (delSrc) {
          FileUtils.forceDelete(srcFile)
        }
      }
    }
  }

  /**
   * Copies a directory tree; same overwrite/delSrc semantics as copy
   * (both were broken/ignored here as well — see copy for details).
   */
  override def copyDir(srcPath: String, dstPath: String, delSrc: Boolean, overwrite: Boolean): Unit = {
    if (!isAnyBank(srcPath, dstPath)) {
      val srcFile = new File(srcPath)
      val dstFile = new File(dstPath)
      if (srcFile.getCanonicalPath != dstFile.getCanonicalPath && (overwrite || !dstFile.exists())) {
        FileUtils.copyDirectory(srcFile, dstFile)
        if (delSrc) {
          FileUtils.forceDelete(srcFile)
        }
      }
    }
  }

  /**
   * Hex MD5 digest of the file content.
   *
   * Fix: the FileInputStream was previously never closed (file-handle leak);
   * it is now closed in a finally block.
   */
  override def fileMd5(path: String): String = {
    val in = new FileInputStream(path)
    try DigestUtils.md5Hex(IOUtils.toByteArray(in))
    finally in.close()
  }

}



0 comments on commit 7ae4d15

Please sign in to comment.