Merge pull request #6 from wxgzgl/master

20201012

jinguangyang committed Oct 12, 2020
2 parents da6404c + f0f17a7 commit 1cf5e81
Showing 26 changed files with 932 additions and 1,127 deletions.
2 changes: 1 addition & 1 deletion .gitignore
@@ -5,4 +5,4 @@
*/target/*
data/*
*/*.tar.gz
*/*/test/*.java
*/*/test/*.java
62 changes: 26 additions & 36 deletions README.md
@@ -10,6 +10,8 @@ LarkMidTable is a one-stop open-source data middle platform that provides metadata management, data



[![Stargazers over time](https://starchart.cc/wxgzgl/larkMidTable.svg)](#)

# **Product Vision**

1. Serve the many small enterprises by providing a one-stop solution.
@@ -20,55 +22,39 @@ LarkMidTable is a one-stop open-source data middle platform that provides metadata management, data



# Technology Stack

| Framework | Purpose | Key Features |
| ------------------------------------------------------------ | ------------------ | ------------------------------------------------------------ |
| [Dolphin](https://github.com/apache/incubator-dolphinscheduler) | Task scheduling | Links tasks in a DAG and monitors their status in real time; supports more than ten task types (Shell, MR, Spark, SQL, dependencies, and more); runs stably at a scale of 100,000 tasks per day |
| [Flink](https://github.com/apache/flink) | Offline and real-time compute engine | Unified stream and batch processing, machine learning (FlinkML), graph analytics (Gelly), complex event processing (CEP), and relational processing (Table) |
| [Hive](https://github.com/apache/hive) | Data warehouse | Maps structured data files to database tables and provides SQL querying by translating SQL statements into MapReduce jobs |
| [Kylin](https://github.com/apache/kylin) | Analytical database | Supports SQL; Kylin is SQL on HBase |
| [Kafka](https://github.com/apache/kafka) | Message middleware | Application decoupling, asynchronous processing, traffic peak shaving, log processing, and inter-service messaging |
| [Kubernetes](https://github.com/kubernetes/kubernetes) | Container orchestration | Restarts failed containers, elastic scaling, automatic service discovery and load balancing, rolling upgrades with one-click rollback |
| [Zookeeper](https://github.com/apache/zookeeper) | Distributed coordination service | Unified naming service, configuration management, cluster management, and queue management |



# Product Architecture

![System architecture diagram](https://img2020.cnblogs.com/blog/622382/202009/622382-20200930001500385-1504321257.jpg)



## Currently Supported Databases
# Supported Databases

| Database     | Read      | Write     |
| :----------- | --------- | --------- |
| **Offline data** | | |
| Mysql | Supported | Supported |
| PostgreSql | Supported | Supported |
| Mongodb | Supported | Supported |
| SqlServer | | |
| ClickHouse | | |
| Hive | | Supported |
| Hbase | | |
| Hdfs | Supported | Supported |
| | | |
| **Real-time data** | | |
| Kafka | | |
| | Database | Read | Write |
| ----------- | :------------- | --------- | --------- |
| Batch sync | Mysql | Supported | Supported |
| | PostgreSql | Supported | Supported |
| | Mongodb | Supported | Supported |
| | SqlServer | Supported | Supported |
| | Oracle | | |
| | ClickHouse | | |
| | Hive | | Supported |
| | Hbase | Supported | Supported |
| | Hdfs | Supported | Supported |
| | ElasticSearch | | |
| | Kudu | | |
| | Redis | | |
| Stream sync | Kafka | | |
| | MySQL Binlog | | |
| | MongoDB Oplog | | |
| | PostgreSQL WAL | | |



# **Quick Start**

See [Quick Start](https://github.com/wxgzgl/flinkx-web/blob/master/userGuid.md)

Front-end code: [LarkMidTableUI](https://github.com/wxgzgl/LarkMidTableUI)

Resources: [Development resource library](https://github.com/wxgzgl/flinkx-web/blob/master/docs/list.md)

Coding standards: [Vipshop coding standards](https://vipshop.github.io/vjtools/#/standard/)



# Special Thanks
@@ -81,4 +67,8 @@ LarkMidTable is a one-stop open-source data middle platform that provides metadata management, data

Search for QQ group [678097205] or scan the QR code below to join the LarkMidTable community QQ group. The project is in its early open-source stage, and the group has reached 91 members.

**![QQ group](https://img2020.cnblogs.com/blog/622382/202009/622382-20200907124358049-997953244.png)**
Front-end code: [LarkMidTableUI](https://github.com/wxgzgl/LarkMidTableUI)

Resources: [Development resource library](https://github.com/wxgzgl/flinkx-web/blob/master/docs/list.md)

**![QQ group](https://img2020.cnblogs.com/blog/622382/202009/622382-20200907124358049-997953244.png)**
14 changes: 0 additions & 14 deletions docs/k8s1.md

This file was deleted.

124 changes: 0 additions & 124 deletions docs/linuxInstall.md

This file was deleted.

1 change: 0 additions & 1 deletion docs/store.md

This file was deleted.

ClickhouseReaderDto.java (new file)
@@ -0,0 +1,16 @@
package com.guoliang.flinkx.admin.dto;

import lombok.Data;

import java.io.Serializable;

/**
*
* @author gavin
* @ClassName clickhouse reader dto
* @Version 2.0
* @since 2020/9/29
*/
@Data
public class ClickhouseReaderDto implements Serializable {
}
ClickhouseWriterDto.java (new file)
@@ -0,0 +1,16 @@
package com.guoliang.flinkx.admin.dto;

import lombok.Data;

import java.io.Serializable;

/**
*
* @author gavin
* @ClassName clickhouse write dto
* @Version 2.0
* @since 2020/9/29
*/
@Data
public class ClickhouseWriterDto implements Serializable {
}
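
Both new DTOs are deliberately empty placeholders: Lombok's `@Data` will generate getters, setters, `equals`, `hashCode`, and `toString` for whatever fields get added later. As a hedged sketch of where this is likely headed (the field names below are hypothetical, mirroring common FlinkX reader options, and are not part of this commit):

```java
package com.guoliang.flinkx.admin.dto;

import lombok.Data;

import java.io.Serializable;

// Hypothetical sketch only: fields a ClickHouse reader DTO might carry once
// the front end sends reader-specific options. The committed class is empty.
@Data
public class ClickhouseReaderDtoSketch implements Serializable {

    // Optional WHERE fragment appended to the generated extraction query
    private String whereClause;

    // Column used to split the read into parallel channels
    private String splitPk;
}
```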
FlinkXJsonBuildDto.java
@@ -43,4 +43,8 @@ public class FlinkXJsonBuildDto implements Serializable {
private MongoDBReaderDto mongoDBReader;

private MongoDBWriterDto mongoDBWriter;

private ClickhouseReaderDto clickhouseReader;

private ClickhouseWriterDto clickhouseWriter;
}
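
`FlinkXJsonBuildDto` is the aggregate request object the admin API binds an incoming JSON body onto; a given request is expected to populate exactly one reader DTO and one writer DTO. A hedged usage sketch (the controller name and endpoint path are assumptions, not code from this commit):

```java
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RestController;

// Hypothetical sketch of how the aggregate DTO is consumed; assumes the
// com.guoliang.flinkx.admin.dto classes above are on the classpath.
@RestController
public class JsonBuildControllerSketch {

    @PostMapping("/api/json/build")
    public String buildJobJson(@RequestBody FlinkXJsonBuildDto dto) {
        // A ClickHouse-to-ClickHouse request populates only the two new
        // fields; the other reader/writer DTOs on the body stay null.
        ClickhouseReaderDto reader = dto.getClickhouseReader();
        ClickhouseWriterDto writer = dto.getClickhouseWriter();
        return (reader != null && writer != null) ? "clickhouse job" : "other job";
    }
}
```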
FlinkxJsonHelper.java
@@ -9,12 +9,9 @@
import com.guoliang.flinkx.admin.entity.JobDatasource;
import com.guoliang.flinkx.admin.tool.flinkx.reader.*;
import com.guoliang.flinkx.admin.tool.flinkx.writer.*;
import com.guoliang.flinkx.admin.tool.pojo.*;
import com.guoliang.flinkx.admin.util.JdbcConstants;
import com.guoliang.flinkx.core.util.Constants;
import com.guoliang.flinkx.admin.tool.pojo.FlinkxHbasePojo;
import com.guoliang.flinkx.admin.tool.pojo.FlinkxHivePojo;
import com.guoliang.flinkx.admin.tool.pojo.FlinkxMongoDBPojo;
import com.guoliang.flinkx.admin.tool.pojo.FlinkxRdbmsPojo;
import lombok.Data;
import org.apache.commons.lang3.StringUtils;
import org.springframework.util.CollectionUtils;
@@ -80,6 +77,10 @@ public class FlinkxJsonHelper implements FlinkxJsonInterface {

private MongoDBWriterDto mongoDBWriterDto;

private ClickhouseReaderDto clickhouseReaderDto;

private ClickhouseWriterDto clickhouseWriterDto;


// Holds extra parameters
private Map<String, Object> extraParams = Maps.newHashMap();
@@ -92,6 +93,7 @@ public void initReader(FlinkXJsonBuildDto flinkxJsonDto, JobDatasource readerDat
this.hiveReaderDto = flinkxJsonDto.getHiveReader();
this.rdbmsReaderDto = flinkxJsonDto.getRdbmsReader();
this.hbaseReaderDto = flinkxJsonDto.getHbaseReader();
this.clickhouseReaderDto = flinkxJsonDto.getClickhouseReader();
// reader plugin
String datasource = readerDatasource.getDatasource();

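
After copying each reader DTO off the request object, `initReader` dispatches on the datasource type string to select the matching reader plugin. A hedged sketch of the new ClickHouse branch (the constant and class names are assumptions inferred from the imports above, not lines visible in this diff):

```java
// Hypothetical continuation of initReader(...): dispatch on datasource type.
// JdbcConstants.CLICKHOUSE and ClickHouseReader are assumed names.
if (JdbcConstants.CLICKHOUSE.equals(datasource)) {
    // ClickHouse reads go through a JDBC-style reader plugin,
    // parameterized by clickhouseReaderDto
    readerPlugin = new ClickHouseReader();
}
```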
HBaseReader.java
@@ -9,7 +9,7 @@
public class HBaseReader extends BaseReaderPlugin implements FlinkxReaderInterface {
@Override
public String getName() {
return "hbase11xreader";
return "hbasereader";
}

@Override
@@ -23,7 +23,11 @@ public Map<String, Object> buildHbase(FlinkxHbasePojo plugin) {
readerObj.put("name", getName());
Map<String, Object> parameterObj = Maps.newLinkedHashMap();
Map<String, Object> confige = Maps.newLinkedHashMap();
confige.put("hbase.zookeeper.quorum",plugin.getReaderHbaseConfig());
confige.put("hbase.zookeeper.property.clientPort", plugin.getReaderHbaseConfig().split(":")[1]);
// confige.put("hbase.rootdir", plugin.getWriterHbaseConfig());
confige.put("hbase.cluster.distributed", "true");
confige.put("hbase.zookeeper.quorum", plugin.getReaderHbaseConfig().split(":")[0]);
confige.put("zookeeper.znode.parent", "/hbase");
parameterObj.put("hbaseConfig", confige);
parameterObj.put("table", plugin.getReaderTable());
parameterObj.put("mode", plugin.getReaderMode());
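
Both `buildHbase` implementations now expect the HBase config string in `host:port` form and derive the quorum host and ZooKeeper client port from it, instead of passing the raw string as the quorum. A minimal sketch of that parsing contract, assuming a single `host:port` entry (the class and method names here are illustrative, not from the commit):

```java
import java.util.LinkedHashMap;
import java.util.Map;

// Illustrative helper (not from the commit) showing the host:port contract
// the new reader/writer code relies on.
public final class HbaseZkConfigSketch {

    // Splits a single "host:port" string into the two ZooKeeper properties
    // that buildHbase(...) now emits. Note the committed split(":")[1] logic
    // handles exactly one host:port pair; a multi-host quorum such as
    // "node1:2181,node2:2181" would need per-host parsing instead.
    static Map<String, Object> fromHostPort(String hostPort) {
        String[] parts = hostPort.split(":");
        if (parts.length != 2) {
            throw new IllegalArgumentException("expected host:port, got: " + hostPort);
        }
        Map<String, Object> conf = new LinkedHashMap<>();
        conf.put("hbase.zookeeper.quorum", parts[0]);
        conf.put("hbase.zookeeper.property.clientPort", parts[1]);
        conf.put("hbase.cluster.distributed", "true");
        conf.put("zookeeper.znode.parent", "/hbase");
        return conf;
    }
}
```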
HBaseWriter.java
@@ -10,7 +10,7 @@
public class HBaseWriter extends BaseWriterPlugin implements FlinkxWriterInterface {
@Override
public String getName() {
return "hbase11xwriter";
return "hbasewriter";
}

@Override
@@ -24,12 +24,17 @@ public Map<String, Object> buildHbase(FlinkxHbasePojo plugin) {
writerObj.put("name", getName());
Map<String, Object> parameterObj = Maps.newLinkedHashMap();
Map<String, Object> confige = Maps.newLinkedHashMap();
confige.put("hbase.zookeeper.quorum", plugin.getWriterHbaseConfig());

confige.put("hbase.zookeeper.property.clientPort", plugin.getWriterHbaseConfig().split(":")[1]);
// confige.put("hbase.rootdir", plugin.getWriterHbaseConfig());
confige.put("hbase.cluster.distributed", "true");
confige.put("hbase.zookeeper.quorum", plugin.getWriterHbaseConfig().split(":")[0]);
confige.put("zookeeper.znode.parent", "/hbase");
parameterObj.put("hbaseConfig", confige);
parameterObj.put("table", plugin.getWriterTable());
parameterObj.put("mode", plugin.getWriterMode());
parameterObj.put("column", plugin.getColumns());
parameterObj.put("rowkeyColumn", JSON.parseArray(plugin.getWriterRowkeyColumn()));
parameterObj.put("rowkeyColumn", plugin.getWriterRowkeyColumn());
if (StringUtils.isNotBlank(plugin.getWriterVersionColumn().getValue())) {
parameterObj.put("versionColumn", plugin.getWriterVersionColumn());
}
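
One behavioral change worth noting: the writer no longer pre-parses the rowkey column spec with fastjson's `JSON.parseArray`, so a malformed spec no longer fails inside `buildHbase`. An annotated excerpt of the change:

```java
// Before: the spec was eagerly parsed into a JSONArray, so malformed input
// threw here, inside buildHbase(...).
parameterObj.put("rowkeyColumn", JSON.parseArray(plugin.getWriterRowkeyColumn()));

// After: the raw string is passed through unchanged, and any validation
// happens wherever the generated job JSON is eventually consumed.
parameterObj.put("rowkeyColumn", plugin.getWriterRowkeyColumn());
```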