-
Notifications
You must be signed in to change notification settings - Fork 583
/
MetadataCollector.java
156 lines (141 loc) · 6.55 KB
/
MetadataCollector.java
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
package com.qihoo.qsql.metadata.collect;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.qihoo.qsql.metadata.ColumnValue;
import com.qihoo.qsql.metadata.MetadataClient;
import com.qihoo.qsql.metadata.collect.dto.ElasticsearchProp;
import com.qihoo.qsql.metadata.collect.dto.HiveProp;
import com.qihoo.qsql.metadata.collect.dto.JdbcProp;
import com.qihoo.qsql.metadata.collect.dto.MongoPro;
import com.qihoo.qsql.metadata.entity.DatabaseParamValue;
import com.qihoo.qsql.metadata.entity.DatabaseValue;
import com.qihoo.qsql.metadata.entity.TableValue;
import com.qihoo.qsql.org.apache.calcite.tools.JdbcSourceInfo;
import java.io.File;
import java.sql.SQLException;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import org.apache.log4j.PropertyConfigurator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Base class for metadata collectors: connects to an external data source,
 * reads its schema, and persists database/table/column metadata through
 * {@link MetadataClient} inside a single transaction.
 *
 * <p>Subclasses implement the {@code convert*}/{@code getTableNameList} hooks
 * for a concrete source (Hive, JDBC, Elasticsearch, MongoDB, ...).
 */
public abstract class MetadataCollector {
    private static final Logger LOGGER = LoggerFactory.getLogger(MetadataCollector.class);
    // ObjectMapper is thread-safe after configuration; share a single final instance.
    private static final ObjectMapper mapper = new ObjectMapper();

    static {
        // Configure log4j from $QSQL_HOME/conf/log4j.properties when QSQL_HOME is set.
        String logProp;
        if (((logProp = System.getenv("QSQL_HOME")) != null) && !logProp.isEmpty()) {
            PropertyConfigurator.configure(logProp
                + File.separator + "conf" + File.separator + "log4j.properties");
        }
    }

    // Regexp used by subclasses to filter which tables are collected.
    String filterRegexp;
    private final MetadataClient client = new MetadataClient();

    MetadataCollector(String filterRegexp) throws SQLException {
        this.filterRegexp = filterRegexp;
    }

    /**
     * Creates the collector matching {@code dataSource}.
     *
     * @param json       connection properties as a JSON string, deserialized into the
     *                   source-specific DTO ({@code HiveProp}, {@code JdbcProp}, ...)
     * @param dataSource data source type name, matched case-insensitively
     * @param regexp     table-name filter regexp passed to the collector
     * @return a concrete {@link MetadataCollector} for the given source
     * @throws RuntimeException wrapping any deserialization/connection failure,
     *                          or for an unsupported data source
     */
    public static MetadataCollector create(String json, String dataSource, String regexp) {
        try {
            LOGGER.info("Connecting server.....");
            dataSource = dataSource.toLowerCase();
            // Sources registered in JdbcSourceInfo take precedence over the built-ins below.
            Map<String, Map<String, String>> sourceMap = JdbcSourceInfo.getSourceMap();
            if (sourceMap.containsKey(dataSource)) {
                String collectorClassName = sourceMap.get(dataSource).get("collectorClass");
                if ("hive".equals(collectorClassName)) {
                    return new HIveJdbcCollector(mapper.readValue(json, HiveProp.class), regexp, dataSource);
                } else {
                    return new JdbcCollector(mapper.readValue(json, JdbcProp.class), regexp,
                        sourceMap.get(dataSource), dataSource);
                }
            }
            // dataSource is already lower-cased above; no need to lower it again here.
            switch (dataSource) {
                case "hive":
                    return new HiveCollector(
                        mapper.readValue(json, HiveProp.class), regexp);
                case "es":
                case "elasticsearch":
                    return new ElasticsearchCollector(
                        mapper.readValue(json, ElasticsearchProp.class), regexp);
                case "mongo":
                    // NOTE(review): anonymous subclass kept as in the original —
                    // MongoCollector may be abstract; confirm before simplifying.
                    return new MongoCollector(
                        mapper.readValue(json, MongoPro.class), regexp) {
                    };
                default:
                    throw new RuntimeException("Unsupported datasource.");
            }
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }

    /**
     * Command-line entrance.
     *
     * <p>Expects three arguments: connection properties (JSON), data source type,
     * and the table filter regexp. The original guard accepted two arguments but
     * then read {@code args[2]} unconditionally, failing with
     * {@code ArrayIndexOutOfBoundsException}; the guard now matches actual usage.
     */
    public static void main(String[] args) throws SQLException {
        if (args.length < 3) {
            throw new RuntimeException("Required conn info, type and filter regexp");
        }
        LOGGER.info("Input params: properties({}), type({}), filter regex({})",
            args[0], args[1], args[2]);
        MetadataCollector.create(args[0], args[1], args[2]).execute();
        System.exit(0);
    }

    /**
     * Collects and persists metadata in one transaction: upserts the database row,
     * then for each table either inserts its schema or refreshes its columns
     * (delete + re-insert). Commits on success; rolls back and rethrows on failure.
     *
     * @throws SQLException if any metadata operation fails
     */
    public void execute() throws SQLException {
        try {
            LOGGER.info("Connected successfully!!");
            client.setAutoCommit(false);
            DatabaseValue dbValue = convertDatabaseValue();
            Long dbId;
            DatabaseValue origin = client.getBasicDatabaseInfo(dbValue.getName());
            if (Objects.isNull(origin)) {
                dbId = client.insertBasicDatabaseInfo(dbValue);
                List<DatabaseParamValue> dbParams = convertDatabaseParamValue(dbId);
                client.insertDatabaseSchema(dbParams);
                LOGGER.info("Insert database {} successfully!!", dbValue.getName());
            } else {
                dbId = origin.getDbId();
                LOGGER.info("Reuse database {}!!", dbValue);
            }
            List<String> tableNames = getTableNameList();
            tableNames.forEach(tableName -> {
                Long tbId;
                // getTableSchema matches by name only; restrict to this database's id.
                List<TableValue> originTable = client.getTableSchema(tableName);
                if (originTable.stream().noneMatch(val -> val.getDbId().equals(dbId))) {
                    TableValue tableValue = convertTableValue(dbId, tableName);
                    tbId = client.insertTableSchema(tableValue);
                    LOGGER.info("Insert table {} successfully!!", tableValue.getTblName());
                    List<ColumnValue> cols = convertColumnValue(tbId, tableName, dbValue.getName());
                    if (cols.isEmpty()) {
                        throw new RuntimeException("No column found in table '" + tableName + "'.");
                    }
                    client.insertFieldsSchema(cols);
                } else {
                    TableValue shoot = originTable.stream()
                        .filter(val -> val.getDbId().equals(dbId)).findFirst()
                        .orElseThrow(() -> new RuntimeException("Query table error."));
                    tbId = shoot.getTblId();
                    LOGGER.info("Reuse table {}!!", shoot.getTblName());
                    // Refresh columns: drop the old field rows, then re-insert.
                    client.deleteFieldsSchema(tbId);
                    LOGGER.info("Delete fields of table {}!!", shoot.getTblName());
                    List<ColumnValue> cols = convertColumnValue(tbId, tableName, dbValue.getName());
                    client.insertFieldsSchema(cols);
                }
            });
            client.commit();
            LOGGER.info("Successfully collected metadata for {} tables!!", tableNames.size());
            LOGGER.info(String.join("\n", tableNames));
        } catch (SQLException | RuntimeException ex) {
            // Also catch RuntimeException: the forEach lambda above throws it
            // (e.g. "No column found"), and the original code left the
            // transaction open in that case. Roll back, log with the cause,
            // and rethrow so callers (and the process exit code) see failure
            // instead of a silent INFO line and exit status 0.
            client.rollback();
            LOGGER.error("Collect metadata failed!!", ex);
            throw ex;
        }
    }

    /** Builds the database-level metadata row for this source. */
    protected abstract DatabaseValue convertDatabaseValue();

    /** Builds the connection-parameter rows attached to database {@code dbId}. */
    protected abstract List<DatabaseParamValue> convertDatabaseParamValue(Long dbId);

    /** Builds the table-level metadata row for {@code tableName} under {@code dbId}. */
    protected abstract TableValue convertTableValue(Long dbId, String tableName);

    /** Builds the column rows of {@code tableName}; must not be empty for a new table. */
    protected abstract List<ColumnValue> convertColumnValue(Long tbId, String tableName, String dbName);

    /** Lists the table names to collect, typically filtered by {@link #filterRegexp}. */
    protected abstract List<String> getTableNameList();
}