[BitSail] Add connector kudu #89

Merged
49 commits merged on Nov 8, 2022
Changes from all commits
Commits (49)
615e582
Initialize kudu connector module.
BlockLiu Oct 27, 2022
aefe9d1
add kudu test
BlockLiu Oct 28, 2022
28de78b
Merge remote-tracking branch 'upstream/master' into add-connector-kudu
BlockLiu Oct 31, 2022
fbf867b
Initialize kudu writer.
BlockLiu Oct 31, 2022
d7e05b4
Merge remote-tracking branch 'upstream/master' into add-connector-kudu
BlockLiu Oct 31, 2022
d7ad17c
Finish kudu writer.
BlockLiu Nov 1, 2022
fc13352
Add type converter file.
BlockLiu Nov 1, 2022
60512c5
Add kudu writer itcase test.
BlockLiu Nov 1, 2022
576756e
Fix kudu writer itcase test.
BlockLiu Nov 1, 2022
3d8a7f3
Initialize kudu reader
BlockLiu Nov 2, 2022
1474d1d
Merge remote-tracking branch 'upstream/master' into add-connector-kudu
BlockLiu Nov 2, 2022
5e34e03
add license
BlockLiu Nov 2, 2022
59953b1
Add kudu split constructor.
BlockLiu Nov 2, 2022
5717d26
Add source for kudu.
BlockLiu Nov 2, 2022
292d2d7
Fix serializable things.
BlockLiu Nov 3, 2022
90136a4
Add row converters.
BlockLiu Nov 3, 2022
fa170cf
Fix checkstyle.
BlockLiu Nov 3, 2022
ecbee24
Merge remote-tracking branch 'upstream/master' into add-connector-kudu
BlockLiu Nov 3, 2022
7428e49
Finish kudu source.
BlockLiu Nov 3, 2022
e55c337
Fix bug that schema format cannot be serialized.
BlockLiu Nov 4, 2022
a30d100
Merge remote-tracking branch 'upstream/master' into add-connector-kudu
BlockLiu Nov 4, 2022
dad6340
Add compile/package info.
BlockLiu Nov 4, 2022
0820520
Move fake row generator as a class.
BlockLiu Nov 4, 2022
7e42d2f
Fix read/writer null value.
BlockLiu Nov 4, 2022
f29b80a
Add null value in itcase test.
BlockLiu Nov 4, 2022
dc9490c
Optimize reader itcase test.
BlockLiu Nov 4, 2022
ffa3cbd
Merge remote-tracking branch 'upstream/master' into add-connector-kudu
BlockLiu Nov 4, 2022
182f23a
Fix bug for the split coordinator.
BlockLiu Nov 7, 2022
6a0493a
Optimize kudu source reader.
BlockLiu Nov 7, 2022
accb1dd
Add license.
BlockLiu Nov 7, 2022
640f715
Merge remote-tracking branch 'upstream/master' into add-connector-kudu
BlockLiu Nov 7, 2022
c52f7d2
fix typo.
BlockLiu Nov 7, 2022
b3e2e65
Optimize reader itcase.
BlockLiu Nov 7, 2022
9e84f6e
fix lib jar name.
BlockLiu Nov 7, 2022
26f315e
Merge branch 'bytedance:master' into add-connector-kudu
BlockLiu Nov 7, 2022
4bf07c1
Merge branch 'add-connector-kudu' of https://github.com/BlockLiu/bits…
BlockLiu Nov 7, 2022
5ad6295
Remove all writer generator.
BlockLiu Nov 7, 2022
8daca4a
Optimize itcase test.
BlockLiu Nov 7, 2022
63184a7
Add more unit test.
BlockLiu Nov 7, 2022
496aa5d
Merge remote-tracking branch 'upstream/master' into add-connector-kudu
BlockLiu Nov 7, 2022
0eeff1d
Merge branch 'bytedance:master' into add-connector-kudu
BlockLiu Nov 7, 2022
892a563
Refactor itcase to a new module.
BlockLiu Nov 7, 2022
cf89821
Add docs for kudu connector.
BlockLiu Nov 7, 2022
ae9a95c
Optimize as comment
BlockLiu Nov 8, 2022
3ee592d
Rename kudu itcase module.
BlockLiu Nov 8, 2022
562dc5f
Merge remote-tracking branch 'upstream/master' into add-connector-kudu
BlockLiu Nov 8, 2022
23c711e
Merge branch 'bytedance:master' into add-connector-kudu
BlockLiu Nov 8, 2022
ccba58a
Merge branch 'add-connector-kudu' of https://github.com/BlockLiu/bits…
BlockLiu Nov 8, 2022
5d63eb7
Add license.
BlockLiu Nov 8, 2022
@@ -26,14 +26,15 @@
import com.bytedance.bitsail.common.type.BitSailTypeInfoConverter;
import com.bytedance.bitsail.common.type.TypeInfoConverter;

import java.io.IOException;
import java.io.Serializable;

public interface Source<T, SplitT extends SourceSplit, StateT extends Serializable> extends Serializable {

/**
* Run in client side for source initialize;
*/
void configure(ExecutionEnviron execution, BitSailConfiguration readerConfiguration);
void configure(ExecutionEnviron execution, BitSailConfiguration readerConfiguration) throws IOException;

/**
* Indicate the Source type.
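
The hunk above adds a checked IOException to Source#configure, so a connector can fail fast on client-side setup problems instead of wrapping them in unchecked exceptions. A minimal sketch of an implementation taking advantage of it; the override is shown in isolation, and the field assignment and validation step are hypothetical rather than taken from the Kudu connector in this PR:

// Hypothetical connector source; only the configure override is sketched here.
@Override
public void configure(ExecutionEnviron execution, BitSailConfiguration readerConfiguration) throws IOException {
  this.readerConfiguration = readerConfiguration;
  // Client-side validation that touches an external system (for example probing the
  // configured master addresses) can now throw IOException and surface during the build phase.
}
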
@@ -21,7 +21,9 @@

import com.bytedance.bitsail.common.typeinfo.TypeInfo;

public interface DeserializationFormat<I, O> {
import java.io.Serializable;

public interface DeserializationFormat<I, O> extends Serializable {

DeserializationSchema<I, O> createRuntimeDeserializationSchema(TypeInfo<?>[] typeInfos);
}
@@ -21,7 +21,9 @@

import com.bytedance.bitsail.common.typeinfo.TypeInfo;

public interface SerializationFormat<T> {
import java.io.Serializable;

public interface SerializationFormat<T> extends Serializable {

T createRuntimeSerializationSchema(TypeInfo<?>[] typeInfos);

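
Both DeserializationFormat and SerializationFormat above now extend Serializable, so a format instance created during configuration can travel with the (serializable) source or sink to the task instances. A small sketch of why this matters; ExampleSource and the generic parameters chosen for the field are illustrative, and the format's import is omitted because its package is not shown in this diff:

import java.io.Serializable;

import com.bytedance.bitsail.common.row.Row;

public class ExampleSource implements Serializable {
  // Before this change, keeping a format as a plain field of a Serializable component
  // risked a NotSerializableException at job submission, so the field had to be transient
  // and rebuilt on every task. Now it can simply be serialized along with its owner.
  private DeserializationFormat<byte[], Row> deserializationFormat;
}
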
@@ -106,6 +106,10 @@ public Date getDate(int pos) {
return (Date) this.fields[pos];
}

public java.sql.Date getSqlDate(int pos) {
return (java.sql.Date) this.fields[pos];
}

public long getLong(int pos) {
return (Long) this.fields[pos];
}
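
getSqlDate complements the existing getDate accessor for fields stored as java.sql.Date (useful for connectors whose date columns map to java.sql.Date), avoiding an explicit cast at each call site. A short usage sketch; the two-field layout is illustrative:

import com.bytedance.bitsail.common.row.Row;

public class RowSqlDateExample {
  public static void main(String[] args) {
    Row row = new Row(2);                                            // two fields
    row.setField(0, new java.sql.Date(System.currentTimeMillis()));  // stored as java.sql.Date
    row.setField(1, 42L);
    java.sql.Date day = row.getSqlDate(0);  // new accessor, no manual cast needed
    long id = row.getLong(1);
    System.out.println(day + " " + id);
  }
}
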
@@ -57,7 +57,7 @@
]
},
"writer": {
"class": "com.bytedance.bitsail.connector.doris.sink.DorisWriterGenerator",
"class": "com.bytedance.bitsail.connector.doris.sink.DorisSink",
"fe_hosts": "<your doris fe hosts>",
"mysql_hosts": "<your doris jdbc hosts>",
"user": "<user name>",
@@ -4,6 +4,6 @@
"com.bytedance.bitsail.connector.elasticsearch.sink.ElasticsearchSink"
],
"libs": [
"bitsail-connector-elasticsearch-${version}.jar"
"connector-elasticsearch-${version}.jar"
]
}
@@ -50,4 +50,8 @@ public interface FakeReaderOptions extends ReaderOptions.BaseReaderOptions {
ConfigOption<String> TO_TIMESTAMP =
key(READER_PREFIX + "to_timestamp")
.defaultValue("2077-07-07 07:07:07");

ConfigOption<Integer> NULL_PERCENTAGE =
key(READER_PREFIX + "null_percentage")
.defaultValue(0);
}
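
The new null_percentage option controls how often the fake source emits null for nullable fields; the default of 0 keeps the previous behaviour of never producing nulls. A minimal sketch of the semantics, using ThreadLocalRandom for illustration where the actual generator below uses Faker:

import java.util.concurrent.ThreadLocalRandom;

import com.bytedance.bitsail.common.configuration.BitSailConfiguration;
import com.bytedance.bitsail.connector.fake.option.FakeReaderOptions;

public class NullPercentageSketch {
  // True roughly nullPercentage% of the time; only fields whose TypeInfo carries the
  // NULLABLE property are eligible (see FakeRowGenerator.isNullable in the new file below).
  static boolean shouldEmitNull(BitSailConfiguration jobConf) {
    int nullPercentage = jobConf.get(FakeReaderOptions.NULL_PERCENTAGE);
    return ThreadLocalRandom.current().nextInt(100) < nullPercentage;
  }
}
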
@@ -0,0 +1,164 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/

package com.bytedance.bitsail.connector.fake.source;

import com.bytedance.bitsail.common.configuration.BitSailConfiguration;
import com.bytedance.bitsail.common.row.Row;
import com.bytedance.bitsail.common.typeinfo.BasicArrayTypeInfo;
import com.bytedance.bitsail.common.typeinfo.BasicTypeInfo;
import com.bytedance.bitsail.common.typeinfo.ListTypeInfo;
import com.bytedance.bitsail.common.typeinfo.MapTypeInfo;
import com.bytedance.bitsail.common.typeinfo.TypeInfo;
import com.bytedance.bitsail.common.typeinfo.TypeInfos;
import com.bytedance.bitsail.common.typeinfo.TypeProperty;
import com.bytedance.bitsail.connector.fake.option.FakeReaderOptions;

import cn.ipokerface.snowflake.SnowflakeIdGenerator;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import net.datafaker.Faker;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.ArrayUtils;

import java.math.BigDecimal;
import java.math.BigInteger;
import java.sql.Time;
import java.sql.Timestamp;
import java.util.Map;

import static com.bytedance.bitsail.common.typeinfo.TypeProperty.NULLABLE;

public class FakeRowGenerator {

private final transient Faker faker;
private final transient SnowflakeIdGenerator snowflakeIdGenerator;

private final Integer nullPercentage;
private final long upper;
private final long lower;
private final transient Timestamp fromTimestamp;
private final transient Timestamp toTimestamp;

public FakeRowGenerator(BitSailConfiguration jobConf, int taskId) {
this.faker = new Faker();
this.snowflakeIdGenerator = new SnowflakeIdGenerator(taskId, taskId);

this.nullPercentage = jobConf.get(FakeReaderOptions.NULL_PERCENTAGE);
this.upper = jobConf.get(FakeReaderOptions.UPPER_LIMIT);
this.lower = jobConf.get(FakeReaderOptions.LOWER_LIMIT);
this.fromTimestamp = Timestamp.valueOf(jobConf.get(FakeReaderOptions.FROM_TIMESTAMP));
this.toTimestamp = Timestamp.valueOf(jobConf.get(FakeReaderOptions.TO_TIMESTAMP));
}

public Row fakeOneRecord(TypeInfo<?>[] typeInfos) {
Row row = new Row(ArrayUtils.getLength(typeInfos));
for (int index = 0; index < typeInfos.length; index++) {
TypeInfo<?> typeInfo = typeInfos[index];
if (isNullable(typeInfo) && isNull()) {
row.setField(index, null);
} else {
row.setField(index, fakeRawValue(typeInfo));
}
}
return row;
}

private boolean isNullable(TypeInfo<?> typeInfo) {
return typeInfo instanceof BasicTypeInfo
&& CollectionUtils.isNotEmpty(typeInfo.getTypeProperties())
&& typeInfo.getTypeProperties().contains(NULLABLE);
}

@SuppressWarnings("checkstyle:MagicNumber")
private boolean isNull() {
return (faker.number().randomNumber() % 100) < nullPercentage;
}

@SuppressWarnings("checkstyle:MagicNumber")
private Object fakeRawValue(TypeInfo<?> typeInfo) {

if (TypeInfos.LONG_TYPE_INFO.getTypeClass() == typeInfo.getTypeClass()) {
if (CollectionUtils.isNotEmpty(typeInfo.getTypeProperties()) && typeInfo.getTypeProperties().contains(TypeProperty.UNIQUE)) {
return snowflakeIdGenerator.nextId();
} else {
return faker.number().randomNumber();
}
} else if (TypeInfos.INT_TYPE_INFO.getTypeClass() == typeInfo.getTypeClass()) {
return Long.valueOf(faker.number().randomNumber()).intValue();

} else if (TypeInfos.SHORT_TYPE_INFO.getTypeClass() == typeInfo.getTypeClass()) {
return Long.valueOf(faker.number().randomNumber()).shortValue();

} else if (TypeInfos.STRING_TYPE_INFO.getTypeClass() == typeInfo.getTypeClass()) {
return faker.name().fullName();

} else if (TypeInfos.BOOLEAN_TYPE_INFO.getTypeClass() == typeInfo.getTypeClass()) {
return faker.bool().bool();

} else if (TypeInfos.DOUBLE_TYPE_INFO.getTypeClass() == typeInfo.getTypeClass()) {
return faker.number().randomDouble(5, lower, upper);

} else if (TypeInfos.FLOAT_TYPE_INFO.getTypeClass() == typeInfo.getTypeClass()) {
return Double.valueOf(faker.number().randomDouble(5, lower, upper)).floatValue();

} else if (TypeInfos.BIG_DECIMAL_TYPE_INFO.getTypeClass() == typeInfo.getTypeClass()) {
return new BigDecimal(faker.number().randomDouble(5, lower, upper));

} else if (TypeInfos.BIG_INTEGER_TYPE_INFO.getTypeClass() == typeInfo.getTypeClass()) {
return new BigInteger(String.valueOf(faker.number().randomNumber()));

} else if (BasicArrayTypeInfo.BINARY_TYPE_INFO.getTypeClass() == typeInfo.getTypeClass()) {
return faker.name().fullName().getBytes();

} else if (TypeInfos.SQL_DATE_TYPE_INFO.getTypeClass() == typeInfo.getTypeClass()) {
return new java.sql.Date(faker.date().between(fromTimestamp, toTimestamp).getTime());

} else if (TypeInfos.SQL_TIME_TYPE_INFO.getTypeClass() == typeInfo.getTypeClass()) {
return new Time(faker.date().between(fromTimestamp, toTimestamp).getTime());

} else if (TypeInfos.SQL_TIMESTAMP_TYPE_INFO.getTypeClass() == typeInfo.getTypeClass()) {
return new Timestamp(faker.date().between(fromTimestamp, toTimestamp).getTime());

} else if (TypeInfos.LOCAL_DATE_TYPE_INFO.getTypeClass() == typeInfo.getTypeClass()) {
return faker.date().between(fromTimestamp, toTimestamp).toLocalDateTime().toLocalDate();

} else if (TypeInfos.LOCAL_TIME_TYPE_INFO.getTypeClass() == typeInfo.getTypeClass()) {
return faker.date().between(fromTimestamp, toTimestamp).toLocalDateTime().toLocalTime();

} else if (TypeInfos.LOCAL_DATE_TIME_TYPE_INFO.getTypeClass() == typeInfo.getTypeClass()) {
return faker.date().between(fromTimestamp, toTimestamp).toLocalDateTime();

} else if (TypeInfos.VOID_TYPE_INFO.getTypeClass() == typeInfo.getTypeClass()) {
return null;
}

if (typeInfo instanceof ListTypeInfo) {
ListTypeInfo<?> listTypeInfo = (ListTypeInfo<?>) typeInfo;
return Lists.newArrayList(fakeRawValue(listTypeInfo.getElementTypeInfo()));
}

if (typeInfo instanceof MapTypeInfo) {
MapTypeInfo<?, ?> mapTypeInfo = (MapTypeInfo<?, ?>) typeInfo;
Map<Object, Object> mapRawValue = Maps.newHashMap();
mapRawValue.put(fakeRawValue(mapTypeInfo.getKeyTypeInfo()), fakeRawValue(mapTypeInfo.getValueTypeInfo()));
return mapRawValue;
}
throw new RuntimeException("Unsupported type " + typeInfo);
}
}
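
FakeRowGenerator extracts the record-faking logic that previously lived inside FakeSourceReader (see the reader diff below), so the generator can be constructed and used on its own. A hedged usage sketch; the two-column schema is purely illustrative:

import com.bytedance.bitsail.common.configuration.BitSailConfiguration;
import com.bytedance.bitsail.common.row.Row;
import com.bytedance.bitsail.common.typeinfo.TypeInfo;
import com.bytedance.bitsail.common.typeinfo.TypeInfos;
import com.bytedance.bitsail.connector.fake.source.FakeRowGenerator;

public class FakeRowGeneratorUsage {
  // readerConfiguration carries the fake reader options (null_percentage, limits, timestamps);
  // subtaskIndex seeds the snowflake id generator, exactly as FakeSourceReader passes it.
  static Row oneFakeRecord(BitSailConfiguration readerConfiguration, int subtaskIndex) {
    TypeInfo<?>[] schema = new TypeInfo<?>[] {
        TypeInfos.LONG_TYPE_INFO,     // e.g. an id column
        TypeInfos.STRING_TYPE_INFO    // e.g. a name column
    };
    FakeRowGenerator generator = new FakeRowGenerator(readerConfiguration, subtaskIndex);
    return generator.fakeOneRecord(schema);
  }
}
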
@@ -22,148 +22,42 @@
import com.bytedance.bitsail.base.connector.reader.v1.SourcePipeline;
import com.bytedance.bitsail.common.configuration.BitSailConfiguration;
import com.bytedance.bitsail.common.row.Row;
import com.bytedance.bitsail.common.typeinfo.BasicArrayTypeInfo;
import com.bytedance.bitsail.common.typeinfo.ListTypeInfo;
import com.bytedance.bitsail.common.typeinfo.MapTypeInfo;
import com.bytedance.bitsail.common.typeinfo.TypeInfo;
import com.bytedance.bitsail.common.typeinfo.TypeInfos;
import com.bytedance.bitsail.common.typeinfo.TypeProperty;
import com.bytedance.bitsail.connector.base.source.SimpleSourceReaderBase;
import com.bytedance.bitsail.connector.fake.option.FakeReaderOptions;

import cn.ipokerface.snowflake.SnowflakeIdGenerator;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.util.concurrent.RateLimiter;
import net.datafaker.Faker;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.ArrayUtils;

import java.math.BigDecimal;
import java.math.BigInteger;
import java.sql.Time;
import java.sql.Timestamp;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;

public class FakeSourceReader extends SimpleSourceReaderBase<Row> {

private BitSailConfiguration readerConfiguration;
private TypeInfo<?>[] typeInfos;
private long upper;
private long lower;
private final BitSailConfiguration readerConfiguration;
private final TypeInfo<?>[] typeInfos;

private final transient Faker faker;
private final transient int totalCount;
private final transient RateLimiter fakeGenerateRate;
private final transient AtomicLong counter;
private final transient SnowflakeIdGenerator snowflakeIdGenerator;
private final transient Timestamp fromTimestamp;
private final transient Timestamp toTimestamp;

private final FakeRowGenerator fakeRowGenerator;

public FakeSourceReader(BitSailConfiguration readerConfiguration, Context context) {
this.readerConfiguration = readerConfiguration;
this.typeInfos = context.getTypeInfos();
this.totalCount = readerConfiguration.get(FakeReaderOptions.TOTAL_COUNT);
this.fakeGenerateRate = RateLimiter.create(readerConfiguration.get(FakeReaderOptions.RATE));
this.faker = new Faker();
this.counter = new AtomicLong();
this.snowflakeIdGenerator = new SnowflakeIdGenerator(context.getIndexOfSubtask(),
context.getIndexOfSubtask());
this.upper = readerConfiguration.get(FakeReaderOptions.UPPER_LIMIT);
this.lower = readerConfiguration.get(FakeReaderOptions.LOWER_LIMIT);
this.fromTimestamp = Timestamp.valueOf(readerConfiguration.get(FakeReaderOptions.FROM_TIMESTAMP));
this.toTimestamp = Timestamp.valueOf(readerConfiguration.get(FakeReaderOptions.TO_TIMESTAMP));
this.fakeRowGenerator = new FakeRowGenerator(readerConfiguration, context.getIndexOfSubtask());
}

@Override
public void pollNext(SourcePipeline<Row> pipeline) throws Exception {
fakeGenerateRate.acquire();
pipeline.output(fakeNextRecord());
pipeline.output(fakeRowGenerator.fakeOneRecord(typeInfos));
}

@Override
public boolean hasMoreElements() {
return counter.incrementAndGet() <= totalCount;
}

private Row fakeNextRecord() {
Row row = new Row(ArrayUtils.getLength(typeInfos));
for (int index = 0; index < typeInfos.length; index++) {
row.setField(index, fakeRawValue(typeInfos[index]));
}
return row;
}

@SuppressWarnings("checkstyle:MagicNumber")
private Object fakeRawValue(TypeInfo<?> typeInfo) {

if (TypeInfos.LONG_TYPE_INFO.getTypeClass() == typeInfo.getTypeClass()) {
if (CollectionUtils.isNotEmpty(typeInfo.getTypeProperties()) && typeInfo.getTypeProperties().contains(TypeProperty.UNIQUE)) {
return snowflakeIdGenerator.nextId();
} else {
return faker.number().randomNumber();
}
} else if (TypeInfos.INT_TYPE_INFO.getTypeClass() == typeInfo.getTypeClass()) {
return Long.valueOf(faker.number().randomNumber()).intValue();

} else if (TypeInfos.SHORT_TYPE_INFO.getTypeClass() == typeInfo.getTypeClass()) {
return Long.valueOf(faker.number().randomNumber()).shortValue();

} else if (TypeInfos.STRING_TYPE_INFO.getTypeClass() == typeInfo.getTypeClass()) {
return faker.name().fullName();

} else if (TypeInfos.BOOLEAN_TYPE_INFO.getTypeClass() == typeInfo.getTypeClass()) {
return faker.bool().bool();

} else if (TypeInfos.DOUBLE_TYPE_INFO.getTypeClass() == typeInfo.getTypeClass()) {
return faker.number().randomDouble(5, lower, upper);

} else if (TypeInfos.FLOAT_TYPE_INFO.getTypeClass() == typeInfo.getTypeClass()) {
return Double.valueOf(faker.number().randomDouble(5, lower, upper)).floatValue();

} else if (TypeInfos.BIG_DECIMAL_TYPE_INFO.getTypeClass() == typeInfo.getTypeClass()) {
return new BigDecimal(faker.number().randomDouble(5, lower, upper));

} else if (TypeInfos.BIG_INTEGER_TYPE_INFO.getTypeClass() == typeInfo.getTypeClass()) {
return new BigInteger(String.valueOf(faker.number().randomNumber()));

} else if (BasicArrayTypeInfo.BINARY_TYPE_INFO.getTypeClass() == typeInfo.getTypeClass()) {
return faker.name().fullName().getBytes();

} else if (TypeInfos.SQL_DATE_TYPE_INFO.getTypeClass() == typeInfo.getTypeClass()) {
return new java.sql.Date(faker.date().between(fromTimestamp, toTimestamp).getTime());

} else if (TypeInfos.SQL_TIME_TYPE_INFO.getTypeClass() == typeInfo.getTypeClass()) {
return new Time(faker.date().between(fromTimestamp, toTimestamp).getTime());

} else if (TypeInfos.SQL_TIMESTAMP_TYPE_INFO.getTypeClass() == typeInfo.getTypeClass()) {
return new Timestamp(faker.date().between(fromTimestamp, toTimestamp).getTime());

} else if (TypeInfos.LOCAL_DATE_TYPE_INFO.getTypeClass() == typeInfo.getTypeClass()) {
return faker.date().between(fromTimestamp, toTimestamp).toLocalDateTime().toLocalDate();

} else if (TypeInfos.LOCAL_TIME_TYPE_INFO.getTypeClass() == typeInfo.getTypeClass()) {
return faker.date().between(fromTimestamp, toTimestamp).toLocalDateTime().toLocalTime();

} else if (TypeInfos.LOCAL_DATE_TIME_TYPE_INFO.getTypeClass() == typeInfo.getTypeClass()) {
return faker.date().between(fromTimestamp, toTimestamp).toLocalDateTime();

} else if (TypeInfos.VOID_TYPE_INFO.getTypeClass() == typeInfo.getTypeClass()) {
return null;
}

if (typeInfo instanceof ListTypeInfo) {
ListTypeInfo<?> listTypeInfo = (ListTypeInfo<?>) typeInfo;
return Lists.newArrayList(fakeRawValue(listTypeInfo.getElementTypeInfo()));
}

if (typeInfo instanceof MapTypeInfo) {
MapTypeInfo<?, ?> mapTypeInfo = (MapTypeInfo<?, ?>) typeInfo;
Map<Object, Object> mapRawValue = Maps.newHashMap();
mapRawValue.put(fakeRawValue(mapTypeInfo.getKeyTypeInfo()), fakeRawValue(mapTypeInfo.getValueTypeInfo()));
return mapRawValue;
}
throw new RuntimeException("Unsupported type " + typeInfo);
}
}