[improvement](spark-connector)(flink-connector) Modify the max num of batch written by Spark/Flink connector each time. (#7485)

Increase the default batch size and flush interval: the Spark connector's default sink batch size rises from 1024 to 10000 rows, the default number of sink retries drops from 3 to 1, and the un-prefixed "sink.*" option constants are removed in favor of the "doris.sink.*" keys.
hf200012 committed Dec 26, 2021
1 parent b799643 commit 115ae27a5d3f747e2cec0e23f0567ac7a5a49a3f
Showing 2 changed files with 3 additions and 5 deletions.
@@ -66,11 +66,9 @@ public interface ConfigurationOptions {
 
     String DORIS_WRITE_FIELDS = "doris.write.fields";
 
-    String SINK_BATCH_SIZE = "sink.batch.size";
     String DORIS_SINK_BATCH_SIZE = "doris.sink.batch.size";
-    int SINK_BATCH_SIZE_DEFAULT = 1024;
+    int SINK_BATCH_SIZE_DEFAULT = 10000;
 
-    String SINK_MAX_RETRIES = "sink.max-retries";
     String DORIS_SINK_MAX_RETRIES = "doris.sink.max-retries";
-    int SINK_MAX_RETRIES_DEFAULT = 3;
+    int SINK_MAX_RETRIES_DEFAULT = 1;
 }
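
For reference, a minimal Spark usage sketch follows showing how a job could still override these defaults per write. The doris.sink.batch.size and doris.sink.max-retries keys come from ConfigurationOptions above; the "doris" format name, the connection options, and the example table are assumptions for illustration, not part of this commit.

import org.apache.spark.sql.SparkSession

// Hypothetical usage sketch: override the sink defaults changed in this commit.
object DorisSinkOptionsExample {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().appName("doris-sink-options-example").getOrCreate()
    val df = spark.range(0, 100000).toDF("id")

    df.write
      .format("doris")                                               // assumed data source name
      .option("doris.fenodes", "fe_host:8030")                       // assumed FE address option
      .option("doris.table.identifier", "example_db.example_table")  // assumed target table option
      .option("doris.sink.batch.size", "10000")                      // rows per flush; default raised from 1024 to 10000 here
      .option("doris.sink.max-retries", "1")                         // default lowered from 3 to 1 here
      .save()

    spark.stop()
  }
}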
@@ -72,7 +72,7 @@ private[sql] class DorisStreamLoadSink(sqlContext: SQLContext, settings: SparkSe
     val loop = new Breaks
     loop.breakable {
 
-      for (i <- 1 to maxRetryTimes) {
+      for (i <- 0 to maxRetryTimes) {
         try {
           dorisStreamLoader.load(rowsBuffer)
           rowsBuffer.clear()

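The bound change from 1 to maxRetryTimes to 0 to maxRetryTimes is worth spelling out: Scala's `to` is inclusive, so the loop now makes maxRetryTimes + 1 passes, i.e. one initial load attempt plus up to maxRetryTimes retries. With the new default of 1, a failing batch is attempted at most twice. The standalone sketch below mirrors that control flow; attemptLoad and the error handling in the catch block are illustrative stand-ins, since the hunk above does not show how the connector handles the exception.

import scala.util.control.Breaks

// Standalone sketch of the retry control flow implied by the new loop bound.
// attemptLoad stands in for dorisStreamLoader.load(rowsBuffer); the catch
// handling is assumed for illustration and is not shown in the diff above.
object RetryLoopSketch {
  def main(args: Array[String]): Unit = {
    val maxRetryTimes = 1      // new SINK_MAX_RETRIES_DEFAULT
    var failuresLeft = 1       // simulate one transient failure, then success

    def attemptLoad(): Unit =
      if (failuresLeft > 0) { failuresLeft -= 1; throw new RuntimeException("stream load failed") }

    val loop = new Breaks
    loop.breakable {
      // 0 to maxRetryTimes => maxRetryTimes + 1 attempts in total:
      // the initial write plus maxRetryTimes retries.
      for (i <- 0 to maxRetryTimes) {
        try {
          attemptLoad()
          loop.break()         // success: stop retrying
        } catch {
          case e: Exception =>
            if (i == maxRetryTimes) throw e   // retries exhausted, surface the error
            println(s"attempt ${i + 1} failed, retrying: ${e.getMessage}")
        }
      }
    }
  }
}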