-
Notifications
You must be signed in to change notification settings - Fork 13
/
BkKafkaUtilTest.java
88 lines (74 loc) · 3.06 KB
/
BkKafkaUtilTest.java
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
package com.bkatwal.kafka;
import com.bkatwal.kafka.impl.CustomAckMessageListener;
import com.bkatwal.kafka.util.KafkaConsumerUtil;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.junit.AfterClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.springframework.kafka.test.rule.KafkaEmbedded;
/** @author "Bikas Katwal" 29/03/19 */
public class BkKafkaUtilTest {

  /** How long to wait for the consumer thread to start and receive its partition assignment. */
  private static final long CONSUMER_STARTUP_WAIT_MS = 30000;

  /** How long to wait for the consumer to drain the produced records before stopping it. */
  private static final long CONSUME_WAIT_MS = 10000;

  /** Single embedded broker, 1 partition, pre-created topic "test-topic". */
  @ClassRule
  public static KafkaEmbedded embeddedKafka = new KafkaEmbedded(1, true, 1, "test-topic");

  /** Tears down the embedded broker after all tests; teardown failures must not fail the run. */
  @AfterClass
  public static void close() {
    try {
      embeddedKafka.destroy();
    } catch (Exception ignored) {
      // best-effort cleanup: broker shutdown errors are irrelevant to test results
    }
  }

  /**
   * this test will give a demo of the usage of this utility project. Below code needs to be used in
   * the client that uses this project
   */
  @Test
  public void kafkaUtilTest() throws InterruptedException {
    Logger.getRootLogger().setLevel(Level.INFO);
    String broker = embeddedKafka.getBrokerAddresses()[0].toString();
    Map<String, Object> props = new HashMap<>();
    props.put("bootstrap.servers", broker);
    props.put("group.id", "test-group");
    // manual acknowledgement is exercised by CustomAckMessageListener, so auto-commit is off
    props.put("enable.auto.commit", false);
    props.put("auto.commit.interval.ms", "1000");
    props.put("session.timeout.ms", "30000");
    props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
    props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
    // create your own message processor, this will have processing logic and can save to one or
    // more target DB
    ConcreteMessageProcessor sampleCustomJsonConverter = new ConcreteMessageProcessor();
    CustomAckMessageListener customAckMessageListener =
        new CustomAckMessageListener(sampleCustomJsonConverter);
    // start the consumer
    KafkaConsumerUtil.startOrCreateConsumers("test-topic", customAckMessageListener, 1, props);
    // waiting for consumer to start and partition assignment
    Thread.sleep(CONSUMER_STARTUP_WAIT_MS);
    sampleProducer(broker);
    // wait for consumer to finish consuming data
    Thread.sleep(CONSUME_WAIT_MS);
    // stop the consumer
    KafkaConsumerUtil.stopConsumer("test-topic");
  }

  /**
   * Publishes 100 small JSON records to "test-topic" on the given broker.
   *
   * @param bootstrapServer broker address of the embedded Kafka instance
   */
  private void sampleProducer(String bootstrapServer) {
    Properties props = new Properties();
    props.put("bootstrap.servers", bootstrapServer);
    props.put("acks", "all");
    props.put("retries", 0);
    props.put("batch.size", 2);
    props.put("linger.ms", 1);
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    // try-with-resources guarantees the producer (and its network threads) are closed
    // even if a send fails; close() also flushes any buffered records
    try (Producer<String, String> producer = new KafkaProducer<>(props)) {
      for (int i = 0; i < 100; i++) {
        producer.send(
            new ProducerRecord<>("test-topic", Integer.toString(i), "{\"name\":\"bikas\"}"));
      }
    }
  }
}