/
client.go
72 lines (64 loc) · 1.61 KB
/
client.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
package kaplex
import (
	"context"
	"encoding/json"
	"fmt"
	"strings"
	"time"

	"github.com/rs/zerolog/log"
	"github.com/segmentio/kafka-go"
)
// Send publishes the given event to the Kafka topic named by event.Topic,
// using event.ID as the message key and the event's JSON encoding as the
// payload. It blocks until the broker acknowledges the write and returns
// any delivery error, wrapped with the topic for context.
//
// NOTE(review): a writer is created (and closed) per call; if Send is hot,
// a shared long-lived writer would avoid repeated connection setup.
func Send(event *Event) error {
	// Synchronous mode (no Async): with Async enabled, WriteMessages
	// returns nil unconditionally and delivery failures would only ever
	// reach the logs, making the error return of Send meaningless.
	w := &kafka.Writer{
		Addr:                   kafka.TCP(config.Url),
		Topic:                  event.Topic,
		Balancer:               &kafka.LeastBytes{},
		AllowAutoTopicCreation: true,
	}
	// Close flushes and releases the writer's connections when Send returns.
	defer closeWriter(w)
	message := kafka.Message{
		Key:   []byte(event.ID),
		Value: event.AsJson(),
		Time:  time.Now(),
	}
	if err := w.WriteMessages(context.Background(), message); err != nil {
		return fmt.Errorf("writing message to topic %s: %w", event.Topic, err)
	}
	return nil
}
// Read consumes messages from the given topic as a member of the configured
// consumer group (config.ConsumerGroup) against the brokers listed in
// config.Url (comma-separated). Each successfully decoded Event is passed
// to f. Read blocks forever; unreadable or malformed messages are logged
// and skipped rather than delivered to f.
func Read(topic string, f func(e Event)) {
	brokers := strings.Split(config.Url, ",")
	r := kafka.NewReader(kafka.ReaderConfig{
		Brokers:  brokers,
		GroupID:  config.ConsumerGroup,
		Topic:    topic,
		MinBytes: 10e3, // 10KB
		MaxBytes: 10e6, // 10MB
	})
	defer closeReader(r)
	log.Info().Msgf("start consuming kafka messages for topic %s", topic)
	for {
		msg, err := r.ReadMessage(context.Background())
		if err != nil {
			log.Err(err).Msgf("error when consuming message for topic %s", topic)
			// msg is the zero value on error; do not try to decode it.
			continue
		}
		var event Event
		if err := json.Unmarshal(msg.Value, &event); err != nil {
			log.Err(err).Msgf("error when resolving message content for topic %s", topic)
			// Skip malformed payloads instead of handing f a zero Event.
			continue
		}
		f(event)
	}
}
// closeWriter closes the Kafka writer, flushing any buffered messages.
// A close failure is logged with its cause but must not kill the process:
// the previous log.Fatal called os.Exit on a mere cleanup error.
func closeWriter(w *kafka.Writer) {
	if err := w.Close(); err != nil {
		log.Err(err).Msg("could not close writer")
	}
}
// closeReader closes the Kafka reader, leaving its consumer group cleanly.
// A close failure is logged with its cause but must not kill the process:
// the previous log.Fatal called os.Exit on a mere cleanup error. Also fixes
// the copy-pasted message, which wrongly said "writer".
func closeReader(r *kafka.Reader) {
	if err := r.Close(); err != nil {
		log.Err(err).Msg("could not close reader")
	}
}