/
AtMostOnceConsumerStream.java
172 lines (142 loc) · 7.04 KB
/
AtMostOnceConsumerStream.java
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
/*
* Copyright (c) 2021 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.eclipse.ditto.connectivity.service.messaging.kafka;
import java.nio.ByteBuffer;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionStage;
import java.util.function.BiConsumer;
import javax.annotation.concurrent.Immutable;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.eclipse.ditto.base.model.exceptions.DittoRuntimeException;
import org.eclipse.ditto.connectivity.api.ExternalMessage;
import org.eclipse.ditto.connectivity.model.ConnectionId;
import org.eclipse.ditto.connectivity.service.config.ConnectionThrottlingConfig;
import org.eclipse.ditto.connectivity.service.messaging.AcknowledgeableMessage;
import org.eclipse.ditto.connectivity.service.messaging.monitoring.ConnectionMonitor;
import org.eclipse.ditto.internal.utils.akka.logging.DittoLoggerFactory;
import org.slf4j.Logger;
import akka.Done;
import akka.NotUsed;
import akka.kafka.javadsl.Consumer;
import akka.stream.Materializer;
import akka.stream.javadsl.MergeHub;
import akka.stream.javadsl.Sink;
import akka.stream.javadsl.Source;
/**
 * Kafka consumer stream with "at most once" (QoS 0) semantics.
 */
@Immutable
final class AtMostOnceConsumerStream implements KafkaConsumerStream {

    private static final Logger LOGGER = DittoLoggerFactory.getThreadSafeLogger(AtMostOnceConsumerStream.class);

    private final Materializer materializer;
    private final Consumer.DrainingControl<Done> consumerControl;
    private final Sink<KafkaCompletableMessage, NotUsed> externalMessageSink;
    private final Sink<TransformationResult, NotUsed> dreSink;
    private final Sink<TransformationResult, NotUsed> unexpectedMessageSink;
    private final KafkaConsumerMetrics consumerMetrics;

    /**
     * Wires up the "at most once" consumer stream and starts it immediately.
     * Records are filtered out in dry-run mode, transformed, optionally throttled, and then
     * dispatched to one of three pre-materialized sinks depending on the transformation outcome.
     */
    AtMostOnceConsumerStream(
            final AtMostOnceKafkaConsumerSourceSupplier sourceSupplier,
            final ConnectionThrottlingConfig throttlingConfig,
            final KafkaMessageTransformer kafkaMessageTransformer,
            final boolean dryRun,
            final Materializer materializer,
            final ConnectionMonitor inboundMonitor,
            final Sink<AcknowledgeableMessage, NotUsed> inboundMappingSink,
            final Sink<DittoRuntimeException, ?> exceptionSink,
            final ConnectionId connectionId,
            final String consumerId) {

        this.materializer = materializer;

        // The three outcome sinks are materialized once up front via MergeHub; otherwise
        // processTransformationResult would materialize a fresh sink for every single record.
        unexpectedMessageSink = MergeHub.of(TransformationResult.class)
                .to(Sink.foreach(result -> inboundMonitor.exception(
                        "Got unexpected transformation result <{0}>. This is an internal error. " +
                                "Please contact the service team", result
                )))
                .run(materializer);
        dreSink = MergeHub.of(TransformationResult.class)
                .map(AtMostOnceConsumerStream::extractDittoRuntimeException)
                .to(exceptionSink)
                .run(materializer);
        externalMessageSink = MergeHub.of(KafkaCompletableMessage.class)
                .map(KafkaCompletableMessage::getAcknowledgeableMessage)
                .to(inboundMappingSink)
                .run(materializer);

        final var transformedRecords = sourceSupplier.get()
                .filter(consumerRecord -> isNotDryRun(consumerRecord, dryRun))
                .map(kafkaMessageTransformer::transform)
                .filter(transformationResult -> !transformationResult.isExpired());

        // Apply rate limiting only when configured; otherwise pass the source through unchanged.
        final Source<TransformationResult, Consumer.Control> rateLimitedRecords =
                throttlingConfig.isEnabled()
                        ? transformedRecords.throttle(throttlingConfig.getLimit(), throttlingConfig.getInterval())
                        : transformedRecords;

        consumerControl = rateLimitedRecords
                .flatMapConcat(this::processTransformationResult)
                .mapAsync(throttlingConfig.getMaxInFlight(), completableDone -> completableDone)
                .toMat(Sink.ignore(), Consumer::createDrainingControl)
                .run(materializer);

        consumerMetrics = KafkaConsumerMetrics.newInstance(consumerControl, connectionId, consumerId);
    }

    @Override
    public CompletionStage<Done> whenComplete(final BiConsumer<? super Done, ? super Throwable> handleCompletion) {
        final CompletionStage<Done> streamCompletion = consumerControl.streamCompletion();
        return streamCompletion.whenComplete(handleCompletion);
    }

    @Override
    public CompletionStage<Done> stop() {
        // Drain already-fetched records before shutting the consumer down.
        return consumerControl.drainAndShutdown(materializer.executionContext());
    }

    @Override
    public void reportMetrics() {
        consumerMetrics.reportMetrics();
    }

    /**
     * Routes a single transformation result to the matching pre-materialized sink and emits the
     * future that {@code mapAsync} waits on before pulling the next record.
     */
    private Source<CompletableFuture<Done>, NotUsed> processTransformationResult(
            final TransformationResult result) {

        if (isExternalMessage(result)) {
            return Source.single(result)
                    .map(AtMostOnceConsumerStream::toAcknowledgeableMessage)
                    .alsoTo(externalMessageSink)
                    .map(KafkaCompletableMessage::getAcknowledgementFuture);
        }

        // Non-message outcomes need no acknowledgement; complete immediately.
        final CompletableFuture<Done> alreadyDone = CompletableFuture.completedFuture(Done.getInstance());
        if (isDittoRuntimeException(result)) {
            return Source.single(result)
                    .alsoTo(dreSink)
                    .map(ignored -> alreadyDone);
        }
        return Source.single(result)
                .alsoTo(unexpectedMessageSink)
                .map(ignored -> alreadyDone);
    }

    private static KafkaCompletableMessage toAcknowledgeableMessage(final TransformationResult value) {
        // Callers only reach this after isExternalMessage(value) returned true.
        final ExternalMessage externalMessage = value.getExternalMessage().orElseThrow();
        return new KafkaCompletableMessage(externalMessage);
    }

    private static boolean isExternalMessage(final TransformationResult value) {
        return value.getExternalMessage().isPresent();
    }

    /**
     * Returns {@code true} when the record should be processed; in dry-run mode every record is
     * dropped (and logged at debug level).
     */
    private static boolean isNotDryRun(final ConsumerRecord<String, ByteBuffer> cRecord, final boolean dryRun) {
        if (!dryRun) {
            return true;
        }
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("Dropping record (key: {}, topic: {}, partition: {}, offset: {}) in dry run mode.",
                    cRecord.key(), cRecord.topic(), cRecord.partition(), cRecord.offset());
        }
        return false;
    }

    private static boolean isDittoRuntimeException(final TransformationResult value) {
        return value.getDittoRuntimeException().isPresent();
    }

    private static DittoRuntimeException extractDittoRuntimeException(final TransformationResult value) {
        // Callers only reach this after isDittoRuntimeException(value) returned true.
        return value.getDittoRuntimeException().orElseThrow();
    }
}