-
Notifications
You must be signed in to change notification settings - Fork 215
/
SearchUpdaterStream.java
193 lines (168 loc) · 8.59 KB
/
SearchUpdaterStream.java
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
/*
* Copyright (c) 2019 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.eclipse.ditto.thingsearch.service.persistence.write.streaming;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.CompletionStage;
import java.util.function.Function;
import org.eclipse.ditto.internal.utils.namespaces.BlockedNamespaces;
import org.eclipse.ditto.thingsearch.service.common.config.PersistenceStreamConfig;
import org.eclipse.ditto.thingsearch.service.common.config.StreamStageConfig;
import org.eclipse.ditto.thingsearch.service.common.config.UpdaterConfig;
import org.eclipse.ditto.thingsearch.service.persistence.write.model.AbstractWriteModel;
import org.eclipse.ditto.thingsearch.service.persistence.write.model.Metadata;
import org.eclipse.ditto.thingsearch.service.persistence.write.model.WriteResultAndErrors;
import com.mongodb.reactivestreams.client.MongoDatabase;
import akka.NotUsed;
import akka.actor.ActorRef;
import akka.actor.ActorSystem;
import akka.stream.Attributes;
import akka.stream.KillSwitch;
import akka.stream.KillSwitches;
import akka.stream.RestartSettings;
import akka.stream.javadsl.Flow;
import akka.stream.javadsl.Keep;
import akka.stream.javadsl.RestartSource;
import akka.stream.javadsl.Sink;
import akka.stream.javadsl.Source;
import akka.stream.javadsl.SubSource;
/**
* Stream from the cache of Thing changes to the persistence of the search index.
*/
public final class SearchUpdaterStream {

    /**
     * Header to request this actor to perform a force-update due to a previous patch not being applied.
     */
    public static final String FORCE_UPDATE_INCORRECT_PATCH = "force-update-incorrect-patch";

    private final UpdaterConfig updaterConfig;
    private final EnforcementFlow enforcementFlow;
    private final MongoSearchUpdaterFlow mongoSearchUpdaterFlow;
    private final BulkWriteResultAckFlow bulkWriteResultAckFlow;
    private final ActorRef changeQueueActor;
    private final BlockedNamespaces blockedNamespaces;
    private final ActorSystem actorSystem;

    private SearchUpdaterStream(final UpdaterConfig updaterConfig,
            final EnforcementFlow enforcementFlow,
            final MongoSearchUpdaterFlow mongoSearchUpdaterFlow,
            final BulkWriteResultAckFlow bulkWriteResultAckFlow,
            final ActorRef changeQueueActor,
            final BlockedNamespaces blockedNamespaces,
            final ActorSystem actorSystem) {

        this.updaterConfig = updaterConfig;
        this.enforcementFlow = enforcementFlow;
        this.mongoSearchUpdaterFlow = mongoSearchUpdaterFlow;
        this.bulkWriteResultAckFlow = bulkWriteResultAckFlow;
        this.changeQueueActor = changeQueueActor;
        this.blockedNamespaces = blockedNamespaces;
        this.actorSystem = actorSystem;
    }

    /**
     * Create a restart-able SearchUpdaterStream object.
     *
     * @param updaterConfig the search updater configuration settings.
     * @param actorSystem actor system to run the stream in.
     * @param thingsShard shard region proxy of things.
     * @param policiesShard shard region proxy of policies.
     * @param updaterShard shard region of search updaters.
     * @param changeQueueActor reference of the change queue actor.
     * @param database MongoDB database.
     * @param blockedNamespaces the namespaces for which no search-index updates should be performed.
     * @param searchUpdateMapper a custom listener for search updates.
     * @return a SearchUpdaterStream object.
     */
    public static SearchUpdaterStream of(final UpdaterConfig updaterConfig,
            final ActorSystem actorSystem,
            final ActorRef thingsShard,
            final ActorRef policiesShard,
            final ActorRef updaterShard,
            final ActorRef changeQueueActor,
            final MongoDatabase database,
            final BlockedNamespaces blockedNamespaces,
            final SearchUpdateMapper searchUpdateMapper) {

        final var config = updaterConfig.getStreamConfig();

        final var enforcement =
                EnforcementFlow.of(actorSystem, config, thingsShard, policiesShard, actorSystem.getScheduler());
        final var persistence =
                MongoSearchUpdaterFlow.of(database, config.getPersistenceConfig(), searchUpdateMapper);
        final var acknowledgement = BulkWriteResultAckFlow.of(updaterShard);

        return new SearchUpdaterStream(updaterConfig, enforcement, persistence, acknowledgement, changeQueueActor,
                blockedNamespaces, actorSystem);
    }

    /**
     * Start a perpetual search updater stream killed only by the kill-switch.
     *
     * @return kill-switch to terminate the stream.
     */
    public KillSwitch start() {
        final var restartSource = createRestartResultSource();
        return restartSource
                .viaMat(KillSwitches.single(), Keep.right())
                .flatMapConcat(SubSource::mergeSubstreams)
                .to(Sink.ignore())
                .run(actorSystem);
    }

    /**
     * Build the complete updater pipeline wrapped in a restart-with-backoff source:
     * change queue -&gt; namespace filter -&gt; enforcement -&gt; MongoDB persistence -&gt; bulk-write acknowledgement.
     */
    private Source<SubSource<String, NotUsed>, NotUsed> createRestartResultSource() {
        final var streamConfig = updaterConfig.getStreamConfig();
        final StreamStageConfig retrievalConfig = streamConfig.getRetrievalConfig();
        final PersistenceStreamConfig persistenceConfig = streamConfig.getPersistenceConfig();

        final var acknowledgedChanges =
                ChangeQueueActor.createSource(changeQueueActor, true, streamConfig.getWriteInterval());
        final var unacknowledgedChanges =
                ChangeQueueActor.createSource(changeQueueActor, false, streamConfig.getWriteInterval());
        // strongly prefer acknowledged changes over unacknowledged ones (weights 1023 : 1)
        final var changes =
                acknowledgedChanges.mergePrioritized(unacknowledgedChanges, 1023, 1, true);

        final SubSource<List<AbstractWriteModel>, NotUsed> writeModels = enforcementFlow.create(
                changes.via(dropBlockedNamespaces()),
                retrievalConfig.getParallelism(),
                persistenceConfig.getParallelism(),
                actorSystem);

        final SubSource<WriteResultAndErrors, NotUsed> writeResults = mongoSearchUpdaterFlow.start(
                writeModels,
                true,
                persistenceConfig.getMaxBulkSize()
        );

        final SubSource<String, NotUsed> loggedResults = writeResults
                .via(bulkWriteResultAckFlow.start(persistenceConfig.getAckDelay()))
                .log("SearchUpdaterStream/BulkWriteResult")
                .withAttributes(Attributes.logLevels(
                        Attributes.logLevelInfo(),
                        Attributes.logLevelWarning(),
                        Attributes.logLevelError()));

        final var backOff = retrievalConfig.getExponentialBackOffConfig();
        return RestartSource.withBackoff(
                RestartSettings.create(backOff.getMin(), backOff.getMax(), backOff.getRandomFactor()),
                () -> Source.single(loggedResults));
    }

    /**
     * Flow that removes each {@code Metadata} entry whose thing-ID namespace is currently blocked,
     * re-emitting the surviving entries of every incoming collection as a single list.
     */
    private Flow<Collection<Metadata>, Collection<Metadata>, NotUsed> dropBlockedNamespaces() {
        return Flow.<Collection<Metadata>>create()
                .<Collection<Metadata>, NotUsed>flatMapConcat(metadataCollection ->
                        Source.fromIterator(metadataCollection::iterator)
                                .via(keepUnblocked(metadata -> metadata.getThingId().getNamespace()))
                                // collect the surviving entries back into one list per input collection
                                .fold(new ArrayList<>(), (list, metadata) -> {
                                    list.add(metadata);
                                    return list;
                                })
                )
                // keep the buffer minimal so changes are not pulled eagerly from the queue
                .withAttributes(Attributes.inputBuffer(1, 1));
    }

    /**
     * Flow passing a {@code Metadata} element through only if its namespace is not blocked.
     * On lookup errors the element passes through (fail-open).
     *
     * @param namespaceExtractor extracts the namespace to check from an element.
     */
    private Flow<Metadata, Metadata, NotUsed> keepUnblocked(final Function<Metadata, String> namespaceExtractor) {
        return Flow.<Metadata>create().flatMapConcat(element -> {
            final String namespace = namespaceExtractor.apply(element);
            // null result means the lookup failed; in that case let the element pass
            final CompletionStage<Boolean> isAllowed = blockedNamespaces.contains(namespace)
                    .handle((blocked, error) -> blocked == null || !blocked);
            return Source.completionStage(isAllowed)
                    .filter(Boolean::valueOf)
                    .map(allowed -> element);
        });
    }
}