Commit

improved logging statement to include thingIds + BSON filter as well
Signed-off-by: Thomas Jaeckle <thomas.jaeckle@bosch.io>
thjaeckle committed Feb 18, 2022
1 parent 418cb65 commit 4dd9197
Showing 1 changed file with 8 additions and 5 deletions.
@@ -15,6 +15,7 @@
 import java.util.Collection;
 import java.util.List;
 import java.util.UUID;
+import java.util.stream.Collectors;
 
 import org.bson.BsonDocument;
 import org.eclipse.ditto.internal.utils.akka.logging.DittoLoggerFactory;
@@ -24,7 +24,6 @@
 import org.eclipse.ditto.thingsearch.service.common.config.PersistenceStreamConfig;
 import org.eclipse.ditto.thingsearch.service.persistence.PersistenceConstants;
 import org.eclipse.ditto.thingsearch.service.persistence.write.model.AbstractWriteModel;
-import org.eclipse.ditto.thingsearch.service.persistence.write.model.Metadata;
 import org.eclipse.ditto.thingsearch.service.persistence.write.model.WriteResultAndErrors;
 
 import com.mongodb.MongoBulkWriteException;
@@ -138,10 +138,13 @@ private Source<WriteResultAndErrors, NotUsed> executeBulkWrite(final boolean sho
         final String bulkWriteCorrelationId = UUID.randomUUID().toString();
         if (LOGGER.isDebugEnabled()) {
             LOGGER.withCorrelationId(bulkWriteCorrelationId)
-                    .debug("Executing BulkWrite containing correlationIds: {}", abstractWriteModels.stream()
-                            .map(AbstractWriteModel::getMetadata)
-                            .map(Metadata::getEventsCorrelationIds)
-                            .flatMap(List::stream)
+                    .debug("Executing BulkWrite containing [<thingId>:{correlationIds}:<filter>]: {}", abstractWriteModels.stream()
+                            .map(writeModel -> "<" + writeModel.getMetadata().getThingId() + ">:" +
+                                    writeModel.getMetadata().getEventsCorrelationIds()
+                                            .stream()
+                                            .collect(Collectors.joining(",", "{", "}"))
+                                    + ":<" + writeModel.getFilter() + ">"
+                            )
                             .toList());
 
             // only log the complete MongoDB writeModels on "TRACE" as they get really big and almost crash the logging backend:
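For reference, a minimal standalone sketch (not Ditto code) of the string each write model is mapped to by the new debug statement. The thing ID, correlation IDs, and filter value below are made-up placeholders; real values come from the write model's metadata and BSON filter.

import java.util.List;
import java.util.stream.Collectors;

public class LogFormatSketch {

    public static void main(final String[] args) {
        // hypothetical placeholder values (assumed for illustration only)
        final String thingId = "org.eclipse.ditto:my-thing";
        final List<String> correlationIds = List.of("corr-1", "corr-2");
        final String filter = "{\"_id\": \"org.eclipse.ditto:my-thing\"}";

        // same shape as the mapping in the diff: <thingId>:{correlationId,...}:<filter>
        final String entry = "<" + thingId + ">:" +
                correlationIds.stream().collect(Collectors.joining(",", "{", "}")) +
                ":<" + filter + ">";

        // prints: <org.eclipse.ditto:my-thing>:{corr-1,corr-2}:<{"_id": "org.eclipse.ditto:my-thing"}>
        System.out.println(entry);
    }
}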
