Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

SKARA-971: Stop clumping up logs in logstash #1120

Closed
wants to merge 8 commits into from
Closed
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
@@ -22,6 +22,7 @@
*/
package org.openjdk.skara.bot;

import java.util.concurrent.atomic.AtomicInteger;
import org.openjdk.skara.json.JSONValue;

import java.io.IOException;
@@ -49,6 +50,8 @@ enum TaskPhases {
END
}

private AtomicInteger workIdCounter = new AtomicInteger();

private class RunnableWorkItem implements Runnable {
private final WorkItem item;

@@ -73,16 +76,19 @@ public void run() {
scratchPath = scratchPaths.removeFirst();
}

log.log(Level.FINE, "Executing item " + item + " on repository " + scratchPath, TaskPhases.BEGIN);
Collection<WorkItem> followUpItems = null;
try {
followUpItems = item.run(scratchPath);
} catch (RuntimeException e) {
log.severe("Exception during item execution (" + item + "): " + e.getMessage());
item.handleRuntimeException(e);
log.throwing(item.toString(), "run", e);
} finally {
log.log(Level.FINE, "Item " + item + " is now done", TaskPhases.END);
try (var __ = new LogContext(Map.of("work_item", item.toString(),
"work_id", String.valueOf(workIdCounter.incrementAndGet())))) {
log.log(Level.FINE, "Executing item " + item + " on repository " + scratchPath, TaskPhases.BEGIN);
try {
followUpItems = item.run(scratchPath);
} catch (RuntimeException e) {
log.severe("Exception during item execution (" + item + "): " + e.getMessage());
item.handleRuntimeException(e);
log.throwing(item.toString(), "run", e);
} finally {
log.log(Level.FINE, "Item " + item + " is now done", TaskPhases.END);
}
}
if (followUpItems != null) {
followUpItems.forEach(BotRunner.this::submitOrSchedule);
@@ -215,19 +221,21 @@ public BotRunner(BotRunnerConfiguration config, List<Bot> bots) {
}

private void checkPeriodicItems() {
log.log(Level.FINE, "Starting of checking for periodic items", TaskPhases.BEGIN);
try {
for (var bot : bots) {
var items = bot.getPeriodicItems();
for (var item : items) {
submitOrSchedule(item);
try (var __ = new LogContext("work_id", String.valueOf(workIdCounter.incrementAndGet()))) {
log.log(Level.FINE, "Starting of checking for periodic items", TaskPhases.BEGIN);
try {
for (var bot : bots) {
var items = bot.getPeriodicItems();
for (var item : items) {
submitOrSchedule(item);
}
}
} catch (RuntimeException e) {
log.severe("Exception during periodic item checking: " + e.getMessage());
log.throwing("BotRunner", "checkPeriodicItems", e);
} finally {
log.log(Level.FINE, "Done checking periodic items", TaskPhases.END);
}
} catch (RuntimeException e) {
log.severe("Exception during periodic item checking: " + e.getMessage());
log.throwing("BotRunner", "checkPeriodicItems", e);
} finally {
log.log(Level.FINE, "Done checking periodic items", TaskPhases.END);
}
}

@@ -248,20 +256,22 @@ private void itemWatchdog() {
}

private void processRestRequest(JSONValue request) {
log.log(Level.FINE, "Starting processing of incoming rest request", TaskPhases.BEGIN);
log.fine("Request: " + request);
try {
for (var bot : bots) {
var items = bot.processWebHook(request);
for (var item : items) {
submitOrSchedule(item);
try (var __ = new LogContext("work_id", String.valueOf(workIdCounter.incrementAndGet()))) {
log.log(Level.FINE, "Starting processing of incoming rest request", TaskPhases.BEGIN);
log.fine("Request: " + request);
try {
for (var bot : bots) {
var items = bot.processWebHook(request);
for (var item : items) {
submitOrSchedule(item);
}
}
} catch (RuntimeException e) {
log.severe("Exception during rest request processing: " + e.getMessage());
log.throwing("BotRunner", "processRestRequest", e);
} finally {
log.log(Level.FINE, "Done processing incoming rest request", TaskPhases.END);
}
} catch (RuntimeException e) {
log.severe("Exception during rest request processing: " + e.getMessage());
log.throwing("BotRunner", "processRestRequest", e);
} finally {
log.log(Level.FINE, "Done processing incoming rest request", TaskPhases.END);
}
}

@@ -0,0 +1,50 @@
package org.openjdk.skara.bot;

import java.util.HashMap;
import java.util.Map;
import java.util.logging.Logger;

/**
* A LogContext is used to temporarily add extra log metadata in the current thread.
* It should be initiated with a try-with-resources construct. The variable itself
* is never used, we only want the controlled automatic close at the end of the try
* block. Typically name the variable __. Example:
*
* try (var __ = new LogContext("foo", "bar")) {
* // some code that logs stuff
* }
*/
public class LogContext implements AutoCloseable {
    private static final Logger log = Logger.getLogger("org.openjdk.skara.bot");
    // Only the keys this instance actually added are recorded here; close()
    // removes exactly these, so a nested LogContext never tears down entries
    // owned by an enclosing one.
    private final Map<String, String> context = new HashMap<>();

    /**
     * Creates a context adding a single key/value pair to the current
     * thread's log metadata.
     */
    public LogContext(String key, String value) {
        this.init(Map.of(key, value));
    }

    /**
     * Creates a context adding every entry of the given map to the current
     * thread's log metadata.
     */
    public LogContext(Map<String, String> ctx) {
        this.init(ctx);
    }

    // Registers each entry with LogContextMap unless the key is already set.
    // An existing key with the same value is silently left alone (the outer
    // context keeps ownership); a different value is logged as an error and
    // NOT overridden.
    private void init(Map<String, String> newContext) {
        for (var entry : newContext.entrySet()) {
            String currentValue = LogContextMap.get(entry.getKey());
            if (currentValue != null) {
                if (!currentValue.equals(entry.getValue())) {
                    log.severe("Tried to override the current LogContext value: " + currentValue
                            + " for " + entry.getKey() + " with a different value: " + entry.getValue());
                }
            } else {
                this.context.put(entry.getKey(), entry.getValue());
                LogContextMap.put(entry.getKey(), entry.getValue());
            }
        }
    }

    /**
     * Removes the entries this instance added in init(), restoring the
     * thread's log metadata to its prior state.
     */
    @Override
    public void close() {
        this.context.forEach((key, value) -> {
            LogContextMap.remove(key);
        });
    }
}
@@ -0,0 +1,48 @@
package org.openjdk.skara.bot;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;

/**
* This class holds a static thread local hashmap to store temporary log
* metadata which our custom StreamHandlers can pick up and include in log
* messages.
*/
public class LogContextMap {

    // Per-thread context map, created lazily on first access.
    // ThreadLocal.withInitial removes the repeated null checks that a bare
    // ThreadLocal would need in every accessor.
    // NOTE(review): entries live as long as the thread does; consider calling
    // threadContextMap.remove() when the map empties to avoid retention in
    // pooled threads — confirm against the bot runner's threading model.
    private static final ThreadLocal<HashMap<String, String>> threadContextMap =
            ThreadLocal.withInitial(HashMap::new);

    // Static utility holder — not meant to be instantiated.
    private LogContextMap() {
    }

    /** Adds (or replaces) a context entry for the current thread. */
    public static void put(String key, String value) {
        threadContextMap.get().put(key, value);
    }

    /** Returns the value for {@code key} in the current thread, or null if absent. */
    public static String get(String key) {
        return threadContextMap.get().get(key);
    }

    /** Removes and returns the value for {@code key}, or null if it was absent. */
    public static String remove(String key) {
        return threadContextMap.get().remove(key);
    }

    /** Returns the current thread's context entries (empty when none are set). */
    public static Set<Map.Entry<String, String>> entrySet() {
        return threadContextMap.get().entrySet();
    }
}
@@ -0,0 +1,19 @@
package org.openjdk.skara.bot;

import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;

public class LogContextTests {

    @Test
    public void simple() {
        var key = "keyname";
        // The key must not leak in from a previous test or thread.
        assertNull(LogContextMap.get(key), "Key " + key + " already present in context");
        // Inside the try-with-resources scope the entry is visible...
        try (var ignored = new LogContext(key, "value")) {
            assertEquals("value", LogContextMap.get(key), "Context property not set");
        }
        // ...and close() must have removed it again afterwards.
        assertNull(LogContextMap.get(key), "Context property not removed");
    }
}
@@ -33,6 +33,7 @@
requires org.openjdk.skara.network;
requires org.openjdk.skara.version;

requires java.net.http;
requires java.sql;

exports org.openjdk.skara.bots.cli;
@@ -22,6 +22,9 @@
*/
package org.openjdk.skara.bots.cli;

import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import org.openjdk.skara.args.*;
import org.openjdk.skara.bot.*;
import org.openjdk.skara.network.URIBuilder;
@@ -41,6 +44,7 @@

public class BotLauncher {
private static Logger log;
private static final Instant START_TIME = Instant.now();

private static void applyLogging(JSONObject config) {
LogManager.getLogManager().reset();
@@ -81,11 +85,7 @@ private static void applyLogging(JSONObject config) {
if (config.get("log").asObject().contains("logstash")) {
var logstashConf = config.get("log").get("logstash").asObject();
var level = Level.parse(logstashConf.get("level").asString());
var maxRecords = 100;
if (logstashConf.contains("maxrecords")) {
maxRecords = logstashConf.get("maxrecords").asInt();
}
var handler = new BotLogstashHandler(URIBuilder.base(logstashConf.get("endpoint").asString()).build(), maxRecords);
var handler = new BotLogstashHandler(URIBuilder.base(logstashConf.get("endpoint").asString()).build());
if (logstashConf.contains("fields")) {
for (var field : logstashConf.get("fields").asArray()) {
if (field.asObject().contains("pattern")) {
@@ -99,6 +99,10 @@ private static void applyLogging(JSONObject config) {
}
}
handler.setLevel(level);
var dateTimeFormatter = DateTimeFormatter.ISO_INSTANT
.withLocale(Locale.getDefault())
.withZone(ZoneId.systemDefault());
handler.addExtraField("instance_start_time", dateTimeFormatter.format(START_TIME));
log.addHandler(handler);
}
}