Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

980: Update log level of thrown exceptions #1132

Closed
wants to merge 2 commits into from
Closed
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
@@ -83,9 +83,8 @@ public void run() {
try {
followUpItems = item.run(scratchPath);
} catch (RuntimeException e) {
log.severe("Exception during item execution (" + item + "): " + e.getMessage());
log.log(Level.SEVERE, "Exception during item execution (" + item + "): " + e.getMessage(), e);
item.handleRuntimeException(e);
log.throwing(item.toString(), "run", e);
} finally {
log.log(Level.FINE, "Item " + item + " is now done", TaskPhases.END);
}
@@ -168,8 +167,7 @@ private void drain(Duration timeout) throws TimeoutException {
try {
head.get();
} catch (InterruptedException | ExecutionException e) {
log.warning("Exception during queue drain");
log.throwing("BotRunner", "drain", e);
log.log(Level.WARNING, "Exception during queue drain", e);
erikj79 marked this conversation as resolved.
Show resolved Hide resolved
}
} else {
log.finest("Queue is now empty");
@@ -188,8 +186,7 @@ private void drain(Duration timeout) throws TimeoutException {
try {
Thread.sleep(1);
} catch (InterruptedException e) {
log.warning("Exception during queue drain");
log.throwing("BotRunner", "drain", e);
log.log(Level.WARNING, "Exception during queue drain", e);
}
}

@@ -231,8 +228,7 @@ private void checkPeriodicItems() {
}
}
} catch (RuntimeException e) {
log.severe("Exception during periodic item checking: " + e.getMessage());
log.throwing("BotRunner", "checkPeriodicItems", e);
log.log(Level.SEVERE, "Exception during periodic item checking: " + e.getMessage(), e);
} finally {
log.log(Level.FINE, "Done checking periodic items", TaskPhases.END);
}
@@ -267,8 +263,7 @@ private void processRestRequest(JSONValue request) {
}
}
} catch (RuntimeException e) {
log.severe("Exception during rest request processing: " + e.getMessage());
log.throwing("BotRunner", "processRestRequest", e);
log.log(Level.SEVERE, "Exception during rest request processing: " + e.getMessage(), e);
} finally {
log.log(Level.FINE, "Done processing incoming rest request", TaskPhases.END);
}
@@ -289,8 +284,7 @@ public void run(Duration timeout) {
try {
restReceiver = new RestReceiver(config.restReceiverPort().get(), this::processRestRequest);
} catch (IOException e) {
log.warning("Failed to create RestReceiver");
log.throwing("BotRunner", "run", e);
log.log(Level.WARNING, "Failed to create RestReceiver", e);
}
}

@@ -92,8 +92,7 @@ public final void publish(LogRecord record) {
}
}
catch (RuntimeException e) {
log.severe("Exception during task notification posting: " + e.getMessage());
log.throwing("BotTaskAggregationHandler", "publish", e);
log.log(Level.SEVERE, "Exception during task notification posting: " + e.getMessage(), e);
} finally {
threadEntry.isPublishing = false;
}
@@ -29,6 +29,7 @@
import java.net.*;
import java.nio.charset.StandardCharsets;
import java.util.function.Consumer;
import java.util.logging.Level;
import java.util.logging.Logger;

class RestReceiver {
@@ -53,8 +54,7 @@ public void handle(HttpExchange exchange) throws IOException {
var parsedInput = JSON.parse(input);
consumer.accept(parsedInput);
} catch (RuntimeException e) {
log.warning("Failed to parse incoming request: " + input);
log.throwing("RestReceiver", "Handler", e);
log.log(Level.WARNING, "Failed to parse incoming request: " + input, e);
}
}
}
@@ -25,6 +25,8 @@
import org.openjdk.skara.bot.LogContextMap;
import org.openjdk.skara.json.JSON;

import java.io.PrintWriter;
import java.io.StringWriter;
import java.net.URI;
import java.net.http.*;
import java.time.*;
@@ -79,33 +81,6 @@ void addExtraField(String name, String value, String pattern) {
extraFields.add(extraField);
}

// Serializes one log event as a Logstash-compatible JSON document and POSTs it
// asynchronously to the configured endpoint. The document carries @timestamp,
// level, level_value and message, plus any entries from LogContextMap and the
// caller-supplied extra fields.
// NOTE(review): depends on instance state declared outside this view
// (dateTimeFormatter, endpoint, httpClient, futures) — confirm against the
// enclosing handler class.
private void publishToLogstash(Instant time, Level level, String message, Map<String, String> extraFields) {
var query = JSON.object();
query.put("@timestamp", dateTimeFormatter.format(time));
query.put("level", level.getName());
query.put("level_value", level.intValue());
query.put("message", message);

// Merge the per-thread logging context into the event; these entries are
// written after the standard fields, so a context key named like a standard
// field would overwrite it.
for (var entry : LogContextMap.entrySet()) {
query.put(entry.getKey(), entry.getValue());
}

// Statically configured extra fields are added last.
for (var extraField : extraFields.entrySet()) {
query.put(extraField.getKey(), extraField.getValue());
}

var httpRequest = HttpRequest.newBuilder()
.uri(endpoint)
.header("Content-Type", "application/json")
.POST(HttpRequest.BodyPublishers.ofString(query.toString()))
.build();
// Fire-and-forget: the response body is discarded and the future is ignored
// unless a test has installed a collection via setFuturesCollection.
var future = httpClient.sendAsync(httpRequest, HttpResponse.BodyHandlers.discarding());
// Save futures in optional collection when running tests.
if (futures != null) {
futures.add(future);
}
}

private Map<String, String> getExtraFields(LogRecord record) {
var ret = new HashMap<String, String>();
for (var extraField : extraFields) {
@@ -127,7 +102,42 @@ public void publish(LogRecord record) {
if (record.getLevel().intValue() < getLevel().intValue()) {
return;
}
publishToLogstash(record.getInstant(), record.getLevel(), record.getMessage(), getExtraFields(record));
Level level = record.getLevel();
var query = JSON.object();
query.put("@timestamp", dateTimeFormatter.format(record.getInstant()));
query.put("level", level.getName());
query.put("level_value", level.intValue());
query.put("message", record.getMessage());

if (record.getLoggerName() != null) {
query.put("logger_name", record.getLoggerName());
}

if (record.getThrown() != null) {
var writer = new StringWriter();
var printer = new PrintWriter(writer);
record.getThrown().printStackTrace(printer);
query.put("stack_trace", writer.toString());
}

for (var entry : LogContextMap.entrySet()) {
query.put(entry.getKey(), entry.getValue());
}

for (var extraField : getExtraFields(record).entrySet()) {
query.put(extraField.getKey(), extraField.getValue());
}

var httpRequest = HttpRequest.newBuilder()
.uri(endpoint)
.header("Content-Type", "application/json")
.POST(HttpRequest.BodyPublishers.ofString(query.toString()))
.build();
var future = httpClient.sendAsync(httpRequest, HttpResponse.BodyHandlers.discarding());
// Save futures in optional collection when running tests.
if (futures != null) {
futures.add(future);
}
}

void setFuturesCollection(Collection<Future<HttpResponse<Void>>> futures) {
@@ -100,8 +100,7 @@ private void publishToSlack(String message) {

webhook.post("").body(query).executeUnparsed();
} catch (RuntimeException | IOException e) {
log.warning("Exception during slack notification posting: " + e.getMessage());
log.throwing("BotSlackHandler", "publish", e);
log.log(Level.WARNING, "Exception during slack notification posting: " + e.getMessage(), e);
}
}

@@ -45,6 +45,7 @@ void simple() throws IOException, ExecutionException, InterruptedException {
handler.setFuturesCollection(futures);

var record = new LogRecord(Level.INFO, "Hello");
record.setLoggerName("my.logger");
handler.publish(record);

for (Future<HttpResponse<Void>> future : futures) {
@@ -53,8 +54,9 @@ void simple() throws IOException, ExecutionException, InterruptedException {

var requests = receiver.getRequests();
assertEquals(1, requests.size(), requests.toString());
assertTrue(requests.get(0).get("message").asString().contains("Hello"));
assertTrue(requests.get(0).get("level").asString().contains(Level.INFO.getName()));
assertEquals("Hello", requests.get(0).get("message").asString());
assertEquals(Level.INFO.getName(), requests.get(0).get("level").asString());
assertEquals("my.logger", requests.get(0).get("logger_name").asString());
}
}

@@ -30,6 +30,7 @@
import java.nio.charset.StandardCharsets;
import java.nio.file.*;
import java.util.*;
import java.util.logging.Level;
import java.util.logging.Logger;

public class JBridgeBot implements Bot, WorkItem {
@@ -128,8 +129,7 @@ public Collection<WorkItem> run(Path scratchPath) {
repo.pushAll(destination.url());
storage.resolve(successfulPushMarker).toFile().createNewFile();
} catch (IOException e) {
log.severe("Failed to push to " + destination.url());
log.throwing("JBridgeBot", "run", e);
log.log(Level.SEVERE, "Failed to push to " + destination.url(), e);
lastException = e;
}
} else {
@@ -290,7 +290,7 @@ public Collection<WorkItem> run(Path scratchPath) {
}
}
if (!errors.isEmpty()) {
errors.forEach(error -> log.throwing("RepositoryWorkItem", "run", error));
errors.forEach(error -> log.log(Level.WARNING, error.getMessage(), error));
throw new RuntimeException("Errors detected when processing repository notifications", errors.get(0));
}
} catch (IOException e) {
@@ -30,6 +30,7 @@
import java.nio.file.Path;
import java.time.Duration;
import java.util.*;
import java.util.logging.Level;
import java.util.stream.Stream;
import java.util.stream.Collectors;
import java.util.logging.Logger;
@@ -214,8 +215,7 @@ public void handle(PullRequestBot bot, PullRequest pr, CensusInstance censusInst
reply.println("No push attempt will be made.");
}
} catch (IOException | CommitFailure e) {
log.severe("An error occurred during integration (" + pr.webUrl() + "): " + e.getMessage());
log.throwing("IntegrateCommand", "handle", e);
log.log(Level.SEVERE, "An error occurred during integration (" + pr.webUrl() + "): " + e.getMessage(), e);
reply.println("An unexpected error occurred during integration. No push attempt will be made. " +
"The error has been logged and will be investigated. It is possible that this error " +
"is caused by a transient issue; feel free to retry the operation.");
@@ -30,6 +30,7 @@
import java.nio.file.Path;
import java.time.Duration;
import java.util.*;
import java.util.logging.Level;
import java.util.stream.Stream;
import java.util.stream.Collectors;
import java.util.logging.Logger;
@@ -156,8 +157,7 @@ public void handle(PullRequestBot bot, PullRequest pr, CensusInstance censusInst
reply.println("No push attempt will be made.");
}
} catch (IOException | CommitFailure e) {
log.severe("An error occurred during sponsored integration (" + pr.webUrl() + "): " + e.getMessage());
log.throwing("SponsorCommand", "handle", e);
log.log(Level.SEVERE, "An error occurred during sponsored integration (" + pr.webUrl() + "): " + e.getMessage(), e);
reply.println("An unexpected error occurred during sponsored integration. No push attempt will be made. " +
"The error has been logged and will be investigated. It is possible that this error " +
"is caused by a transient issue; feel free to retry the operation.");
@@ -30,6 +30,7 @@
import java.nio.file.Path;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.logging.Level;
import java.util.logging.Logger;

public class TestUpdateNeededWorkItem implements WorkItem {
@@ -68,8 +69,7 @@ public Collection<WorkItem> run(Path scratchPath) {
log.info("Getting test jobs for " + desc);
jobs = ci.jobsFor(pr);
} catch (IOException e) {
log.info("Could not retrieve test jobs for PR: " + desc);
log.throwing("TestBot", "getPeriodicItems", e);
log.log(Level.INFO, "Could not retrieve test jobs for PR: " + desc, e);
}

if (!jobs.isEmpty()) {
@@ -29,6 +29,7 @@
import java.nio.file.*;
import java.time.*;
import java.util.*;
import java.util.logging.Level;
import java.util.logging.Logger;

public class HostedRepositoryPool {
@@ -121,8 +122,7 @@ private void removeOldClone(Path path, String reason) {
try {
Files.move(path, preserved);
} catch (IOException e) {
log.severe("Failed to preserve old clone at " + path);
log.throwing("HostedRepositoryInstance", "preserveOldClone", e);
log.log(Level.SEVERE, "Failed to preserve old clone at " + path, e);
} finally {
if (Files.exists(path)) {
clearDirectory(path);
@@ -28,6 +28,7 @@
import java.io.*;
import java.nio.file.*;
import java.util.*;
import java.util.logging.Level;
import java.util.logging.Logger;

class HostedRepositoryStorage<T> implements Storage<T> {
@@ -73,8 +74,7 @@ private static Repository tryMaterialize(HostedRepository repository, Path local
Repository localRepository = Repository.init(localStorage, repository.repositoryType());
if (!localRepository.isEmpty()) {
// If the materialization failed but the local repository already contains data, do not initialize the ref
log.warning("Materialization into existing local repository failed");
log.throwing("HostedRepositoryStorage", "tryMaterialize", e2);
log.log(Level.WARNING, "Materialization into existing local repository failed", e2);
lastException = e2;
retryCount++;
continue;