diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index b0bbfb0..6e43e50 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -58,4 +58,4 @@ jobs:
ghcr.io/${{ github.repository_owner }}/podmortem-log-parser:${{ github.sha }}
build-args: |
GITHUB_USER=${{ github.actor }}
- GITHUB_TOKEN=${{ secrets.GITHUB_TOKEN }}
\ No newline at end of file
+ GITHUB_TOKEN=${{ secrets.GITHUB_TOKEN }}
diff --git a/.mvn/wrapper/maven-wrapper.properties b/.mvn/wrapper/maven-wrapper.properties
index 1a580be..8a5e4de 100644
--- a/.mvn/wrapper/maven-wrapper.properties
+++ b/.mvn/wrapper/maven-wrapper.properties
@@ -17,4 +17,4 @@
wrapperVersion=3.3.2
distributionType=source
distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.9/apache-maven-3.9.9-bin.zip
-wrapperUrl=https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.3.2/maven-wrapper-3.3.2.jar
\ No newline at end of file
+wrapperUrl=https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.3.2/maven-wrapper-3.3.2.jar
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index f98c4b3..7e9f536 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -12,7 +12,6 @@ repos:
hooks:
- id: spotless
name: spotless
- entry: mvn spotless:apply
+ entry: bash -c 'mvn spotless:apply'
language: system
- files: \.java$
- stages: [pre-commit]
+ files: \.(java|xml|json|md)$
diff --git a/patterns/spring-boot-test.yml b/patterns/spring-boot-test.yml
new file mode 100644
index 0000000..a45e819
--- /dev/null
+++ b/patterns/spring-boot-test.yml
@@ -0,0 +1,10 @@
+metadata:
+ library_id: "spring-boot-test"
+ version: "1.0.0"
+patterns:
+ - id: "test_pattern_1"
+ name: "Test Pattern"
+ primary_pattern:
+ regex: ".*ERROR.*"
+ confidence: 0.8
+ severity: "HIGH"
diff --git a/pom.xml b/pom.xml
index daff623..cc148c1 100644
--- a/pom.xml
+++ b/pom.xml
@@ -15,6 +15,8 @@
3.23.4
true
3.5.3
+
+        <podmortem.common.lib.version>1.0-f3f9123-SNAPSHOT</podmortem.common.lib.version>
@@ -34,6 +36,13 @@
pom
import
+            <dependency>
+                <groupId>${quarkus.platform.group-id}</groupId>
+                <artifactId>quarkus-operator-sdk-bom</artifactId>
+                <version>${quarkus.platform.version}</version>
+                <type>pom</type>
+                <scope>import</scope>
+            </dependency>
@@ -46,6 +55,19 @@
io.quarkus
quarkus-arc
+        <dependency>
+            <groupId>com.redhat.podmortem</groupId>
+            <artifactId>common</artifactId>
+            <version>${podmortem.common.lib.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>com.fasterxml.jackson.dataformat</groupId>
+            <artifactId>jackson-dataformat-yaml</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-databind</artifactId>
+        </dependency>
io.quarkus
quarkus-junit5
@@ -70,10 +92,19 @@
1.17.0
-
-
+
+
+
+
+ spotless-check
+ verify
+
+ check
+
+
+
${quarkus.platform.group-id}
diff --git a/src/main/java/com/redhat/podmortem/GreetingResource.java b/src/main/java/com/redhat/podmortem/GreetingResource.java
deleted file mode 100644
index 711f63e..0000000
--- a/src/main/java/com/redhat/podmortem/GreetingResource.java
+++ /dev/null
@@ -1,16 +0,0 @@
-package com.redhat.podmortem;
-
-import jakarta.ws.rs.GET;
-import jakarta.ws.rs.Path;
-import jakarta.ws.rs.Produces;
-import jakarta.ws.rs.core.MediaType;
-
-@Path("/hello")
-public class GreetingResource {
-
- @GET
- @Produces(MediaType.TEXT_PLAIN)
- public String hello() {
- return "Hello from Quarkus REST";
- }
-}
diff --git a/src/main/java/com/redhat/podmortem/rest/Parse.java b/src/main/java/com/redhat/podmortem/rest/Parse.java
new file mode 100644
index 0000000..cbd28c2
--- /dev/null
+++ b/src/main/java/com/redhat/podmortem/rest/Parse.java
@@ -0,0 +1,44 @@
+package com.redhat.podmortem.rest;
+
+import com.redhat.podmortem.common.model.analysis.AnalysisResult;
+import com.redhat.podmortem.common.model.kube.podmortem.PodFailureData;
+import com.redhat.podmortem.service.AnalysisService;
+import jakarta.inject.Inject;
+import jakarta.ws.rs.Consumes;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.Produces;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.Response;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Path("/parse")
+public class Parse {
+
+ private static final Logger log = LoggerFactory.getLogger(Parse.class);
+
+ @Inject AnalysisService analysisService;
+
+ @POST
+ @Consumes(MediaType.APPLICATION_JSON)
+ @Produces(MediaType.APPLICATION_JSON)
+ public Response parseLogs(PodFailureData data) {
+ if (data == null || data.getPod() == null) {
+ return Response.status(Response.Status.BAD_REQUEST)
+ .entity("{\"error\":\"Invalid PodFailureData provided\"}")
+ .build();
+ }
+
+ log.info("Received analysis request for pod: {}", data.getPod().getMetadata().getName());
+
+ AnalysisResult result = analysisService.analyze(data);
+
+ log.info(
+ "Analysis complete for pod: {}. Found {} significant events.",
+ data.getPod().getMetadata().getName(),
+ result.getSummary().getSignificantEvents());
+
+ return Response.ok(result).build();
+ }
+}
diff --git a/src/main/java/com/redhat/podmortem/service/AnalysisService.java b/src/main/java/com/redhat/podmortem/service/AnalysisService.java
new file mode 100644
index 0000000..232491f
--- /dev/null
+++ b/src/main/java/com/redhat/podmortem/service/AnalysisService.java
@@ -0,0 +1,197 @@
+package com.redhat.podmortem.service;
+
+import com.redhat.podmortem.common.model.analysis.AnalysisMetadata;
+import com.redhat.podmortem.common.model.analysis.AnalysisResult;
+import com.redhat.podmortem.common.model.analysis.AnalysisSummary;
+import com.redhat.podmortem.common.model.analysis.EventContext;
+import com.redhat.podmortem.common.model.analysis.MatchedEvent;
+import com.redhat.podmortem.common.model.kube.podmortem.PodFailureData;
+import com.redhat.podmortem.common.model.pattern.ContextExtraction;
+import com.redhat.podmortem.common.model.pattern.Pattern;
+import com.redhat.podmortem.common.model.pattern.PatternSet;
+import com.redhat.podmortem.common.model.pattern.SecondaryPattern;
+import jakarta.enterprise.context.ApplicationScoped;
+import jakarta.inject.Inject;
+import java.time.Instant;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+import java.util.function.Function;
+import java.util.regex.Matcher;
+import java.util.stream.Collectors;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@ApplicationScoped
+public class AnalysisService {
+
+ private static final Logger log = LoggerFactory.getLogger(AnalysisService.class);
+
+ @Inject PatternService patternService;
+
+ @Inject ScoringService scoringService;
+
+ /**
+ * Analyzes the provided pod failure data against the loaded pattern sets.
+ *
+ * @param data The collected data from a failed pod, including logs.
+ * @return An {@link AnalysisResult} object containing all findings.
+ */
+ public AnalysisResult analyze(PodFailureData data) {
+ long startTime = System.currentTimeMillis();
+        List<MatchedEvent> foundEvents = new ArrayList<>();
+ String[] logLines = data.getLogs().split("\\r?\\n");
+
+ // pre-compile all regex patterns
+ for (PatternSet patternSet : patternService.getPatternSets()) {
+ if (patternSet.getPatterns() == null) {
+ continue;
+ }
+ for (Pattern pattern : patternSet.getPatterns()) {
+ // compile primary pattern
+ pattern.getPrimaryPattern()
+ .setCompiledRegex(
+ java.util.regex.Pattern.compile(
+ pattern.getPrimaryPattern().getRegex()));
+
+ // compile secondary patterns
+ if (pattern.getSecondaryPatterns() != null) {
+ for (SecondaryPattern sp : pattern.getSecondaryPatterns()) {
+ sp.setCompiledRegex(java.util.regex.Pattern.compile(sp.getRegex()));
+ }
+ }
+ }
+ }
+
+ // look for matches in the logs
+ for (int logLine = 0; logLine < logLines.length; logLine++) {
+ String line = logLines[logLine];
+ for (var patternSet : patternService.getPatternSets()) {
+ for (var pattern : patternSet.getPatterns()) {
+ Matcher matcher = pattern.getPrimaryPattern().getCompiledRegex().matcher(line);
+
+ if (matcher.find()) {
+ log.info(
+ "Line {}: Found match for pattern '{}'",
+ logLine + 1,
+ pattern.getName());
+ MatchedEvent event = new MatchedEvent();
+ event.setLineNumber(logLine + 1);
+ event.setMatchedPattern(pattern);
+ event.setContext(
+ extractContext(logLines, logLine, pattern.getContextExtraction()));
+
+ double score = scoringService.calculateScore(event, logLines);
+ event.setScore(score);
+
+ foundEvents.add(event);
+ }
+ }
+ }
+ }
+
+ AnalysisResult result = new AnalysisResult();
+ result.setEvents(foundEvents);
+ result.setAnalysisId(UUID.randomUUID().toString());
+ result.setMetadata(buildMetadata(startTime, logLines, patternService.getPatternSets()));
+ result.setSummary(buildSummary(foundEvents));
+
+ return result;
+ }
+
+ /**
+ * Extracts the surrounding log lines based on the pattern's extraction rules.
+ *
+ * @param allLines The complete array of log lines.
+ * @param matchIndex The index of the line where the primary pattern matched.
+ * @param rules The context extraction rules from the matched pattern.
+ * @return An {@link EventContext} object populated with the relevant lines.
+ */
+ private EventContext extractContext(
+ String[] allLines, int matchIndex, ContextExtraction rules) {
+ EventContext context = new EventContext();
+ context.setMatchedLine(allLines[matchIndex]);
+
+ if (rules == null) {
+ return context;
+ }
+
+ // get lines before the match
+ int beforeStart = Math.max(0, matchIndex - rules.getLinesBefore());
+        List<String> beforeLines =
+                Arrays.asList(Arrays.copyOfRange(allLines, beforeStart, matchIndex));
+ context.setLinesBefore(beforeLines);
+
+ // get lines after the match
+ int afterEnd = Math.min(allLines.length, matchIndex + 1 + rules.getLinesAfter());
+        List<String> afterLines =
+                Arrays.asList(Arrays.copyOfRange(allLines, matchIndex + 1, afterEnd));
+ context.setLinesAfter(afterLines);
+
+ // TODO: Implement stack trace detection logic based on rules.getIncludeStackTrace()
+
+ return context;
+ }
+
+ /**
+ * Builds the metadata object for the analysis result.
+ *
+ * @param startTime The timestamp when the analysis began.
+ * @param logLines The complete array of log lines.
+ * @param patternSets The list of pattern sets used in the analysis.
+ * @return A populated {@link AnalysisMetadata} object.
+ */
+ private AnalysisMetadata buildMetadata(
+            long startTime, String[] logLines, List<PatternSet> patternSets) {
+ AnalysisMetadata metadata = new AnalysisMetadata();
+ metadata.setProcessingTimeMs(System.currentTimeMillis() - startTime);
+ metadata.setTotalLines(logLines.length);
+ metadata.setAnalyzedAt(Instant.now().toString());
+
+        List<String> patternsUsed =
+ patternSets.stream()
+ .map(ps -> ps.getMetadata().getLibraryId())
+ .collect(Collectors.toList());
+ metadata.setPatternsUsed(patternsUsed);
+
+ return metadata;
+ }
+
+ /**
+ * Builds the summary object for the analysis result.
+ *
+ * @param events The list of all events found during the analysis.
+ * @return A populated {@link AnalysisSummary} object.
+ */
+    private AnalysisSummary buildSummary(List<MatchedEvent> events) {
+ AnalysisSummary summary = new AnalysisSummary();
+ summary.setSignificantEvents(events.size());
+
+ if (events.isEmpty()) {
+ summary.setHighestSeverity("NONE");
+ summary.setSeverityDistribution(Map.of());
+ return summary;
+ }
+
+ // calculate severity distribution
+        Map<String, Long> distribution =
+ events.stream()
+ .map(e -> e.getMatchedPattern().getSeverity().toUpperCase())
+ .collect(Collectors.groupingBy(Function.identity(), Collectors.counting()));
+ summary.setSeverityDistribution(distribution);
+
+ // determine highest severity
+        List<String> severityOrder = List.of("INFO", "LOW", "MEDIUM", "HIGH", "CRITICAL");
+ String highestSeverity =
+ events.stream()
+ .map(e -> e.getMatchedPattern().getSeverity().toUpperCase())
+ .max(Comparator.comparingInt(severityOrder::indexOf))
+ .orElse("NONE");
+ summary.setHighestSeverity(highestSeverity);
+
+ return summary;
+ }
+}
diff --git a/src/main/java/com/redhat/podmortem/service/PatternService.java b/src/main/java/com/redhat/podmortem/service/PatternService.java
new file mode 100644
index 0000000..b661109
--- /dev/null
+++ b/src/main/java/com/redhat/podmortem/service/PatternService.java
@@ -0,0 +1,81 @@
+package com.redhat.podmortem.service;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
+import com.redhat.podmortem.common.model.pattern.PatternSet;
+import jakarta.annotation.PostConstruct;
+import jakarta.enterprise.context.ApplicationScoped;
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.stream.Stream;
+import org.eclipse.microprofile.config.inject.ConfigProperty;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@ApplicationScoped
+public class PatternService {
+
+ private static final Logger log = LoggerFactory.getLogger(PatternService.class);
+    private final List<PatternSet> loadedPatternSets = new ArrayList<>();
+ private final ObjectMapper yamlMapper = new ObjectMapper(new YAMLFactory());
+
+ @ConfigProperty(name = "pattern.directory")
+ String patternDirectoryPath;
+
+ @PostConstruct
+ void loadPatterns() {
+ log.info("Loading patterns from directory: {}", patternDirectoryPath);
+ File dir = new File(patternDirectoryPath);
+
+ if (!dir.exists() || !dir.isDirectory()) {
+ log.error(
+ "Pattern directory does not exist or is not a directory: {}",
+ patternDirectoryPath);
+ return;
+ }
+
+        try (Stream<Path> paths = Files.walk(Paths.get(patternDirectoryPath))) {
+ paths.filter(Files::isRegularFile)
+ .filter(
+ path ->
+ path.toString().endsWith(".yml")
+ || path.toString().endsWith(".yaml"))
+ .forEach(this::loadPatternFile);
+ } catch (IOException e) {
+ log.error("Error walking pattern directory: {}", patternDirectoryPath, e);
+ }
+
+ log.info("Successfully loaded {} pattern sets.", loadedPatternSets.size());
+ }
+
+ /**
+ * A helper method to parse a single YAML pattern file into a {@link PatternSet} object and add
+ * it to the in-memory list.
+ *
+ * @param path The path to the pattern file.
+ */
+ private void loadPatternFile(Path path) {
+ log.debug("Attempting to load pattern file: {}", path);
+ try {
+ PatternSet patternSet = yamlMapper.readValue(path.toFile(), PatternSet.class);
+ loadedPatternSets.add(patternSet);
+ } catch (IOException e) {
+ log.error("Failed to parse pattern file: {}", path, e);
+ }
+ }
+
+ /**
+ * Provides public, read-only access to all loaded pattern sets.
+ *
+ * @return An unmodifiable list of the loaded {@link PatternSet} objects.
+ */
+    public List<PatternSet> getPatternSets() {
+ return Collections.unmodifiableList(loadedPatternSets);
+ }
+}
diff --git a/src/main/java/com/redhat/podmortem/service/ScoringService.java b/src/main/java/com/redhat/podmortem/service/ScoringService.java
new file mode 100644
index 0000000..44af2ff
--- /dev/null
+++ b/src/main/java/com/redhat/podmortem/service/ScoringService.java
@@ -0,0 +1,95 @@
+package com.redhat.podmortem.service;
+
+import com.redhat.podmortem.common.model.analysis.MatchedEvent;
+import com.redhat.podmortem.common.model.pattern.Pattern;
+import com.redhat.podmortem.common.model.pattern.SecondaryPattern;
+import jakarta.enterprise.context.ApplicationScoped;
+import java.util.List;
+import java.util.Map;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@ApplicationScoped
+public class ScoringService {
+
+ private static final Logger log = LoggerFactory.getLogger(ScoringService.class);
+
+ // multipliers for different severity levels.
+    private static final Map<String, Double> SEVERITY_MULTIPLIERS =
+ Map.of(
+ "CRITICAL", 1.5,
+ "HIGH", 1.2,
+ "MEDIUM", 1.0,
+ "LOW", 0.8,
+ "INFO", 0.5);
+
+ /**
+ * Calculates the final score for a matched event.
+ *
+ * @param event The primary matched event.
+ * @param allLines The complete array of log lines for context.
+ * @return The calculated score.
+ */
+ public double calculateScore(MatchedEvent event, String[] allLines) {
+ Pattern pattern = event.getMatchedPattern();
+ double baseScore = pattern.getPrimaryPattern().getConfidence();
+
+ // apply Severity Multiplier
+ double severityMultiplier =
+ SEVERITY_MULTIPLIERS.getOrDefault(pattern.getSeverity().toUpperCase(), 1.0);
+ double score = baseScore * severityMultiplier;
+
+ // apply proximity bonus from secondary patterns
+ double proximityBonus = 0.0;
+        List<SecondaryPattern> secondaryPatterns = pattern.getSecondaryPatterns();
+ if (secondaryPatterns != null && !secondaryPatterns.isEmpty()) {
+ for (SecondaryPattern secondary : secondaryPatterns) {
+ if (isSecondaryPatternPresent(secondary, event.getLineNumber() - 1, allLines)) {
+ proximityBonus += secondary.getWeight();
+ }
+ }
+ }
+
+ log.debug(
+ "Pattern '{}': Base Score={}, Severity Multiplier={}, Proximity Bonus={}",
+ pattern.getName(),
+ baseScore,
+ severityMultiplier,
+ proximityBonus);
+
+ // final score calculation
+ double finalScore = score + proximityBonus;
+
+ // ensure score is capped at 1.0
+ return Math.min(1.0, finalScore);
+ }
+
+ /**
+ * check if a secondary pattern is present within the proximity window of a primary match
+ *
+ * @param secondary The secondary pattern to search for.
+ * @param primaryMatchIndex The line number index of the primary match.
+ * @param allLines The complete array of log lines.
+ * @return True if the secondary pattern is found within the window, otherwise false.
+ */
+ private boolean isSecondaryPatternPresent(
+ SecondaryPattern secondary, int primaryMatchIndex, String[] allLines) {
+ int start = Math.max(0, primaryMatchIndex - secondary.getProximityWindow());
+ int end = Math.min(allLines.length, primaryMatchIndex + secondary.getProximityWindow() + 1);
+
+ for (int line = start; line < end; line++) {
+ if (line == primaryMatchIndex) {
+ continue; // don't match the primary line itself
+ }
+
+ if (secondary.getCompiledRegex().matcher(allLines[line]).find()) {
+ log.debug(
+ "Found secondary pattern '{}' for primary match at line {}",
+ secondary.getRegex(),
+ primaryMatchIndex + 1);
+ return true;
+ }
+ }
+ return false;
+ }
+}
diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties
index e69de29..b78dd54 100644
--- a/src/main/resources/application.properties
+++ b/src/main/resources/application.properties
@@ -0,0 +1,2 @@
+## log-parser properties ##
+pattern.directory=patterns
diff --git a/src/main/resources/settings.xml b/src/main/resources/settings.xml
index d61de62..4165079 100644
--- a/src/main/resources/settings.xml
+++ b/src/main/resources/settings.xml
@@ -9,4 +9,4 @@
${env.GITHUB_TOKEN}
-
\ No newline at end of file
+
diff --git a/src/test/java/com/redhat/podmortem/GreetingResourceIT.java b/src/test/java/com/redhat/podmortem/GreetingResourceIT.java
deleted file mode 100644
index 13be3ba..0000000
--- a/src/test/java/com/redhat/podmortem/GreetingResourceIT.java
+++ /dev/null
@@ -1,8 +0,0 @@
-package com.redhat.podmortem;
-
-import io.quarkus.test.junit.QuarkusIntegrationTest;
-
-@QuarkusIntegrationTest
-class GreetingResourceIT extends GreetingResourceTest {
- // Execute the same tests but in packaged mode.
-}
diff --git a/src/test/java/com/redhat/podmortem/GreetingResourceTest.java b/src/test/java/com/redhat/podmortem/GreetingResourceTest.java
deleted file mode 100644
index a163ac9..0000000
--- a/src/test/java/com/redhat/podmortem/GreetingResourceTest.java
+++ /dev/null
@@ -1,20 +0,0 @@
-package com.redhat.podmortem;
-
-import io.quarkus.test.junit.QuarkusTest;
-import org.junit.jupiter.api.Test;
-
-import static io.restassured.RestAssured.given;
-import static org.hamcrest.CoreMatchers.is;
-
-@QuarkusTest
-class GreetingResourceTest {
- @Test
- void testHelloEndpoint() {
- given()
- .when().get("/hello")
- .then()
- .statusCode(200)
- .body(is("Hello from Quarkus REST"));
- }
-
-}
\ No newline at end of file