Merged pull request (changes from all commits)
2 changes: 1 addition & 1 deletion .github/workflows/build.yml
@@ -58,4 +58,4 @@ jobs:
             ghcr.io/${{ github.repository_owner }}/podmortem-log-parser:${{ github.sha }}
           build-args: |
             GITHUB_USER=${{ github.actor }}
-            GITHUB_TOKEN=${{ secrets.GITHUB_TOKEN }}
\ No newline at end of file
+            GITHUB_TOKEN=${{ secrets.GITHUB_TOKEN }}
2 changes: 1 addition & 1 deletion .mvn/wrapper/maven-wrapper.properties
@@ -17,4 +17,4 @@
 wrapperVersion=3.3.2
 distributionType=source
 distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.9/apache-maven-3.9.9-bin.zip
-wrapperUrl=https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.3.2/maven-wrapper-3.3.2.jar
\ No newline at end of file
+wrapperUrl=https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.3.2/maven-wrapper-3.3.2.jar
5 changes: 2 additions & 3 deletions .pre-commit-config.yaml
@@ -12,7 +12,6 @@ repos:
     hooks:
       - id: spotless
         name: spotless
-        entry: mvn spotless:apply
+        entry: bash -c 'mvn spotless:apply'
         language: system
-        files: \.java$
-        stages: [pre-commit]
+        files: \.(java|xml|json|md)$
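For reference, the hook block after this change reads as follows (the stages key is dropped and the file filter widens from Java files only to XML, JSON, and Markdown as well):

    hooks:
      - id: spotless
        name: spotless
        entry: bash -c 'mvn spotless:apply'
        language: system
        files: \.(java|xml|json|md)$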
10 changes: 10 additions & 0 deletions patterns/spring-boot-test.yml
@@ -0,0 +1,10 @@
metadata:
  library_id: "spring-boot-test"
  version: "1.0.0"
patterns:
  - id: "test_pattern_1"
    name: "Test Pattern"
    primary_pattern:
      regex: ".*ERROR.*"
      confidence: 0.8
    severity: "HIGH"
35 changes: 33 additions & 2 deletions pom.xml
@@ -15,6 +15,8 @@
         <quarkus.platform.version>3.23.4</quarkus.platform.version>
         <skipITs>true</skipITs>
         <surefire-plugin.version>3.5.3</surefire-plugin.version>
+
+        <podmortem.common.lib.version>1.0-f3f9123-SNAPSHOT</podmortem.common.lib.version>
     </properties>

     <repositories>
@@ -34,6 +36,13 @@
             <type>pom</type>
             <scope>import</scope>
         </dependency>
+        <dependency>
+            <groupId>${quarkus.platform.group-id}</groupId>
+            <artifactId>quarkus-operator-sdk-bom</artifactId>
+            <version>${quarkus.platform.version}</version>
+            <type>pom</type>
+            <scope>import</scope>
+        </dependency>
     </dependencies>
 </dependencyManagement>

@@ -46,6 +55,19 @@
             <groupId>io.quarkus</groupId>
             <artifactId>quarkus-arc</artifactId>
         </dependency>
+        <dependency>
+            <groupId>com.redhat.podmortem</groupId>
+            <artifactId>common</artifactId>
+            <version>${podmortem.common.lib.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>com.fasterxml.jackson.dataformat</groupId>
+            <artifactId>jackson-dataformat-yaml</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-databind</artifactId>
+        </dependency>
         <dependency>
             <groupId>io.quarkus</groupId>
             <artifactId>quarkus-junit5</artifactId>
@@ -70,10 +92,19 @@
                     <version>1.17.0</version>
                     <style>AOSP</style>
                 </googleJavaFormat>
-                <removeUnusedImports/>
-                <trimTrailingWhitespace/>
+                <removeUnusedImports />
+                <trimTrailingWhitespace />
             </java>
         </configuration>
+        <executions>
+            <execution>
+                <id>spotless-check</id>
+                <phase>verify</phase>
+                <goals>
+                    <goal>check</goal>
+                </goals>
+            </execution>
+        </executions>
     </plugin>
     <plugin>
         <groupId>${quarkus.platform.group-id}</groupId>
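With the new execution in place, mvn verify runs spotless:check and fails the build on formatting violations; mvn spotless:apply (the same goal the pre-commit hook invokes) reformats the offending files.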
16 changes: 0 additions & 16 deletions src/main/java/com/redhat/podmortem/GreetingResource.java

This file was deleted.

44 changes: 44 additions & 0 deletions src/main/java/com/redhat/podmortem/rest/Parse.java
@@ -0,0 +1,44 @@
package com.redhat.podmortem.rest;

import com.redhat.podmortem.common.model.analysis.AnalysisResult;
import com.redhat.podmortem.common.model.kube.podmortem.PodFailureData;
import com.redhat.podmortem.service.AnalysisService;
import jakarta.inject.Inject;
import jakarta.ws.rs.Consumes;
import jakarta.ws.rs.POST;
import jakarta.ws.rs.Path;
import jakarta.ws.rs.Produces;
import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@Path("/parse")
public class Parse {

private static final Logger log = LoggerFactory.getLogger(Parse.class);

@Inject AnalysisService analysisService;

@POST
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response parseLogs(PodFailureData data) {
if (data == null || data.getPod() == null || data.getLogs() == null) {
return Response.status(Response.Status.BAD_REQUEST)
.entity("{\"error\":\"Invalid PodFailureData provided\"}")
.build();
}

log.info("Received analysis request for pod: {}", data.getPod().getMetadata().getName());

AnalysisResult result = analysisService.analyze(data);

log.info(
"Analysis complete for pod: {}. Found {} significant events.",
data.getPod().getMetadata().getName(),
result.getSummary().getSignificantEvents());

return Response.ok(result).build();
}
}
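For illustration, a minimal JAX-RS client invocation of this endpoint. This is a sketch that assumes the service runs on localhost:8080 and that PodFailureData serializes its getPod() and getLogs() properties as pod and logs; those field names are assumptions, not confirmed by this PR:

    import jakarta.ws.rs.client.Client;
    import jakarta.ws.rs.client.ClientBuilder;
    import jakarta.ws.rs.client.Entity;
    import jakarta.ws.rs.core.MediaType;
    import jakarta.ws.rs.core.Response;

    public class ParseClientSketch {
        public static void main(String[] args) {
            // hypothetical payload; shape assumed from the getters Parse uses
            String payload =
                    "{\"pod\": {\"metadata\": {\"name\": \"my-app-7f9c\"}},"
                            + " \"logs\": \"2024-01-01T00:00:00Z ERROR something failed\"}";

            Client client = ClientBuilder.newClient();
            try {
                Response response =
                        client.target("http://localhost:8080/parse")
                                .request(MediaType.APPLICATION_JSON)
                                .post(Entity.entity(payload, MediaType.APPLICATION_JSON));
                // expect 200 with an AnalysisResult body, or 400 on invalid input
                System.out.println(response.getStatus());
                System.out.println(response.readEntity(String.class));
            } finally {
                client.close();
            }
        }
    }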
197 changes: 197 additions & 0 deletions src/main/java/com/redhat/podmortem/service/AnalysisService.java
@@ -0,0 +1,197 @@
package com.redhat.podmortem.service;

import com.redhat.podmortem.common.model.analysis.AnalysisMetadata;
import com.redhat.podmortem.common.model.analysis.AnalysisResult;
import com.redhat.podmortem.common.model.analysis.AnalysisSummary;
import com.redhat.podmortem.common.model.analysis.EventContext;
import com.redhat.podmortem.common.model.analysis.MatchedEvent;
import com.redhat.podmortem.common.model.kube.podmortem.PodFailureData;
import com.redhat.podmortem.common.model.pattern.ContextExtraction;
import com.redhat.podmortem.common.model.pattern.Pattern;
import com.redhat.podmortem.common.model.pattern.PatternSet;
import com.redhat.podmortem.common.model.pattern.SecondaryPattern;
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.inject.Inject;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.function.Function;
import java.util.regex.Matcher;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@ApplicationScoped
public class AnalysisService {

private static final Logger log = LoggerFactory.getLogger(AnalysisService.class);

@Inject PatternService patternService;

@Inject ScoringService scoringService;

/**
* Analyzes the provided pod failure data against the loaded pattern sets.
*
* @param data The collected data from a failed pod, including logs.
* @return An {@link AnalysisResult} object containing all findings.
*/
public AnalysisResult analyze(PodFailureData data) {
long startTime = System.currentTimeMillis();
List<MatchedEvent> foundEvents = new ArrayList<>();
String[] logLines = data.getLogs().split("\\r?\\n");

// pre-compile all regex patterns
for (PatternSet patternSet : patternService.getPatternSets()) {
if (patternSet.getPatterns() == null) {
continue;
}
for (Pattern pattern : patternSet.getPatterns()) {
// compile primary pattern
pattern.getPrimaryPattern()
.setCompiledRegex(
java.util.regex.Pattern.compile(
pattern.getPrimaryPattern().getRegex()));

// compile secondary patterns
if (pattern.getSecondaryPatterns() != null) {
for (SecondaryPattern sp : pattern.getSecondaryPatterns()) {
sp.setCompiledRegex(java.util.regex.Pattern.compile(sp.getRegex()));
}
}
}
}
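        // note: the pattern sets are recompiled on every analyze() call;
        // caching the compiled java.util.regex.Pattern objects across calls
        // would avoid the repeated work when pattern sets rarely change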

// look for matches in the logs
for (int logLine = 0; logLine < logLines.length; logLine++) {
String line = logLines[logLine];
for (var patternSet : patternService.getPatternSets()) {
    if (patternSet.getPatterns() == null) {
        continue; // mirror the null guard from the pre-compile pass above
    }
    for (var pattern : patternSet.getPatterns()) {
Matcher matcher = pattern.getPrimaryPattern().getCompiledRegex().matcher(line);

if (matcher.find()) {
log.info(
"Line {}: Found match for pattern '{}'",
logLine + 1,
pattern.getName());
MatchedEvent event = new MatchedEvent();
event.setLineNumber(logLine + 1);
event.setMatchedPattern(pattern);
event.setContext(
extractContext(logLines, logLine, pattern.getContextExtraction()));

double score = scoringService.calculateScore(event, logLines);
event.setScore(score);

foundEvents.add(event);
}
}
}
}

AnalysisResult result = new AnalysisResult();
result.setEvents(foundEvents);
result.setAnalysisId(UUID.randomUUID().toString());
result.setMetadata(buildMetadata(startTime, logLines, patternService.getPatternSets()));
result.setSummary(buildSummary(foundEvents));

return result;
}

/**
* Extracts the surrounding log lines based on the pattern's extraction rules.
*
* @param allLines The complete array of log lines.
* @param matchIndex The index of the line where the primary pattern matched.
* @param rules The context extraction rules from the matched pattern.
* @return An {@link EventContext} object populated with the relevant lines.
*/
private EventContext extractContext(
String[] allLines, int matchIndex, ContextExtraction rules) {
EventContext context = new EventContext();
context.setMatchedLine(allLines[matchIndex]);

if (rules == null) {
return context;
}
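        // worked example: matchIndex = 10, linesBefore = 3, linesAfter = 2
        // beforeStart = max(0, 10 - 3) = 7, so indices 7..9 are captured before the match
        // afterEnd = min(length, 10 + 1 + 2) = 13, so indices 11..12 are captured after it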

// get lines before the match
int beforeStart = Math.max(0, matchIndex - rules.getLinesBefore());
List<String> beforeLines =
Arrays.asList(Arrays.copyOfRange(allLines, beforeStart, matchIndex));
context.setLinesBefore(beforeLines);

// get lines after the match
int afterEnd = Math.min(allLines.length, matchIndex + 1 + rules.getLinesAfter());
List<String> afterLines =
Arrays.asList(Arrays.copyOfRange(allLines, matchIndex + 1, afterEnd));
context.setLinesAfter(afterLines);

// TODO: Implement stack trace detection logic based on rules.getIncludeStackTrace()

return context;
}

/**
* Builds the metadata object for the analysis result.
*
* @param startTime The timestamp when the analysis began.
* @param logLines The complete array of log lines.
* @param patternSets The list of pattern sets used in the analysis.
* @return A populated {@link AnalysisMetadata} object.
*/
private AnalysisMetadata buildMetadata(
long startTime, String[] logLines, List<PatternSet> patternSets) {
AnalysisMetadata metadata = new AnalysisMetadata();
metadata.setProcessingTimeMs(System.currentTimeMillis() - startTime);
metadata.setTotalLines(logLines.length);
metadata.setAnalyzedAt(Instant.now().toString());

List<String> patternsUsed =
patternSets.stream()
.map(ps -> ps.getMetadata().getLibraryId())
.collect(Collectors.toList());
metadata.setPatternsUsed(patternsUsed);

return metadata;
}

/**
* Builds the summary object for the analysis result.
*
* @param events The list of all events found during the analysis.
* @return A populated {@link AnalysisSummary} object.
*/
private AnalysisSummary buildSummary(List<MatchedEvent> events) {
AnalysisSummary summary = new AnalysisSummary();
summary.setSignificantEvents(events.size());

if (events.isEmpty()) {
summary.setHighestSeverity("NONE");
summary.setSeverityDistribution(Map.of());
return summary;
}

// calculate severity distribution
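        // e.g. events with severities HIGH, HIGH, MEDIUM yield {HIGH=2, MEDIUM=1}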
Map<String, Long> distribution =
events.stream()
.map(e -> e.getMatchedPattern().getSeverity().toUpperCase())
.collect(Collectors.groupingBy(Function.identity(), Collectors.counting()));
summary.setSeverityDistribution(distribution);

// determine highest severity
List<String> severityOrder = List.of("INFO", "LOW", "MEDIUM", "HIGH", "CRITICAL");
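        // severities absent from this list return indexOf() == -1,
        // so unknown values rank below INFO when taking the max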
String highestSeverity =
events.stream()
.map(e -> e.getMatchedPattern().getSeverity().toUpperCase())
.max(Comparator.comparingInt(severityOrder::indexOf))
.orElse("NONE");
summary.setHighestSeverity(highestSeverity);

return summary;
}
}