diff --git a/Jenkinsfile b/Jenkinsfile
index 16223c0..0a63820 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -2,8 +2,8 @@ pipeline {
agent any
environment {
- JDK_TOOL_NAME = 'JDK 11'
- MAVEN_TOOL_NAME = 'Maven 3.8.6'
+ JDK_TOOL_NAME = 'JDK 21'
+ MAVEN_TOOL_NAME = 'Maven 3.9.9'
}
options {
@@ -48,7 +48,7 @@ pipeline {
stage('Code Analysis') {
when {
anyOf {
- branch 'master'
+ branch 'main'
tag 'v*'
changeRequest()
}
@@ -58,7 +58,7 @@ pipeline {
}
steps {
withMaven(jdk: env.JDK_TOOL_NAME, maven: env.MAVEN_TOOL_NAME) {
- sh 'mvn pmd:pmd pmd:cpd spotbugs:spotbugs'
+ sh 'mvn -DskipTests=true package pmd:pmd pmd:cpd spotbugs:spotbugs'
}
}
post {
@@ -70,7 +70,7 @@ pipeline {
stage('Unit Tests') {
when {
anyOf {
- branch 'master'
+ branch 'main'
tag 'v*'
changeRequest()
}
@@ -92,13 +92,13 @@ pipeline {
stage('Integration Tests') {
when {
anyOf {
- branch 'master'
+ branch 'main'
tag 'v*'
changeRequest()
}
}
options {
- timeout(activity: true, time: 120, unit: 'SECONDS')
+ timeout(activity: true, time: 1800, unit: 'SECONDS')
}
steps {
withMaven(jdk: env.JDK_TOOL_NAME, maven: env.MAVEN_TOOL_NAME) {
@@ -110,14 +110,17 @@ pipeline {
junit testResults: '**/target/failsafe-reports/TEST-*.xml', allowEmptyResults: true
}
success {
- publishCoverage adapters: [jacoco(mergeToOneReport: true, path: '**/target/site/jacoco/jacoco.xml')]
+ discoverReferenceBuild()
+ recordCoverage(tools: [[ parser: 'JACOCO' ]],
+ id: 'jacoco', name: 'JaCoCo Coverage',
+ sourceCodeRetention: 'LAST_BUILD')
}
}
}
stage('Deploy Jar to Internal Nexus Repository') {
when {
anyOf {
- branch 'master'
+ branch 'main'
tag 'v*'
}
}
diff --git a/pom.xml b/pom.xml
index f4d6377..64dc3a2 100644
--- a/pom.xml
+++ b/pom.xml
@@ -4,7 +4,7 @@
<groupId>de.rub.nds</groupId>
<artifactId>protocol-toolkit-bom</artifactId>
- <version>4.4.0</version>
+ <version>5.2.0</version>
</parent>
<artifactId>crawler-core</artifactId>
@@ -80,8 +80,8 @@
UTF-8
UTF-8
- 11
- 11
+ 21
+ 21
false
${skipTests}
@@ -222,11 +222,11 @@
4
- 1.15.0
+ 1.25.2
- ${project.basedir}/license_header_plain.txt
+ ${basedir}/license_header_plain.txt
diff --git a/src/main/java/de/rub/nds/crawler/core/jobs/PublishBulkScanJob.java b/src/main/java/de/rub/nds/crawler/core/jobs/PublishBulkScanJob.java
index 877de05..1459b1a 100644
--- a/src/main/java/de/rub/nds/crawler/core/jobs/PublishBulkScanJob.java
+++ b/src/main/java/de/rub/nds/crawler/core/jobs/PublishBulkScanJob.java
@@ -78,16 +78,22 @@ public void execute(JobExecutionContext context) throws JobExecutionException {
long submittedJobs = parsedJobStatuses.getOrDefault(JobStatus.TO_BE_EXECUTED, 0L);
long unresolvedJobs = parsedJobStatuses.getOrDefault(JobStatus.UNRESOLVABLE, 0L);
long denylistedJobs = parsedJobStatuses.getOrDefault(JobStatus.DENYLISTED, 0L);
- long resolutionErrorJobs = parsedJobStatuses.getOrDefault(JobStatus.RESOLUTION_ERROR, 0L);
+ long resolutionErrorJobs =
+ parsedJobStatuses.getOrDefault(JobStatus.RESOLUTION_ERROR, 0L);
bulkScan.setScanJobsPublished(submittedJobs);
- bulkScan.setScanJobsResolutionErrors(unresolvedJobs+resolutionErrorJobs);
+ bulkScan.setScanJobsResolutionErrors(unresolvedJobs + resolutionErrorJobs);
bulkScan.setScanJobsDenylisted(denylistedJobs);
persistenceProvider.updateBulkScan(bulkScan);
if (controllerConfig.isMonitored() && submittedJobs == 0) {
progressMonitor.stopMonitoringAndFinalizeBulkScan(bulkScan.get_id());
}
- LOGGER.info("Submitted {} scan jobs to RabbitMq (Not submitted: {} Unresolvable, {} Denylisted, {} unhandled Error)", submittedJobs,unresolvedJobs, denylistedJobs, resolutionErrorJobs);
+ LOGGER.info(
+ "Submitted {} scan jobs to RabbitMq (Not submitted: {} Unresolvable, {} Denylisted, {} unhandled Error)",
+ submittedJobs,
+ unresolvedJobs,
+ denylistedJobs,
+ resolutionErrorJobs);
} catch (Exception e) {
LOGGER.error("Exception while publishing BulkScan: ", e);
JobExecutionException e2 = new JobExecutionException(e);
@@ -124,17 +130,21 @@ public JobStatus apply(String targetString) {
var targetInfo =
ScanTarget.fromTargetString(targetString, defaultPort, denylistProvider);
jobDescription =
- new ScanJobDescription(targetInfo.getLeft(), bulkScan, targetInfo.getRight());
- }catch (Exception e){
- jobDescription = new ScanJobDescription(new ScanTarget(), bulkScan, JobStatus.RESOLUTION_ERROR);
+ new ScanJobDescription(
+ targetInfo.getLeft(), bulkScan, targetInfo.getRight());
+ } catch (Exception e) {
+ jobDescription =
+ new ScanJobDescription(
+ new ScanTarget(), bulkScan, JobStatus.RESOLUTION_ERROR);
errorResult = ScanResult.fromException(jobDescription, e);
- LOGGER.error("Error while creating ScanJobDescription for target '{}'", targetString, e);
+ LOGGER.error(
+ "Error while creating ScanJobDescription for target '{}'", targetString, e);
}
if (jobDescription.getStatus() == JobStatus.TO_BE_EXECUTED) {
orchestrationProvider.submitScanJob(jobDescription);
} else {
- if(errorResult == null){
+ if (errorResult == null) {
errorResult = new ScanResult(jobDescription, null);
}
persistenceProvider.insertScanResult(errorResult, jobDescription);
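
The counts in execute() above are read from parsedJobStatuses with getOrDefault, so statuses that never occurred simply count as zero. As a minimal, self-contained sketch of how such a Map<JobStatus, Long> can be produced (the stand-in enum and the grouping step below are illustrative assumptions, not code from this repository):

import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;

public class JobStatusCountSketch {

    // Stand-in for the crawler's JobStatus enum; only the values referenced above.
    enum JobStatus { TO_BE_EXECUTED, UNRESOLVABLE, DENYLISTED, RESOLUTION_ERROR }

    public static void main(String[] args) {
        // Hypothetical per-target outcomes, e.g. the values returned by apply() above.
        List<JobStatus> perTargetStatuses = List.of(
                JobStatus.TO_BE_EXECUTED, JobStatus.TO_BE_EXECUTED,
                JobStatus.UNRESOLVABLE, JobStatus.DENYLISTED);

        // Group identical statuses and count them, yielding a Map<JobStatus, Long>.
        Map<JobStatus, Long> parsedJobStatuses = perTargetStatuses.stream()
                .collect(Collectors.groupingBy(Function.identity(), Collectors.counting()));

        // Missing statuses fall back to 0L, as in PublishBulkScanJob.execute().
        long submittedJobs = parsedJobStatuses.getOrDefault(JobStatus.TO_BE_EXECUTED, 0L);
        long resolutionErrorJobs = parsedJobStatuses.getOrDefault(JobStatus.RESOLUTION_ERROR, 0L);
        System.out.println(submittedJobs + " submitted, " + resolutionErrorJobs + " resolution errors");
    }
}

Collectors.counting() produces Long values, which is why the fallbacks are written as 0L rather than 0.
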
diff --git a/src/main/java/de/rub/nds/crawler/targetlist/TargetFileProvider.java b/src/main/java/de/rub/nds/crawler/targetlist/TargetFileProvider.java
index 2242653..f1834f4 100644
--- a/src/main/java/de/rub/nds/crawler/targetlist/TargetFileProvider.java
+++ b/src/main/java/de/rub/nds/crawler/targetlist/TargetFileProvider.java
@@ -33,8 +33,9 @@ public List<String> getTargetList() {
List<String> targetList;
try (Stream<String> lines = Files.lines(Paths.get(filename))) {
// remove comments and empty lines
- targetList = lines.filter(line -> !(line.startsWith("#") || line.isEmpty()))
- .collect(Collectors.toList());
+ targetList =
+ lines.filter(line -> !(line.startsWith("#") || line.isEmpty()))
+ .collect(Collectors.toList());
} catch (IOException ex) {
throw new RuntimeException("Could not load " + filename, ex);
}
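
The filter above drops comment lines and empty lines before the targets are handed on. A small stand-alone sketch of the same filtering, with a hypothetical temporary file standing in for the configured target list:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class TargetFileFilterSketch {

    public static void main(String[] args) throws IOException {
        // Hypothetical target list: one host per line, '#' starts a comment line.
        Path file = Files.createTempFile("targets", ".txt");
        Files.writeString(file, "# example targets\nexample.com\n\nexample.org:8443\n");

        List<String> targetList;
        try (Stream<String> lines = Files.lines(file)) {
            // Same filter as TargetFileProvider: drop comment lines and empty lines.
            targetList = lines.filter(line -> !(line.startsWith("#") || line.isEmpty()))
                    .collect(Collectors.toList());
        }

        // Prints [example.com, example.org:8443]
        System.out.println(targetList);
    }
}

Note that the check is startsWith("#") on the raw line, so a comment is only recognized when the '#' is the first character of the line.
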