@@ -89,30 +89,34 @@ public final class AutoIngestJob implements Comparable<AutoIngestJob>, Serializa
*
* @param manifest The manifest for an automated ingest job.
*/
AutoIngestJob(Manifest manifest) {
/*
* Version 0 fields.
*/
this.manifest = manifest;
this.nodeName = "";
this.caseDirectoryPath = "";
this.priority = DEFAULT_PRIORITY;
this.stage = Stage.PENDING;
this.stageStartDate = manifest.getDateFileCreated();
this.dataSourceProcessor = null;
this.ingestJob = null;
this.cancelled = false;
this.completed = false;
this.completedDate = new Date(0);
this.errorsOccurred = false;

/*
* Version 1 fields.
*/
this.version = CURRENT_VERSION;
this.processingStatus = ProcessingStatus.PENDING;
this.numberOfCrashes = 0;
this.stageDetails = this.getProcessingStageDetails();
AutoIngestJob(Manifest manifest) throws AutoIngestJobException {
try {
/*
* Version 0 fields.
*/
this.manifest = manifest;
this.nodeName = "";
this.caseDirectoryPath = "";
this.priority = DEFAULT_PRIORITY;
this.stage = Stage.PENDING;
this.stageStartDate = manifest.getDateFileCreated();
this.dataSourceProcessor = null;
this.ingestJob = null;
this.cancelled = false;
this.completed = false;
this.completedDate = new Date(0);
this.errorsOccurred = false;

/*
* Version 1 fields.
*/
this.version = CURRENT_VERSION;
this.processingStatus = ProcessingStatus.PENDING;
this.numberOfCrashes = 0;
this.stageDetails = this.getProcessingStageDetails();
} catch (Exception ex) {
throw new AutoIngestJobException(String.format("Error creating automated ingest job"), ex);
}
}

/**
@@ -122,30 +126,34 @@ public final class AutoIngestJob implements Comparable<AutoIngestJob>, Serializa
* @param nodeData The coordination service node data for an automated
* ingest job.
*/
AutoIngestJob(AutoIngestJobNodeData nodeData) {
/*
* Version 0 fields.
*/
this.manifest = new Manifest(nodeData.getManifestFilePath(), nodeData.getManifestFileDate(), nodeData.getCaseName(), nodeData.getDeviceId(), nodeData.getDataSourcePath(), Collections.emptyMap());
this.nodeName = nodeData.getProcessingHostName();
this.caseDirectoryPath = nodeData.getCaseDirectoryPath().toString();
this.priority = nodeData.getPriority();
this.stage = nodeData.getProcessingStage();
this.stageStartDate = nodeData.getProcessingStageStartDate();
this.dataSourceProcessor = null; // Transient data not in node data.
this.ingestJob = null; // Transient data not in node data.
this.cancelled = false; // Transient data not in node data.
this.completed = false; // Transient data not in node data.
this.completedDate = nodeData.getCompletedDate();
this.errorsOccurred = nodeData.getErrorsOccurred();

/*
* Version 1 fields.
*/
this.version = CURRENT_VERSION;
this.processingStatus = nodeData.getProcessingStatus();
this.numberOfCrashes = nodeData.getNumberOfCrashes();
this.stageDetails = this.getProcessingStageDetails();
AutoIngestJob(AutoIngestJobNodeData nodeData) throws AutoIngestJobException {
try {
/*
* Version 0 fields.
*/
this.manifest = new Manifest(nodeData.getManifestFilePath(), nodeData.getManifestFileDate(), nodeData.getCaseName(), nodeData.getDeviceId(), nodeData.getDataSourcePath(), Collections.emptyMap());
this.nodeName = nodeData.getProcessingHostName();
this.caseDirectoryPath = nodeData.getCaseDirectoryPath().toString();
this.priority = nodeData.getPriority();
this.stage = nodeData.getProcessingStage();
this.stageStartDate = nodeData.getProcessingStageStartDate();
this.dataSourceProcessor = null; // Transient data not in node data.
this.ingestJob = null; // Transient data not in node data.
this.cancelled = false; // Transient data not in node data.
this.completed = false; // Transient data not in node data.
this.completedDate = nodeData.getCompletedDate();
this.errorsOccurred = nodeData.getErrorsOccurred();

/*
* Version 1 fields.
*/
this.version = CURRENT_VERSION;
this.processingStatus = nodeData.getProcessingStatus();
this.numberOfCrashes = nodeData.getNumberOfCrashes();
this.stageDetails = this.getProcessingStageDetails();
} catch (Exception ex) {
throw new AutoIngestJobException(String.format("Error creating automated ingest job"), ex);
}
}

/**
@@ -622,5 +630,33 @@ Date getStartDate() {
}

}

/**
* Exception thrown when there is a problem creating auto ingest job.
*/
final static class AutoIngestJobException extends Exception {

private static final long serialVersionUID = 1L;

/**
* Constructs an exception to throw when there is a problem creating
* auto ingest job.
*
* @param message The exception message.
*/
private AutoIngestJobException(String message) {
super(message);
}

/**
* Constructs an exception to throw when there is a problem creating
* auto ingest job.
*
* @param message The exception message.
* @param cause The cause of the exception, if it was an exception.
*/
private AutoIngestJobException(String message, Throwable cause) {
super(message, cause);
}
}
}
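Both AutoIngestJob constructors now declare the new checked AutoIngestJobException, so every construction site has to handle the failure explicitly instead of assuming construction succeeds. A minimal sketch of that calling pattern, assuming a hypothetical helper class in the same autoingest package (the class, method, and logger names are illustrative, not part of this change set):

```java
import java.util.logging.Level;
import java.util.logging.Logger;

// Hypothetical helper showing the contract introduced by this change: the
// AutoIngestJob constructors can now fail with a checked AutoIngestJobException.
// Assumed to live in the same package as AutoIngestJob and AutoIngestJobNodeData.
final class JobConstructionExample {

    private static final Logger LOGGER = Logger.getLogger(JobConstructionExample.class.getName());

    /**
     * Attempts to rebuild a job from coordination service node data.
     *
     * @return The job, or null if the node data could not be turned into a job.
     */
    static AutoIngestJob tryCreateJob(AutoIngestJobNodeData nodeData) {
        try {
            return new AutoIngestJob(nodeData); // may throw AutoIngestJobException
        } catch (AutoIngestJob.AutoIngestJobException ex) {
            // Contain the failure here so one bad manifest or node data record
            // does not abort the caller's larger scan or deletion pass.
            LOGGER.log(Level.SEVERE, "Error creating automated ingest job", ex);
            return null;
        }
    }
}
```

This mirrors how the AutoIngestManager changes below catch AutoIngestJobException alongside InvalidDataException at the points where jobs are reconstructed.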
@@ -61,6 +61,7 @@
import javax.annotation.concurrent.GuardedBy;
import javax.annotation.concurrent.Immutable;
import javax.annotation.concurrent.ThreadSafe;
import org.openide.util.Exceptions;
import org.openide.util.Lookup;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.Case.CaseType;
@@ -93,6 +94,7 @@
import org.sleuthkit.autopsy.experimental.configuration.SharedConfiguration.SharedConfigurationException;
import org.sleuthkit.autopsy.datasourceprocessors.AutoIngestDataSourceProcessor;
import org.sleuthkit.autopsy.datasourceprocessors.AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException;
import org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJob.AutoIngestJobException;
import org.sleuthkit.autopsy.ingest.IngestJob;
import org.sleuthkit.autopsy.ingest.IngestJob.CancellationReason;
import org.sleuthkit.autopsy.ingest.IngestJobSettings;
@@ -759,7 +761,7 @@ CaseDeletionResult deleteCase(String caseName, Path caseDirectoryPath) {
AutoIngestJob deletedJob = new AutoIngestJob(nodeData);
deletedJob.setProcessingStatus(AutoIngestJob.ProcessingStatus.DELETED);
this.updateCoordinationServiceNode(deletedJob);
} catch (AutoIngestJobNodeData.InvalidDataException ex) {
} catch (AutoIngestJobNodeData.InvalidDataException | AutoIngestJobException ex) {
SYS_LOGGER.log(Level.WARNING, String.format("Invalid auto ingest job node data for %s", manifestPath), ex);
return CaseDeletionResult.PARTIALLY_DELETED;
} catch (InterruptedException | CoordinationServiceException ex) {
@@ -1015,92 +1017,103 @@ public FileVisitResult preVisitDirectory(Path dirPath, BasicFileAttributes dirAt
* @return TERMINATE if auto ingest is shutting down, CONTINUE if it has
* not.
*
* @throws IOException if an I/O error occurs, but this implementation
* does not throw.
*/
@Override
public FileVisitResult visitFile(Path filePath, BasicFileAttributes attrs) throws IOException {
public FileVisitResult visitFile(Path filePath, BasicFileAttributes attrs) {
if (Thread.currentThread().isInterrupted()) {
return TERMINATE;
}

Manifest manifest = null;
for (ManifestFileParser parser : Lookup.getDefault().lookupAll(ManifestFileParser.class)) {
if (parser.fileIsManifest(filePath)) {
try {
manifest = parser.parse(filePath);
break;
} catch (ManifestFileParserException ex) {
SYS_LOGGER.log(Level.SEVERE, String.format("Error attempting to parse %s with parser %s", filePath, parser.getClass().getCanonicalName()), ex);
try {
Manifest manifest = null;
for (ManifestFileParser parser : Lookup.getDefault().lookupAll(ManifestFileParser.class)) {
if (parser.fileIsManifest(filePath)) {
try {
manifest = parser.parse(filePath);
break;
} catch (ManifestFileParserException ex) {
SYS_LOGGER.log(Level.SEVERE, String.format("Error attempting to parse %s with parser %s", filePath, parser.getClass().getCanonicalName()), ex);
}
}
if (Thread.currentThread().isInterrupted()) {
return TERMINATE;
}
}

if (Thread.currentThread().isInterrupted()) {
return TERMINATE;
}
}

if (Thread.currentThread().isInterrupted()) {
return TERMINATE;
}

if (null != manifest) {
/*
if (null != manifest) {
/*
* Update the mapping of case names to manifest paths that is
* used for case deletion.
*/
String caseName = manifest.getCaseName();
Path manifestPath = manifest.getFilePath();
if (casesToManifests.containsKey(caseName)) {
Set<Path> manifestPaths = casesToManifests.get(caseName);
manifestPaths.add(manifestPath);
} else {
Set<Path> manifestPaths = new HashSet<>();
manifestPaths.add(manifestPath);
casesToManifests.put(caseName, manifestPaths);
}
*/
String caseName = manifest.getCaseName();
Path manifestPath = manifest.getFilePath();
if (casesToManifests.containsKey(caseName)) {
Set<Path> manifestPaths = casesToManifests.get(caseName);
manifestPaths.add(manifestPath);
} else {
Set<Path> manifestPaths = new HashSet<>();
manifestPaths.add(manifestPath);
casesToManifests.put(caseName, manifestPaths);
}

/*
/*
* Add a job to the pending jobs queue, the completed jobs list,
* or do crashed job recovery, as required.
*/
try {
byte[] rawData = coordinationService.getNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestPath.toString());
if (null != rawData && rawData.length > 0) {
try {
AutoIngestJobNodeData nodeData = new AutoIngestJobNodeData(rawData);
AutoIngestJob.ProcessingStatus processingStatus = nodeData.getProcessingStatus();
switch (processingStatus) {
case PENDING:
addPendingJob(manifest, nodeData);
break;
case PROCESSING:
doRecoveryIfCrashed(manifest, nodeData);
break;
case COMPLETED:
addCompletedJob(manifest, nodeData);
break;
case DELETED:
/*
*/
try {
byte[] rawData = coordinationService.getNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestPath.toString());
if (null != rawData && rawData.length > 0) {
try {
AutoIngestJobNodeData nodeData = new AutoIngestJobNodeData(rawData);
AutoIngestJob.ProcessingStatus processingStatus = nodeData.getProcessingStatus();
switch (processingStatus) {
case PENDING:
addPendingJob(manifest, nodeData);
break;
case PROCESSING:
doRecoveryIfCrashed(manifest, nodeData);
break;
case COMPLETED:
addCompletedJob(manifest, nodeData);
break;
case DELETED:
/*
* Ignore jobs marked as "deleted."
*/
break;
default:
SYS_LOGGER.log(Level.SEVERE, "Unknown ManifestNodeData.ProcessingStatus");
break;
*/
break;
default:
SYS_LOGGER.log(Level.SEVERE, "Unknown ManifestNodeData.ProcessingStatus");
break;
}
} catch (AutoIngestJobNodeData.InvalidDataException | AutoIngestJobException ex) {
SYS_LOGGER.log(Level.SEVERE, String.format("Invalid auto ingest job node data for %s", manifestPath), ex);
}
} else {
try {
addNewPendingJob(manifest);
} catch (AutoIngestJobException ex) {
SYS_LOGGER.log(Level.SEVERE, String.format("Invalid manifest data for %s", manifestPath), ex);
}
} catch (AutoIngestJobNodeData.InvalidDataException ex) {
SYS_LOGGER.log(Level.WARNING, String.format("Invalid auto ingest job node data for %s", manifestPath), ex);
}
} else {
addNewPendingJob(manifest);
} catch (CoordinationServiceException ex) {
SYS_LOGGER.log(Level.SEVERE, String.format("Error transmitting node data for %s", manifestPath), ex);
return CONTINUE;
} catch (InterruptedException ex) {
Thread.currentThread().interrupt();
return TERMINATE;
}
} catch (CoordinationServiceException ex) {
SYS_LOGGER.log(Level.SEVERE, String.format("Error transmitting node data for %s", manifestPath), ex);
return CONTINUE;
} catch (InterruptedException ex) {
Thread.currentThread().interrupt();
return TERMINATE;
}

} catch (Exception ex) {
// Catch all unhandled and unexpected exceptions. Otherwise one bad file
// can stop the entire input folder scanning. Given that the exception is unexpected,
// I'm hesitant to add logging which requires accessing or de-referencing data.
SYS_LOGGER.log(Level.SEVERE, "Unexpected exception in file visitor", ex);
return CONTINUE;
}

if (!Thread.currentThread().isInterrupted()) {
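The catch-all added at the end of visitFile() applies a standard FileVisitor containment pattern: a failure on one file is logged and the walk continues, so a single bad manifest cannot stop the whole input folder scan. A small self-contained sketch of that pattern under the same reasoning (generic names, not the AutoIngestManager code itself):

```java
import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.logging.Level;
import java.util.logging.Logger;

public final class ResilientScanExample {

    private static final Logger LOGGER = Logger.getLogger(ResilientScanExample.class.getName());

    public static void main(String[] args) throws IOException {
        // Walk an input tree; one bad file must not stop the whole scan.
        Path root = Paths.get(args.length > 0 ? args[0] : ".");
        Files.walkFileTree(root, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) {
                if (Thread.currentThread().isInterrupted()) {
                    return FileVisitResult.TERMINATE; // shut down promptly on interrupt
                }
                try {
                    process(file); // per-file work that may fail unexpectedly
                } catch (Exception ex) {
                    // Contain the failure and keep scanning the remaining files,
                    // the same containment this change adds to visitFile().
                    LOGGER.log(Level.SEVERE, "Unexpected exception while scanning " + file, ex);
                }
                return FileVisitResult.CONTINUE;
            }
        });
    }

    private static void process(Path file) {
        // Placeholder for real per-file work (e.g. manifest parsing).
    }
}
```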
@@ -1122,7 +1135,7 @@ public FileVisitResult visitFile(Path filePath, BasicFileAttributes attrs) throw
* blocked, i.e., if auto ingest is
* shutting down.
*/
private void addPendingJob(Manifest manifest, AutoIngestJobNodeData nodeData) throws InterruptedException {
private void addPendingJob(Manifest manifest, AutoIngestJobNodeData nodeData) throws InterruptedException, AutoIngestJobException {
AutoIngestJob job;
if (nodeData.getVersion() == AutoIngestJobNodeData.getCurrentVersion()) {
job = new AutoIngestJob(nodeData);
@@ -1176,7 +1189,7 @@ private void addPendingJob(Manifest manifest, AutoIngestJobNodeData nodeData) th
* blocked, i.e., if auto ingest is
* shutting down.
*/
private void addNewPendingJob(Manifest manifest) throws InterruptedException {
private void addNewPendingJob(Manifest manifest) throws InterruptedException, AutoIngestJobException {
/*
* Create the coordination service node data for the job. Note that
* getting the lock will create the node for the job (with no data)
@@ -1218,7 +1231,7 @@ private void addNewPendingJob(Manifest manifest) throws InterruptedException {
* blocked, i.e., if auto ingest is
* shutting down.
*/
private void doRecoveryIfCrashed(Manifest manifest, AutoIngestJobNodeData nodeData) throws InterruptedException {
private void doRecoveryIfCrashed(Manifest manifest, AutoIngestJobNodeData nodeData) throws InterruptedException, AutoIngestJobException {
/*
* Try to get an exclusive lock on the coordination service node for
* the job. If the lock cannot be obtained, another host in the auto
@@ -1314,7 +1327,7 @@ private void doRecoveryIfCrashed(Manifest manifest, AutoIngestJobNodeData nodeDa
* @throws CoordinationServiceException
* @throws InterruptedException
*/
private void addCompletedJob(Manifest manifest, AutoIngestJobNodeData nodeData) throws CoordinationServiceException, InterruptedException {
private void addCompletedJob(Manifest manifest, AutoIngestJobNodeData nodeData) throws CoordinationServiceException, InterruptedException, AutoIngestJobException {
Path caseDirectoryPath = PathUtils.findCaseDirectory(rootOutputDirectory, manifest.getCaseName());
if (null != caseDirectoryPath) {
AutoIngestJob job;