diff --git a/api/src/org/labkey/api/admin/FolderImportContext.java b/api/src/org/labkey/api/admin/FolderImportContext.java index 006b704a66a..786f5a31650 100644 --- a/api/src/org/labkey/api/admin/FolderImportContext.java +++ b/api/src/org/labkey/api/admin/FolderImportContext.java @@ -29,11 +29,10 @@ import org.labkey.api.util.XmlValidationException; import org.labkey.api.writer.VirtualFile; import org.labkey.folder.xml.FolderDocument; +import org.labkey.vfs.FileLike; import java.io.IOException; import java.io.InputStream; -import java.nio.file.Files; -import java.nio.file.Path; import java.util.HashMap; import java.util.HashSet; import java.util.Map; @@ -47,7 +46,7 @@ */ public class FolderImportContext extends AbstractFolderContext { - private Path _folderXml; + private FileLike _folderXml; private String _xarJobId; @@ -66,7 +65,7 @@ public FolderImportContext() super(null, null, null, null, null, null); } - public FolderImportContext(User user, Container c, Path folderXml, Set dataTypes, LoggerGetter logger, VirtualFile root) + public FolderImportContext(User user, Container c, FileLike folderXml, Set dataTypes, LoggerGetter logger, VirtualFile root) { super(user, c, null, dataTypes, logger, root); _folderXml = folderXml; @@ -112,17 +111,17 @@ public synchronized FolderDocument getDocument() throws ImportException return folderDoc; } - private FolderDocument readFolderDocument(Path folderXml) throws ImportException, IOException + private FolderDocument readFolderDocument(FileLike folderXml) throws ImportException, IOException { - if (!Files.exists(folderXml)) - throw new ImportException(folderXml.getFileName() + " file does not exist."); + if (!folderXml.exists()) + throw new ImportException(folderXml.getName() + " file does not exist."); FolderDocument folderDoc; - try (InputStream inputStream = Files.newInputStream(folderXml)) + try (InputStream inputStream = folderXml.openInputStream()) { folderDoc = FolderDocument.Factory.parse(inputStream, 
XmlBeansUtil.getDefaultParseOptions()); - XmlBeansUtil.validateXmlDocument(folderDoc, folderXml.getFileName().toString()); + XmlBeansUtil.validateXmlDocument(folderDoc, folderXml.getName()); } catch (XmlException | XmlValidationException e) { diff --git a/api/src/org/labkey/api/admin/ImportOptions.java b/api/src/org/labkey/api/admin/ImportOptions.java index 020a175dbc3..ce40a2f73cd 100644 --- a/api/src/org/labkey/api/admin/ImportOptions.java +++ b/api/src/org/labkey/api/admin/ImportOptions.java @@ -19,6 +19,7 @@ import org.labkey.api.data.Activity; import org.labkey.api.security.User; import org.labkey.api.security.UserManager; +import org.labkey.vfs.FileLike; import java.nio.file.Path; import java.util.Collection; @@ -42,7 +43,7 @@ public class ImportOptions private final Collection _messages = new LinkedList<>(); private Set _dataTypes; private Activity _activity; - private Path _analysisDir; + private FileLike _analysisDir; private String _folderArchiveSourceName = null; private boolean _isNewFolderImport; // if we know the target folder is empty, can skip certain merge logic @@ -153,12 +154,12 @@ public void setActivity(Activity activity) _activity = activity; } - public Path getAnalysisDir() + public FileLike getAnalysisDir() { return _analysisDir; } - public void setAnalysisDir(Path analysisDir) + public void setAnalysisDir(FileLike analysisDir) { _analysisDir = analysisDir; } diff --git a/api/src/org/labkey/api/admin/InvalidFileException.java b/api/src/org/labkey/api/admin/InvalidFileException.java index c156765b723..e5182175593 100644 --- a/api/src/org/labkey/api/admin/InvalidFileException.java +++ b/api/src/org/labkey/api/admin/InvalidFileException.java @@ -20,18 +20,19 @@ import org.apache.xmlbeans.XmlException; import org.labkey.api.util.XmlValidationException; import org.labkey.api.writer.VirtualFile; +import org.labkey.vfs.FileLike; import java.io.File; import java.nio.file.Path; public class InvalidFileException extends ImportException { - @Deprecated 
// prefer the Path version - public InvalidFileException(File root, File file, Throwable t) + public InvalidFileException(FileLike root, FileLike file, Throwable t) { - this(root.toPath(), file.toPath(), t); + this(root.toNioPathForRead(), file.toNioPathForRead(), t); } + @Deprecated // prefer the FileLike version public InvalidFileException(Path root, Path file, Throwable t) { super(getErrorString(root, file, t.getMessage())); @@ -42,21 +43,11 @@ public InvalidFileException(VirtualFile root, File file, Throwable t) super(getErrorString(root.getRelativePath(file.getName()), t.getMessage())); } - public InvalidFileException(File root, File file, XmlException e) - { - super(getErrorString(root, file, e)); - } - public InvalidFileException(VirtualFile root, File file, XmlException e) { super(getErrorString(root, file, e)); } - public InvalidFileException(File root, File file, XmlValidationException e) - { - super(getErrorString(root, file, (String)null), e); - } - public InvalidFileException(VirtualFile root, File file, XmlValidationException e) { super(getErrorString(root.getRelativePath(file.getName()), null), e); diff --git a/api/src/org/labkey/api/assay/AbstractAssayProvider.java b/api/src/org/labkey/api/assay/AbstractAssayProvider.java index 666153638d5..5dc3c2cf5a1 100644 --- a/api/src/org/labkey/api/assay/AbstractAssayProvider.java +++ b/api/src/org/labkey/api/assay/AbstractAssayProvider.java @@ -116,7 +116,6 @@ import org.labkey.api.view.NotFoundException; import org.labkey.api.view.ViewContext; import org.labkey.vfs.FileLike; -import org.labkey.vfs.FileSystemLike; import org.springframework.validation.BindException; import org.springframework.web.servlet.ModelAndView; import org.springframework.web.servlet.mvc.Controller; @@ -285,12 +284,7 @@ public ActionURL linkToStudy(User user, Container assayDataContainer, ExpProtoco dataMap.put(StudyPublishService.TARGET_STUDY_PROPERTY_NAME, targetStudyContainer); // Remember which rows we're planning to link, 
partitioned by the target study - Set rowIds = rowIdsByTargetContainer.get(targetStudyContainer); - if (rowIds == null) - { - rowIds = new HashSet<>(); - rowIdsByTargetContainer.put(targetStudyContainer, rowIds); - } + Set rowIds = rowIdsByTargetContainer.computeIfAbsent(targetStudyContainer, k -> new HashSet<>()); rowIds.add(publishKey.getDataId()); dataMaps.add(dataMap); @@ -732,7 +726,7 @@ private void addReusableData(Map reusableFiles, } @Override - public AssayRunCreator getRunCreator() + public AssayRunCreator getRunCreator() { return new DefaultAssayRunCreator<>(this); } @@ -1459,13 +1453,13 @@ public AssaySaveHandler getSaveHandler() } @Override - public AssayRunUploadContext.Factory createRunUploadFactory(ExpProtocol protocol, ViewContext context) + public AssayRunUploadContext.Factory> createRunUploadFactory(ExpProtocol protocol, ViewContext context) { return new AssayRunUploadContextImpl.Factory<>(protocol, this, context); } @Override - public AssayRunUploadContext.Factory createRunUploadFactory(ExpProtocol protocol, User user, Container c) + public AssayRunUploadContext.Factory> createRunUploadFactory(ExpProtocol protocol, User user, Container c) { return new AssayRunUploadContextImpl.Factory<>(protocol, this, user, c); } @@ -1518,15 +1512,15 @@ public DataExchangeHandler createDataExchangeHandler() } @Override - public AssayRunDatabaseContext createRunDatabaseContext(ExpRun run, User user, HttpServletRequest request) + public AssayRunDatabaseContext createRunDatabaseContext(ExpRun run, User user, HttpServletRequest request) { - return new AssayRunDatabaseContext(run, user, request); + return new AssayRunDatabaseContext<>(run, user, request); } @Override public AssayRunAsyncContext createRunAsyncContext(AssayRunUploadContext context) throws IOException, ExperimentException { - return new AssayRunAsyncContext(context); + return new AssayRunAsyncContext<>(context); } @Override @@ -1698,16 +1692,6 @@ public Pair getAssayResultRowIdFromLsid(Container 
containe return Pair.of(protocol, rowId); } - @Override - public @Nullable ActionURL getResultRowURL(Container container, Lsid lsid) - { - var pair = getAssayResultRowIdFromLsid(container, lsid); - if (pair == null) - return null; - - return PageFlowUtil.urlProvider(AssayUrls.class).getAssayResultRowURL(this, container, pair.first, pair.second); - } - @Override public boolean supportsFlagColumnType(ExpProtocol.AssayDomainTypes type) { @@ -2141,10 +2125,7 @@ private void updateDataFileUrl(List runs, Container sourceContainer, Con AuditLogService.get().addEvent(user, event); } } - catch (Exception e) - { - - } + catch (Exception ignored) {} } } @@ -2152,7 +2133,7 @@ protected void moveAssayResults(List runs, ExpProtocol protocol, Contain { String tableName = AssayProtocolSchema.DATA_TABLE_NAME; AssaySchema schema = createProtocolSchema(user, targetContainer, protocol, null); - FilteredTable assayResultTable = (FilteredTable) schema.getTable(tableName); + FilteredTable assayResultTable = (FilteredTable) schema.getTable(tableName); if (assayResultTable == null) return; @@ -2161,7 +2142,7 @@ protected void moveAssayResults(List runs, ExpProtocol protocol, Contain record AssayFileMoveReference(String sourceFilePath, File updatedFile, String runName, String fieldName) {} - private void updateResultFiles(FilteredTable assayResultTable, List runs, ExpProtocol assayProtocol, Container sourceContainer, Container targetContainer, User user, AssayMoveData assayMoveData) throws ExperimentException + private void updateResultFiles(FilteredTable assayResultTable, List runs, ExpProtocol assayProtocol, Container sourceContainer, Container targetContainer, User user, AssayMoveData assayMoveData) throws ExperimentException { FileContentService fileContentService = FileContentService.get(); if (fileContentService == null) diff --git a/api/src/org/labkey/api/assay/AssayProvider.java b/api/src/org/labkey/api/assay/AssayProvider.java index d94874809db..0bb5ce2cc5c 100644 --- 
a/api/src/org/labkey/api/assay/AssayProvider.java +++ b/api/src/org/labkey/api/assay/AssayProvider.java @@ -297,7 +297,7 @@ enum Scope */ DataExchangeHandler createDataExchangeHandler(); /** Make a context that knows how to update a run that's already been stored in the database */ - AssayRunDatabaseContext createRunDatabaseContext(ExpRun run, User user, HttpServletRequest request); + AssayRunDatabaseContext createRunDatabaseContext(ExpRun run, User user, HttpServletRequest request); /** * Make a context that knows how to do the import in the background, on a separate thread * (and therefore detached from the HTTP request that might have spawned it) @@ -322,11 +322,6 @@ enum Scope @Nullable Pair getAssayResultRowIdFromLsid(Container container, Lsid assayResultRowLsid); - /** - * Get the URL for an assay result row's LSID. - */ - @Nullable ActionURL getResultRowURL(Container container, Lsid lsid); - /** * Return a SQL pattern that can be used to match a protocol's LSID to this AssayProvider. 
* The pattern must match a protocol's LSID in the same manner as diff --git a/api/src/org/labkey/api/assay/DefaultAssayRunCreator.java b/api/src/org/labkey/api/assay/DefaultAssayRunCreator.java index f48283bf482..fdcbef17b5b 100644 --- a/api/src/org/labkey/api/assay/DefaultAssayRunCreator.java +++ b/api/src/org/labkey/api/assay/DefaultAssayRunCreator.java @@ -220,9 +220,9 @@ private ExpExperiment saveExperimentRunAsync(AssayRunUploadContext // Choose another file as the primary primaryFile = context.getUploadedData().entrySet().iterator().next().getValue(); } - primaryFile = Objects.requireNonNull(primaryFile); + Objects.requireNonNull(primaryFile); AssayRunAsyncContext asyncContext = context.getProvider().createRunAsyncContext(context); - final AssayUploadPipelineJob pipelineJob = new AssayUploadPipelineJob( + final AssayUploadPipelineJob pipelineJob = new AssayUploadPipelineJob<>( asyncContext, info, batch, diff --git a/api/src/org/labkey/api/assay/transform/AnalysisScript.java b/api/src/org/labkey/api/assay/transform/AnalysisScript.java index e43020d4cd1..9c4776ecb97 100644 --- a/api/src/org/labkey/api/assay/transform/AnalysisScript.java +++ b/api/src/org/labkey/api/assay/transform/AnalysisScript.java @@ -40,14 +40,7 @@ private AnalysisScript(File script, List operations) private AnalysisScript(File script) { - try - { - _script = FileSystemLike.wrapFile(script.getParentFile(), script); - } - catch (IOException e) - { - throw UnexpectedException.wrap(e); - } + _script = new FileSystemLike.Builder(script).build().getRoot(); } public FileLike getScript() diff --git a/api/src/org/labkey/api/cloud/CloudArchiveImporterSupport.java b/api/src/org/labkey/api/cloud/CloudArchiveImporterSupport.java index a55bfee6af7..856eb9837f1 100644 --- a/api/src/org/labkey/api/cloud/CloudArchiveImporterSupport.java +++ b/api/src/org/labkey/api/cloud/CloudArchiveImporterSupport.java @@ -26,7 +26,7 @@ public interface CloudArchiveImporterSupport default void 
downloadCloudArchive(@NotNull PipelineJob job, @NotNull Path studyXml, BindException errors) throws UnsupportedOperationException { //check if cloud based pipeline root, and study xml hasn't been downloaded already - if (!studyXml.startsWith(job.getPipeRoot().getImportDirectory().toPath().toAbsolutePath())) + if (!studyXml.startsWith(job.getPipeRoot().getImportDirectory().toNioPathForRead().toAbsolutePath())) { if (CloudStoreService.get() != null) //proxy of is Cloud Module enabled for the current job/container { diff --git a/api/src/org/labkey/api/cloud/CloudStoreService.java b/api/src/org/labkey/api/cloud/CloudStoreService.java index 6d517d6fabb..7803bee33f3 100644 --- a/api/src/org/labkey/api/cloud/CloudStoreService.java +++ b/api/src/org/labkey/api/cloud/CloudStoreService.java @@ -23,6 +23,8 @@ import org.labkey.api.services.ServiceRegistry; import org.labkey.api.util.Pair; import org.labkey.api.webdav.WebdavResource; +import org.labkey.vfs.FileLike; +import org.labkey.vfs.FileSystemLike; import java.nio.file.Path; import java.util.Collection; @@ -141,6 +143,17 @@ default Collection getEnabledCloudStores(Container container, boolean ex @Nullable Path getPath(Container container, String storeName, org.labkey.api.util.Path path); + /** + * Return nio.Path to cloud file/directory + */ + @Nullable + FileLike getFileLike(Container container, String storeName, org.labkey.api.util.Path path); + + /** + * Return FileSystem for cloud root + */ + public @Nullable FileSystemLike getFileSystemLike(Container container, String configName); + /** * Return path relative to cloud store */ diff --git a/api/src/org/labkey/api/data/TSVGridWriter.java b/api/src/org/labkey/api/data/TSVGridWriter.java index a678c886d39..70f54014457 100644 --- a/api/src/org/labkey/api/data/TSVGridWriter.java +++ b/api/src/org/labkey/api/data/TSVGridWriter.java @@ -22,10 +22,9 @@ import org.jetbrains.annotations.Nullable; import org.labkey.api.collections.ResultSetRowMapFactory; import 
org.labkey.api.query.FieldKey; -import org.labkey.api.util.FileUtil; import org.labkey.api.view.HttpView; +import org.labkey.vfs.FileLike; -import java.io.File; import java.io.IOException; import java.sql.SQLException; import java.util.ArrayList; @@ -198,7 +197,7 @@ private int writeBody(Results results) * @return List of the output Files. */ @NotNull - public List writeBatchFiles(@NotNull File outputDir, @NotNull String baseName, @Nullable String extension, int batchSize, @Nullable FieldKey batchColumn) + public List writeBatchFiles(@NotNull FileLike outputDir, @NotNull String baseName, @Nullable String extension, int batchSize, @Nullable FieldKey batchColumn) { extension = StringUtils.trimToEmpty(extension); String ext = "".equals(extension) || extension.startsWith(".") ? extension : "." + extension; @@ -211,13 +210,13 @@ public List writeBatchFiles(@NotNull File outputDir, @NotNull String baseN } @NotNull - private List writeResultSetBatches(Results results, File outputDir, String baseName, String extension, int batchSize, @Nullable FieldKey batchColumn) throws IOException + private List writeResultSetBatches(Results results, FileLike outputDir, String baseName, String extension, int batchSize, @Nullable FieldKey batchColumn) throws IOException { int currentBatchSize = 0; int totalBatches = 1; Object previousBatchColumnValue = null; Object newBatchColumnValue; - List outputFiles = new ArrayList<>(); + List outputFiles = new ArrayList<>(); outputFiles.add(startBatchFile(outputDir, baseName, extension, batchSize, totalBatches)); RenderContext ctx = getRenderContext(); ctx.setResults(results); @@ -264,11 +263,11 @@ private List writeResultSetBatches(Results results, File outputDir, String } @NotNull - private File startBatchFile(File outputDir, String baseName, String extension, int batchSize, int totalBatches) throws IOException + private FileLike startBatchFile(FileLike outputDir, String baseName, String extension, int batchSize, int totalBatches) throws 
IOException { String batchId = batchSize == 0 ? "" : "-" + totalBatches; - File file = FileUtil.appendName(outputDir, baseName + batchId + extension); - prepare(file); + FileLike file = outputDir.resolveChild(baseName + batchId + extension); + prepare(file.openOutputStream()); writeFileHeader(); if (isHeaderRowVisible()) writeColumnHeaders(); diff --git a/api/src/org/labkey/api/exp/AbstractFileXarSource.java b/api/src/org/labkey/api/exp/AbstractFileXarSource.java index 1b57d449e29..b52b70d40c3 100644 --- a/api/src/org/labkey/api/exp/AbstractFileXarSource.java +++ b/api/src/org/labkey/api/exp/AbstractFileXarSource.java @@ -27,6 +27,7 @@ import org.labkey.api.util.FileUtil; import org.labkey.api.util.NetworkDrive; import org.labkey.api.util.XmlBeansUtil; +import org.labkey.vfs.FileLike; import java.io.IOException; import java.io.InputStream; @@ -42,9 +43,9 @@ */ public abstract class AbstractFileXarSource extends XarSource { - protected Path _xmlFile; + protected FileLike _xmlFile; - protected Path getXmlFile() + protected FileLike getXmlFile() { return _xmlFile; } @@ -77,7 +78,7 @@ public ExperimentArchiveDocument getDocument() throws XmlException, IOException try { NetworkDrive.exists(getXmlFile()); - fIn = Files.newInputStream(getXmlFile()); + fIn = getXmlFile().openInputStream(); return ExperimentArchiveDocument.Factory.parse(fIn, XmlBeansUtil.getDefaultParseOptions()); } finally @@ -88,9 +89,7 @@ public ExperimentArchiveDocument getDocument() throws XmlException, IOException { fIn.close(); } - catch (IOException e) - { - } + catch (IOException ignored) {} } } } @@ -99,7 +98,7 @@ public ExperimentArchiveDocument getDocument() throws XmlException, IOException @Nullable public Path getRootPath() { - return null != getXmlFile()? getXmlFile().getParent(): null; + return null != getXmlFile()? 
getXmlFile().toNioPathForRead().getParent(): null; } @Override @@ -137,15 +136,15 @@ public String canonicalizeDataFileURL(String dataFileURL) } } - public static Path getLogFileFor(Path f) throws IOException + public static FileLike getLogFileFor(FileLike f) throws IOException { - Path xarDirectory = f.getParent(); - if (!Files.exists(xarDirectory)) + FileLike xarDirectory = f.getParent(); + if (!xarDirectory.exists()) { throw new IOException("Xar file parent directory does not exist"); } - String xarShortName = f.getFileName().toString(); + String xarShortName = f.getName(); int index = xarShortName.toLowerCase().lastIndexOf(".xml"); if (index == -1) { @@ -157,6 +156,6 @@ public static Path getLogFileFor(Path f) throws IOException xarShortName = xarShortName.substring(0, index); } - return xarDirectory.resolve(xarShortName + LOG_FILE_NAME_SUFFIX); + return xarDirectory.resolveChild(xarShortName + LOG_FILE_NAME_SUFFIX); } } diff --git a/api/src/org/labkey/api/exp/FileXarSource.java b/api/src/org/labkey/api/exp/FileXarSource.java index d518701264e..ccdcc593f1c 100644 --- a/api/src/org/labkey/api/exp/FileXarSource.java +++ b/api/src/org/labkey/api/exp/FileXarSource.java @@ -19,6 +19,7 @@ import org.jetbrains.annotations.Nullable; import org.labkey.api.data.Container; import org.labkey.api.pipeline.PipelineJob; +import org.labkey.vfs.FileLike; import java.io.IOException; import java.nio.file.Path; @@ -30,25 +31,25 @@ */ public class FileXarSource extends AbstractFileXarSource { - public FileXarSource(Path file, PipelineJob job) + public FileXarSource(FileLike file, PipelineJob job) { super(job); - _xmlFile = file.normalize(); + _xmlFile = file; } - public FileXarSource(Path file, PipelineJob job, Container targetContainer, @Nullable Map substitutions) + public FileXarSource(FileLike file, PipelineJob job, Container targetContainer, @Nullable Map substitutions) { super(job.getDescription(), targetContainer, job.getUser(), job, substitutions); _xmlFile = file; } - 
public FileXarSource(Path file, PipelineJob job, Container targetContainer) + public FileXarSource(FileLike file, PipelineJob job, Container targetContainer) { this(file, job, targetContainer, null); } @Override - public Path getLogFilePath() throws IOException + public FileLike getLogFilePath() throws IOException { return getLogFileFor(_xmlFile); } diff --git a/api/src/org/labkey/api/exp/XarContext.java b/api/src/org/labkey/api/exp/XarContext.java index cc5fbfbede4..1c368d4935f 100644 --- a/api/src/org/labkey/api/exp/XarContext.java +++ b/api/src/org/labkey/api/exp/XarContext.java @@ -29,6 +29,8 @@ import org.labkey.api.settings.AppProps; import org.labkey.api.util.GUID; import org.labkey.api.util.NetworkDrive; +import org.labkey.api.util.Path; +import org.labkey.vfs.FileLike; import java.io.File; import java.net.URI; diff --git a/api/src/org/labkey/api/exp/XarSource.java b/api/src/org/labkey/api/exp/XarSource.java index 084b367f7cd..b3e4475bfe4 100644 --- a/api/src/org/labkey/api/exp/XarSource.java +++ b/api/src/org/labkey/api/exp/XarSource.java @@ -32,6 +32,7 @@ import org.labkey.api.pipeline.PipelineJob; import org.labkey.api.security.User; import org.labkey.api.util.FileUtil; +import org.labkey.vfs.FileLike; import java.io.IOException; import java.io.Serializable; @@ -122,7 +123,7 @@ public final String getCanonicalDataFileURL(String dataFileURL) throws XarFormat protected abstract String canonicalizeDataFileURL(String dataFileURL) throws XarFormatException; - public abstract Path getLogFilePath() throws IOException; + public abstract FileLike getLogFilePath() throws IOException; /** * Called before trying to import this XAR to let the source set up any resources that are required diff --git a/api/src/org/labkey/api/exp/api/ExperimentService.java b/api/src/org/labkey/api/exp/api/ExperimentService.java index bdf36dc10f8..f1ca905d090 100644 --- a/api/src/org/labkey/api/exp/api/ExperimentService.java +++ b/api/src/org/labkey/api/exp/api/ExperimentService.java @@ 
-987,7 +987,7 @@ List getExpProtocolsWithParameterValue( * * @return the job responsible for doing the work */ - PipelineJob importXarAsync(ViewBackgroundInfo info, File file, String description, PipeRoot root) throws IOException; + PipelineJob importXarAsync(ViewBackgroundInfo info, FileLike file, String description, PipeRoot root) throws IOException; /** * Loads the xar synchronously, in the context of the pipelineJob diff --git a/api/src/org/labkey/api/files/FileContentService.java b/api/src/org/labkey/api/files/FileContentService.java index 9791ba68902..ee10724bdd4 100644 --- a/api/src/org/labkey/api/files/FileContentService.java +++ b/api/src/org/labkey/api/files/FileContentService.java @@ -30,6 +30,7 @@ import org.labkey.api.services.ServiceRegistry; import org.labkey.api.util.FileUtil; import org.labkey.api.webdav.WebdavResource; +import org.labkey.vfs.FileLike; import java.io.File; import java.net.URI; @@ -340,6 +341,9 @@ enum PathType { full, serverRelative, folderRelative } @Nullable URI getWebDavUrl(@NotNull Path path, @NotNull Container container, @NotNull PathType type); + @Nullable + URI getWebDavUrl(@NotNull FileLike path, @NotNull Container container, @NotNull PathType type); + /** * Ensure an entry in the exp.data table exists for all files in the container's file root. 
*/ diff --git a/api/src/org/labkey/api/pipeline/AnalyzeForm.java b/api/src/org/labkey/api/pipeline/AnalyzeForm.java index 9a69e371a9c..0fe86132e9a 100644 --- a/api/src/org/labkey/api/pipeline/AnalyzeForm.java +++ b/api/src/org/labkey/api/pipeline/AnalyzeForm.java @@ -21,6 +21,7 @@ import org.labkey.api.security.User; import org.labkey.api.util.FileType; import org.labkey.api.util.FileUtil; +import org.labkey.vfs.FileLike; import java.nio.file.Path; @@ -64,7 +65,7 @@ public AnalyzeForm(Container container, User user, String taskId, String protoco setProtocolName(protocolName); } - public void initStatus(AbstractFileAnalysisProtocol protocol, Path dirData, Path dirAnalysis) + public void initStatus(AbstractFileAnalysisProtocol protocol, FileLike dirData, FileLike dirAnalysis) { if (fileInputStatus != null) return; @@ -80,7 +81,7 @@ public void initStatus(AbstractFileAnalysisProtocol protocol, Path dirData, Path fileInputStatus[len] = initStatusFile(protocol, dirData, dirAnalysis, null, false); } - private String initStatusFile(AbstractFileAnalysisProtocol protocol, Path dirData, Path dirAnalysis, + private String initStatusFile(AbstractFileAnalysisProtocol protocol, FileLike dirData, FileLike dirAnalysis, String fileInputName, boolean statusSingle) { if (protocol == null) @@ -88,7 +89,7 @@ private String initStatusFile(AbstractFileAnalysisProtocol protocol, Path dirDat return UNKNOWN_STATUS; } - Path fileStatus = null; + FileLike fileStatus = null; if (!statusSingle) { @@ -97,7 +98,7 @@ private String initStatusFile(AbstractFileAnalysisProtocol protocol, Path dirDat } else if (fileInputName != null) { - Path fileInput = FileUtil.appendName(dirData, fileInputName); + FileLike fileInput = dirData.resolveChild(fileInputName); FileType ft = protocol.findInputType(fileInput); if (ft != null) fileStatus = PipelineJob.FT_LOG.newFile(dirAnalysis, ft.getBaseName(fileInput)); diff --git a/api/src/org/labkey/api/pipeline/LocalDirectory.java 
b/api/src/org/labkey/api/pipeline/LocalDirectory.java index f537ce2e94f..68c67ed130f 100644 --- a/api/src/org/labkey/api/pipeline/LocalDirectory.java +++ b/api/src/org/labkey/api/pipeline/LocalDirectory.java @@ -48,24 +48,23 @@ public class LocalDirectory implements Serializable private final Path _remoteDir; private Path _logFile; private final String _baseLogFileName; - private final String _moduleName; - public static LocalDirectory create(@NotNull PipeRoot root, @NotNull String moduleName) + public static LocalDirectory create(@NotNull PipeRoot root) { - return create(root, moduleName, "dummyLogFile", root.isCloudRoot() ? "dummy" : root.getRootPath().getPath()); + return create(root, "dummyLogFile", root.isCloudRoot() ? "dummy" : root.getRootPath().getPath()); } @Deprecated //Prefer to use a Path for workingDir -- can be local or remote, but should match with root - public static LocalDirectory create(@NotNull PipeRoot root, @NotNull String moduleName, @NotNull String baseLogFileName, @NotNull String workingDir) + public static LocalDirectory create(@NotNull PipeRoot root, @NotNull String baseLogFileName, @NotNull String workingDir) { - return create(root, moduleName, baseLogFileName, Path.of(workingDir)); + return create(root, baseLogFileName, Path.of(workingDir)); } - public static LocalDirectory create(@NotNull PipeRoot root, @NotNull String moduleName, @NotNull String baseLogFileName, @NotNull Path workingDir) + public static LocalDirectory create(@NotNull PipeRoot root, @NotNull String baseLogFileName, @NotNull Path workingDir) { return !root.isCloudRoot() ? 
- new LocalDirectory(workingDir.toFile(), moduleName, baseLogFileName) : - new LocalDirectory(root.getContainer(), moduleName, root, baseLogFileName); + new LocalDirectory(workingDir.toFile(), baseLogFileName) : + new LocalDirectory(root.getContainer(), root, baseLogFileName); } @JsonCreator @@ -74,7 +73,6 @@ private LocalDirectory( @JsonProperty("_isTemporary") boolean isTemporary, @JsonProperty("_pipeRoot") PipeRoot pipeRoot, @JsonProperty("_baseLogFileName") String baseLogFileName, - @JsonProperty("_moduleName") String moduleName, @JsonProperty("_remoteDir") Path remoteDir) { _localDirectoryFile = localDirectoryFile; @@ -82,22 +80,20 @@ private LocalDirectory( _pipeRoot = pipeRoot; _remoteDir = remoteDir != null ? remoteDir : _pipeRoot == null ? null : _pipeRoot.getRootNioPath(); //Using _piperoot as default for backwards compatability _baseLogFileName = baseLogFileName; - _moduleName = moduleName; } // Constructor for runs and actions when pipeline root is cloud - public LocalDirectory(Container container, String moduleName, PipeRoot pipeRoot, String basename) + public LocalDirectory(Container container, PipeRoot pipeRoot, String basename) { - this(container, moduleName, pipeRoot, basename, null); + this(container, pipeRoot, basename, null); } - public LocalDirectory(Container container, String moduleName, PipeRoot pipeRoot, String basename, Path remoteDir) + public LocalDirectory(Container container, PipeRoot pipeRoot, String basename, Path remoteDir) { _isTemporary = true; _pipeRoot = pipeRoot; _remoteDir = remoteDir != null ? remoteDir : _pipeRoot == null ? 
null : _pipeRoot.getRootNioPath(); //Using _piperoot as default for backwards compatability _baseLogFileName = basename; - _moduleName = moduleName; try { @@ -113,13 +109,12 @@ public LocalDirectory(Container container, String moduleName, PipeRoot pipeRoot, } // Constructor when pipeline root not in cloud - public LocalDirectory(@NotNull File localDirectory, String moduleName, String basename) + public LocalDirectory(@NotNull File localDirectory, String basename) { _localDirectoryFile = localDirectory; _isTemporary = false; _pipeRoot = null; _baseLogFileName = basename; - _moduleName = moduleName; _remoteDir = null; } diff --git a/api/src/org/labkey/api/pipeline/PipeRoot.java b/api/src/org/labkey/api/pipeline/PipeRoot.java index d71491f9586..80138b4b122 100644 --- a/api/src/org/labkey/api/pipeline/PipeRoot.java +++ b/api/src/org/labkey/api/pipeline/PipeRoot.java @@ -25,7 +25,6 @@ import org.labkey.api.security.User; import org.labkey.api.security.permissions.Permission; import org.labkey.vfs.FileLike; -import org.labkey.vfs.FileSystemLike; import java.io.File; import java.net.URI; @@ -89,21 +88,16 @@ public interface PipeRoot extends SecurableResource @Nullable FileLike resolvePathToFileLike(String relativePath); - /** - * Get a local directory that can be used for importing (Read/Write) - * - * Cloud: Uses a temp directory - * Default: Uses folder within the file root - */ @NotNull - File getImportDirectory(); + FileLike getImportDirectory(); /** * Delete the import directory and its contents + * * @return File object for import directory * @throws DirectoryNotDeletedException if import directory exists and cannot be deleted */ - Path deleteImportDirectory(@Nullable Logger log) throws DirectoryNotDeletedException; + FileLike deleteImportDirectory(@Nullable Logger log) throws DirectoryNotDeletedException; /** @return relative path to the file from the root. null if the file isn't under the root. 
Does not include a leading slash */ String relativePath(File file); @@ -125,18 +119,8 @@ public interface PipeRoot extends SecurableResource /** Creates a .labkey directory if it's not present and returns it. Used for things like protocol definition files, * log files for some upgrade tasks, etc. Its contents are generally not exposed directly to the user */ - @Deprecated // prefer ensureSystemFileLike() @NotNull - File ensureSystemDirectory(); - - @Deprecated // prefer ensureSystemFileLike() - @NotNull - Path ensureSystemDirectoryPath(); - - default FileLike ensureSystemFileLike() - { - return new FileSystemLike.Builder(ensureSystemDirectory()).readwrite().root(); - } + FileLike ensureSystemDirectory(); /** @return the entityId for this pipeline root, used to store permissions */ String getEntityId(); diff --git a/api/src/org/labkey/api/pipeline/PipelineJob.java b/api/src/org/labkey/api/pipeline/PipelineJob.java index e872a09ce1f..7f3fdad9610 100644 --- a/api/src/org/labkey/api/pipeline/PipelineJob.java +++ b/api/src/org/labkey/api/pipeline/PipelineJob.java @@ -1,2018 +1,2031 @@ -/* - * Copyright (c) 2008-2019 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.labkey.api.pipeline; - -import com.fasterxml.jackson.annotation.JsonAutoDetect; -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import com.fasterxml.jackson.annotation.PropertyAccessor; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.SerializationFeature; -import com.fasterxml.jackson.databind.module.SimpleModule; -import datadog.trace.api.CorrelationIdentifier; -import datadog.trace.api.Trace; -import org.apache.commons.io.FileUtils; -import org.apache.commons.lang3.StringUtils; -import org.apache.logging.log4j.Level; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.apache.logging.log4j.Marker; -import org.apache.logging.log4j.ThreadContext; -import org.apache.logging.log4j.message.Message; -import org.apache.logging.log4j.simple.SimpleLogger; -import org.apache.logging.log4j.util.PropertiesUtil; -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; -import org.labkey.api.action.NullSafeBindException; -import org.labkey.api.assay.AssayFileWriter; -import org.labkey.api.data.Container; -import org.labkey.api.exp.api.ExpRun; -import org.labkey.api.gwt.client.util.PropertyUtil; -import org.labkey.api.pipeline.file.FileAnalysisJobSupport; -import org.labkey.api.query.FieldKey; -import org.labkey.api.query.QueryKey; -import org.labkey.api.query.SchemaKey; -import org.labkey.api.reader.Readers; -import org.labkey.api.security.User; -import org.labkey.api.util.DateUtil; -import org.labkey.api.util.ExceptionUtil; -import org.labkey.api.util.FileType; -import org.labkey.api.util.FileUtil; -import org.labkey.api.util.GUID; -import org.labkey.api.util.Job; -import org.labkey.api.util.JsonUtil; -import org.labkey.api.util.NetworkDrive; -import org.labkey.api.util.QuietCloser; -import org.labkey.api.util.URLHelper; -import org.labkey.api.util.logging.LogHelper; -import org.labkey.api.view.ActionURL; -import 
org.labkey.api.view.ViewBackgroundInfo; -import org.labkey.api.writer.ContainerUser; -import org.labkey.api.writer.PrintWriters; -import org.labkey.remoteapi.query.Filter; -import org.quartz.CronExpression; - -import java.io.BufferedReader; -import java.io.BufferedWriter; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.io.FileWriter; -import java.io.IOException; -import java.io.InputStream; -import java.io.PrintWriter; -import java.io.Serializable; -import java.net.URI; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.StandardOpenOption; -import java.sql.Time; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.Date; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.Future; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicLong; - -/** - * A job represents the invocation of a pipeline on a certain set of inputs. It can be monolithic (a single run() method) - * or be comprised of multiple tasks ({@link Task}) that can be checkpointed and restarted individually. - */ -@JsonIgnoreProperties(value={"_logFilePathName"}, allowGetters = true) //Property removed. 
Added here for backwards compatibility -abstract public class PipelineJob extends Job implements Serializable, ContainerUser -{ - public static final FileType FT_LOG = new FileType(Arrays.asList(".log"), ".log", Arrays.asList("text/plain")); - - public static final String PIPELINE_EMAIL_ADDRESS_PARAM = "pipeline, email address"; - public static final String PIPELINE_USERNAME_PARAM = "pipeline, username"; - public static final String PIPELINE_PROTOCOL_NAME_PARAM = "pipeline, protocol name"; - public static final String PIPELINE_PROTOCOL_DESCRIPTION_PARAM = "pipeline, protocol description"; - public static final String PIPELINE_LOAD_FOLDER_PARAM = "pipeline, load folder"; - public static final String PIPELINE_JOB_INFO_PARAM = "pipeline, jobInfo"; - public static final String PIPELINE_TASK_INFO_PARAM = "pipeline, taskInfo"; - public static final String PIPELINE_TASK_OUTPUT_PARAMS_PARAM = "pipeline, taskOutputParams"; - - protected static Logger _log = LogHelper.getLogger(PipelineJob.class, "Execution and queuing of pipeline jobs"); - // Send start/stop messages to a separate logger because the default logger for this class is set to - // only write ERROR level events to the system log - private static final Logger _logJobStopStart = LogManager.getLogger(Job.class); - - public static Logger getJobLogger(Class clazz) - { - return LogManager.getLogger(PipelineJob.class.getName() + ".." 
+ clazz.getName()); - } - - public RecordedActionSet getActionSet() - { - return _actionSet; - } - - /** - * Clear out the set of recorded actions - * @param run run that represents the previous set of recorded actions - */ - public void clearActionSet(ExpRun run) - { - _actionSet = new RecordedActionSet(); - } - - public enum TaskStatus - { - /** Job is in the queue, waiting for its turn to run */ - waiting - { - @Override - public boolean isActive() { return true; } - - @Override - public boolean matches(String statusText) - { - if (statusText == null) - return false; - else if (!TaskStatus.splitWaiting.matches(statusText) && statusText.toLowerCase().endsWith("waiting")) - return true; - return super.matches(statusText); - } - }, - /** Job is doing its work */ - running - { - @Override - public boolean isActive() { return true; } - }, - /** Terminal state, job is finished and completed without errors */ - complete - { - @Override - public boolean isActive() { return false; } - }, - /** Terminal state (but often retryable), job is done running and completed with error(s) */ - error - { - @Override - public boolean isActive() { return false; } - }, - /** Job is in the process of being cancelled, but may still be running or queued at the moment */ - cancelling - { - @Override - public boolean isActive() { return true; } - }, - /** Terminal state, indicating that a user cancelled the job before it completed or errored */ - cancelled - { - @Override - public boolean isActive() { return false; } - }, - splitWaiting - { - @Override - public boolean isActive() { return false; } - - @Override - public String toString() { return "SPLIT WAITING"; } - }; - - /** @return whether this step is considered to be actively running */ - public abstract boolean isActive(); - - public String toString() - { - return super.toString().toUpperCase(); - } - - public boolean matches(String statusText) - { - return toString().equalsIgnoreCase(statusText); - } - - public final String 
getNotificationType() - { - return getClass().getName() + "." + name(); - } - } - - /** - * Implements a runnable to complete a part of the - * processing associated with a particular PipelineJob. This is often the execution of an external tool, - * the importing of files into the database, etc. - */ - abstract static public class Task - { - private final PipelineJob _job; - protected FactoryType _factory; - - public Task(FactoryType factory, PipelineJob job) - { - _job = job; - _factory = factory; - } - - public PipelineJob getJob() - { - return _job; - } - - /** - * Do the work of the task. The task should not set the status of the job to complete - this will be handled - * by the caller. - * @return the files used as inputs and generated as outputs, and the steps that operated on them - * @throws PipelineJobException if something went wrong during the execution of the job. The caller will - * handle setting the job's status to ERROR. - */ - @NotNull - public abstract RecordedActionSet run() throws PipelineJobException; - } - - /* - * JMS message header names - */ - private static final String HEADER_PREFIX = "LABKEY_"; - public static final String LABKEY_JOBTYPE_PROPERTY = HEADER_PREFIX + "JOBTYPE"; - public static final String LABKEY_JOBID_PROPERTY = HEADER_PREFIX + "JOBID"; - public static final String LABKEY_CONTAINERID_PROPERTY = HEADER_PREFIX + "CONTAINERID"; - public static final String LABKEY_TASKPIPELINE_PROPERTY = HEADER_PREFIX + "TASKPIPELINE"; - public static final String LABKEY_TASKID_PROPERTY = HEADER_PREFIX + "TASKID"; - public static final String LABKEY_TASKSTATUS_PROPERTY = HEADER_PREFIX + "TASKSTATUS"; - /** The execution location to which the job's current task is assigned */ - public static final String LABKEY_LOCATION_PROPERTY = HEADER_PREFIX + "LOCATION"; - - private String _provider; - private ViewBackgroundInfo _info; - private String _jobGUID; - private String _parentGUID; - private TaskId _activeTaskId; - @NotNull - private TaskStatus 
_activeTaskStatus; - private int _activeTaskRetries; - @NotNull - private PipeRoot _pipeRoot; - volatile private boolean _interrupted; - private boolean _submitted; - private int _errors; - private RecordedActionSet _actionSet = new RecordedActionSet(); - - private String _loggerLevel = Level.DEBUG.toString(); - - // Don't save these - protected transient Logger _logger; - private transient boolean _settingStatus; - private transient PipelineQueue _queue; - - private Path _logFile; - private LocalDirectory _localDirectory; - - // Default constructor for serialization - protected PipelineJob() - { - } - - /** Although having a null provider is legal, it is recommended that one be used - * so that it can respond to events as needed */ - public PipelineJob(@Nullable String provider, ViewBackgroundInfo info, @NotNull PipeRoot root) - { - _info = info; - _provider = provider; - _jobGUID = GUID.makeGUID(); - _activeTaskStatus = TaskStatus.waiting; - - - _pipeRoot = root; - - _actionSet = new RecordedActionSet(); - } - - public PipelineJob(PipelineJob job) - { - // Not yet queued - _queue = null; - - // New ID - _jobGUID = GUID.makeGUID(); - - // Copy everything else - _info = job._info; - _provider = job._provider; - _parentGUID = job._jobGUID; - _pipeRoot = job._pipeRoot; - _interrupted = job._interrupted; - _submitted = job._submitted; - _errors = job._errors; - _loggerLevel = job._loggerLevel; - _logger = job._logger; - _logFile = job._logFile; - - _activeTaskId = job._activeTaskId; - _activeTaskStatus = job._activeTaskStatus; - - _actionSet = new RecordedActionSet(job.getActionSet()); - _localDirectory = job._localDirectory; - } - - public String getProvider() - { - return _provider; - } - - @Deprecated - public void setProvider(String provider) - { - _provider = provider; - } - - public int getErrors() - { - return _errors; - } - - public void setErrors(int errors) - { - if (errors > 0) - _activeTaskStatus = TaskStatus.error; - - _errors = errors; - } - - /** - * 
This job has been restored from a checkpoint for the purpose of - * a retry. Record retry information before it is checkpointed again. - */ - public void retryUpdate() - { - _errors++; - _activeTaskRetries++; - } - - public Map getParameters() - { - return Collections.emptyMap(); - } - - public String getJobGUID() - { - return _jobGUID; - } - - public String getParentGUID() - { - return _parentGUID; - } - - @Nullable - public TaskId getActiveTaskId() - { - return _activeTaskId; - } - - public boolean setActiveTaskId(@Nullable TaskId activeTaskId) - { - return setActiveTaskId(activeTaskId, true); - } - - public boolean setActiveTaskId(@Nullable TaskId activeTaskId, boolean updateStatus) - { - if (activeTaskId == null || !activeTaskId.equals(_activeTaskId)) - { - _activeTaskId = activeTaskId; - _activeTaskRetries = 0; - } - if (_activeTaskId == null) - _activeTaskStatus = TaskStatus.complete; - else - _activeTaskStatus = TaskStatus.waiting; - - return !updateStatus || updateStatusForTask(); - } - - @NotNull - public TaskStatus getActiveTaskStatus() - { - return _activeTaskStatus; - } - - /** @return whether the status was set successfully */ - public boolean setActiveTaskStatus(@NotNull TaskStatus activeTaskStatus) - { - _activeTaskStatus = activeTaskStatus; - return updateStatusForTask(); - } - - public TaskFactory getActiveTaskFactory() - { - if (getActiveTaskId() == null) - return null; - - return PipelineJobService.get().getTaskFactory(getActiveTaskId()); - } - - @NotNull - public PipeRoot getPipeRoot() - { - return _pipeRoot; - } - - @Deprecated //Please switch to the Path version - public void setLogFile(File logFile) - { - setLogFile(logFile.toPath()); - } - - public void setLogFile(Path logFile) - { - // Set Log file path and clear/reset logger - _logFile = logFile.toAbsolutePath().normalize(); - _logger = null; //This should trigger getting the new Logger next time getLogger is called - } - - public File getLogFile() - { - Path logFilePath = 
getLogFilePath(); - if (null != logFilePath && !FileUtil.hasCloudScheme(logFilePath)) - return logFilePath.toFile(); - return null; - } - - public Path getLogFilePath() - { - return _logFile; - } - - /** - * Get the remote log path (if local dir set) else return getLogFilePath - * - * TODO: Better name getStatusKeyPath? or similar - */ - public Path getRemoteLogPath() - { - LocalDirectory dir = getLocalDirectory(); - if (dir == null) - return getLogFilePath(); - - return dir.getRemoteLogFilePath(); - } - - /** Finds a file name that hasn't been used yet, appending ".2", ".3", etc as needed */ - public static File findUniqueLogFile(File primaryFile, String baseName) - { - String validBaseName = FileUtil.makeLegalName(baseName); - // need to look in current and archived dirs for any unused log file names (issue 20987) - File fileLog = FT_LOG.newFile(primaryFile.getParentFile(), validBaseName); - File archivedDir = FileUtil.appendName(primaryFile.getParentFile(), AssayFileWriter.ARCHIVED_DIR_NAME); - File fileLogArchived = FT_LOG.newFile(archivedDir, validBaseName); - - int index = 1; - while (NetworkDrive.exists(fileLog) || NetworkDrive.exists(fileLogArchived)) - { - fileLog = FT_LOG.newFile(primaryFile.getParentFile(), validBaseName + "." + (index)); - fileLogArchived = FT_LOG.newFile(archivedDir, validBaseName + "." 
+ (index++)); - } - - return fileLog; - } - - - public LocalDirectory getLocalDirectory() - { - return _localDirectory; - } - - protected void setLocalDirectory(LocalDirectory localDirectory) - { - _localDirectory = localDirectory; - } - - public static PipelineJob readFromFile(File file) throws IOException, PipelineJobException - { - StringBuilder serializedJob = new StringBuilder(); - try (InputStream fIn = new FileInputStream(file)) - { - BufferedReader reader = Readers.getReader(fIn); - String line; - while ((line = reader.readLine()) != null) - { - serializedJob.append(line); - } - } - - PipelineJob job = PipelineJob.deserializeJob(serializedJob.toString()); - if (null == job) - { - throw new PipelineJobException("Unable to deserialize job"); - } - return job; - } - - - public void writeToFile(File file) throws IOException - { - File newFile = new File(file.getPath() + ".new"); - File origFile = new File(file.getPath() + ".orig"); - - String serializedJob = serializeJob(true); - - try (FileOutputStream fOut = new FileOutputStream(newFile)) - { - PrintWriter writer = PrintWriters.getPrintWriter(fOut); - writer.write(serializedJob); - writer.flush(); - } - - if (NetworkDrive.exists(file)) - { - if (origFile.exists()) - { - // Might be left over from some bad previous run - origFile.delete(); - } - // Don't use File.renameTo() because it doesn't always work depending on the underlying file system - FileUtils.moveFile(file, origFile); - FileUtils.moveFile(newFile, file); - origFile.delete(); - } - else - { - FileUtils.moveFile(newFile, file); - } - PipelineJobService.get().getWorkDirFactory().setPermissions(file); - } - - public boolean updateStatusForTask() - { - TaskFactory factory = getActiveTaskFactory(); - TaskStatus status = getActiveTaskStatus(); - - if (factory != null && !TaskStatus.error.equals(status) && !TaskStatus.cancelled.equals(status)) - return setStatus(factory.getStatusName() + " " + status.toString().toUpperCase()); - else - return 
setStatus(status); - } - - /** Used for setting status to one of the standard states */ - public boolean setStatus(@NotNull TaskStatus status) - { - return setStatus(status.toString()); - } - - /** - * Used for setting status to a custom state, which is considered to be equivalent to TaskStatus.running - * unless it matches one of the standard states - * @throws CancelledException if the job was cancelled by a user and should stop execution - */ - public boolean setStatus(@NotNull String status) - { - return setStatus(status, null); - } - - /** - * Used for setting status to one of the standard states - * @param info more verbose detail on the job's status, such as a percent complete - * @throws CancelledException if the job was cancelled by a user and should stop execution - */ - public boolean setStatus(@NotNull TaskStatus status, @Nullable String info) - { - return setStatus(status.toString(), info); - } - - /** - * @param info more verbose detail on the job's status, such as a percent complete - * @throws CancelledException if the job was cancelled by a user and should stop execution - */ - public boolean setStatus(@NotNull String status, @Nullable String info) - { - return setStatus(status, info, false); - } - - /** - * Used for setting status to a custom state, which is considered to be equivalent to TaskStatus.running - * unless it matches one of the standard states - * @throws CancelledException if the job was cancelled by a user and should stop execution - */ - public boolean setStatus(@NotNull String status, @Nullable String info, boolean allowInsert) - { - if (_settingStatus) - return true; - - _settingStatus = true; - try - { - boolean statusSet = PipelineJobService.get().getStatusWriter().setStatus(this, status, info, allowInsert); - if (!statusSet) - { - setActiveTaskStatus(TaskStatus.error); - } - return statusSet; - } - // Rethrow so it doesn't get handled like other RuntimeExceptions - catch (CancelledException e) - { - _activeTaskStatus = 
TaskStatus.cancelled; - throw e; - } - catch (RuntimeException e) - { - Path f = this.getLogFilePath(); - error("Failed to set status to '" + status + "' for '" + - (f == null ? "" : f.toString()) + "'.", e); - throw e; - } - catch (Exception e) - { - Path f = this.getLogFilePath(); - error("Failed to set status to '" + status + "' for '" + - (f == null ? "" : f.toString()) + "'.", e); - } - finally - { - _settingStatus = false; - } - return false; - } - - public void restoreQueue(PipelineQueue queue) - { - // Recursive split and join combinations may cause the queue - // to be restored to a job with a queue already. Would be good - // to have better safe-guards against double-queueing of jobs. - if (queue == _queue) - return; - if (null != _queue) - throw new IllegalStateException(); - _queue = queue; - } - - public void restoreLocalDirectory() - { - if (null != _localDirectory) - setLogFile(_localDirectory.restore()); - } - - public void validateParameters() throws PipelineValidationException - { - TaskPipeline taskPipeline = getTaskPipeline(); - if (taskPipeline != null) - { - for (TaskId taskId : taskPipeline.getTaskProgression()) - { - TaskFactory taskFactory = PipelineJobService.get().getTaskFactory(taskId); - if (taskFactory == null) - throw new PipelineValidationException("Task '" + taskId + "' not found"); - taskFactory.validateParameters(this); - } - } - } - - public boolean setQueue(PipelineQueue queue, TaskStatus initialState) - { - return setQueue(queue, initialState.toString()); - } - - public boolean setQueue(PipelineQueue queue, String initialState) - { - restoreQueue(queue); - - // Initialize the task pipeline - TaskPipeline taskPipeline = getTaskPipeline(); - if (taskPipeline != null) - { - // Save the current job state marshalled to XML, in case of error. - String serializedJob = serializeJob(true); - - // Note runStateMachine returns false, if the job cannot be run locally. 
- // The job may still need to be put on a JMS queue for remote processing. - // Therefore, the return value cannot be used to determine whether the - // job should be queued. - runStateMachine(); - - // If an error occurred trying to find the first runnable state, then - // store the original job state to allow retry. - if (getActiveTaskStatus() == TaskStatus.error) - { - try - { - PipelineJob originalJob = PipelineJob.deserializeJob(serializedJob); - if (null != originalJob) - originalJob.store(); - else - warn("Failed to checkpoint '" + getDescription() + "' job."); - - } - catch (Exception e) - { - warn("Failed to checkpoint '" + getDescription() + "' job.", e); - } - return false; - } - - // If initialization put this job into a state where it is - // waiting, then it should not be put on the queue. - return !isSplitWaiting(); - } - // Initialize status for non-task pipeline jobs. - else if (_logFile != null) - { - setStatus(initialState); - try - { - store(); - } - catch (Exception e) - { - warn("Failed to checkpoint '" + getDescription() + "' job before queuing.", e); - } - } - - return true; - } - - public void clearQueue() - { - _queue = null; - } - - abstract public URLHelper getStatusHref(); - - abstract public String getDescription(); - - public String toString() - { - return super.toString() + " " + StringUtils.trimToEmpty(getDescription()); - } - - public T getJobSupport(Class inter) - { - if (inter.isInstance(this)) - return (T) this; - - throw new UnsupportedOperationException("Job type " + getClass().getName() + - " does not implement " + inter.getName()); - } - - /** - * Override to provide a TaskPipeline with the option of - * running some tasks remotely. Override the run() function - * to implement the job as a single monolithic task. 
- * - * @return a task pipeline to run for this job - */ - @Nullable - public TaskPipeline getTaskPipeline() - { - return null; - } - - public boolean isActiveTaskLocal() - { - TaskFactory factory = getActiveTaskFactory(); - return (factory != null && - TaskFactory.WEBSERVER.equalsIgnoreCase(factory.getExecutionLocation())); - } - - public void runActiveTask() throws IOException, PipelineJobException - { - TaskFactory factory = getActiveTaskFactory(); - if (factory == null) - return; - - if (!factory.isJobComplete(this)) - { - Task task = factory.createTask(this); - if (task == null) - return; // Bad task key. - - if (!setActiveTaskStatus(TaskStatus.running)) - { - // The user has deleted (cancelled) the job. - // Throwing this exception will cause the job to go to the ERROR state and stop running - throw new PipelineJobException("Job no longer in database - aborting"); - } - - WorkDirectory workDirectory = null; - RecordedActionSet actions; - - boolean success = false; - try - { - logStartStopInfo("Starting to run task '" + factory.getId() + "' for job '" + this + "' with log file " + getLogFilePath()); - getLogger().info("Starting to run task '" + factory.getId() + "' at location '" + factory.getExecutionLocation() + "'"); - if (PipelineJobService.get().getLocationType() != PipelineJobService.LocationType.WebServer) - { - PipelineJobService.RemoteServerProperties remoteProps = PipelineJobService.get().getRemoteServerProperties(); - if (remoteProps != null) - { - getLogger().info("on host: '" + remoteProps.getHostName() + "'"); - } - } - - if (task instanceof WorkDirectoryTask wdTask) - { - workDirectory = factory.createWorkDirectory(getJobGUID(), getJobSupport(FileAnalysisJobSupport.class), getLogger()); - wdTask.setWorkDirectory(workDirectory); - } - - actions = task.run(); - success = true; - } - finally - { - getLogger().info((success ? "Successfully completed" : "Failed to complete") + " task '" + factory.getId() + "'"); - logStartStopInfo((success ? 
"Successfully completed" : "Failed to complete") + " task '" + factory.getId() + "' for job '" + this + "' with log file " + getLogFile()); - - try - { - if (workDirectory != null) - { - workDirectory.remove(success); - ((WorkDirectoryTask)task).setWorkDirectory(null); - } - } - catch (IOException e) - { - // Don't let this cleanup error mask an original error that causes the job to fail - if (success) - { - // noinspection ThrowFromFinallyBlock - throw e; - } - else - { - if (e.getMessage() != null) - { - error(e.getMessage()); - } - else - { - error("Failed to clean up work directory after error condition, see full error information below.", e); - } - } - } - } - _actionSet.add(actions); - - // An error occurred running the task. Do not complete. - if (TaskStatus.error.equals(getActiveTaskStatus())) - return; - } - else - { - logStartStopInfo("Skipping already completed task '" + factory.getId() + "' for job '" + this + "' with log file " + getLogFile()); - getLogger().info("Skipping already completed task '" + factory.getId() + "' at location '" + factory.getExecutionLocation() + "'"); - } - - if (getActiveTaskStatus() != TaskStatus.complete && getActiveTaskStatus() != TaskStatus.cancelled) - setActiveTaskStatus(TaskStatus.complete); - } - - public static void logStartStopInfo(String message) - { - _logJobStopStart.info(message); - } - - public boolean runStateMachine() - { - TaskPipeline pipeline = getTaskPipeline(); - if (pipeline == null) - { - assert false : "Either override getTaskPipeline() or run() for " + getClass(); - - // Best we can do is to complete the job. 
- setActiveTaskId(null); - return false; - } - - TaskId[] progression = pipeline.getTaskProgression(); - int i = 0; - if (_activeTaskId != null) - { - i = indexOfActiveTask(progression); - if (i == -1) - { - error("Active task " + _activeTaskId + " not found in task pipeline."); - return false; - } - } - - switch (_activeTaskStatus) - { - case waiting: - return findRunnableTask(progression, i); - - case complete: - // See if the job has already completed. - if (_activeTaskId == null) - return false; - - return findRunnableTask(progression, i + 1); - - case error: - // Make sure the status is in error state, so that any auto-retry that - // may occur will record the error. And, if no retry occurs, then this - // job must be in error state. - try - { - PipelineJobService.get().getStatusWriter().ensureError(this); - } - catch (Exception e) - { - warn("Failed to ensure error status on task error.", e); - } - - // Run auto-retry, and retry if appropriate. - autoRetry(); - return false; - - case running: - case cancelled: - case cancelling: - default: - return false; // Do not run the active task. 
- } - } - - private int indexOfActiveTask(TaskId[] progression) - { - for (int i = 0; i < progression.length; i++) - { - TaskFactory factory = PipelineJobService.get().getTaskFactory(progression[i]); - if (factory == null) - { - throw new IllegalStateException("Could not find factory for " + progression[i]); - } - if (factory.getId().equals(_activeTaskId) || - factory.getActiveId(this).equals(_activeTaskId)) - return i; - } - return -1; - } - - private boolean findRunnableTask(TaskId[] progression, int i) - { - // Search for next task that is not already complete - TaskFactory factory = null; - while (i < progression.length) - { - try - { - factory = PipelineJobService.get().getTaskFactory(progression[i]); - if (factory == null) - { - throw new IllegalStateException("Could not find factory for " + progression[i]); - } - // Stop, if this task requires a change in join state - if ((factory.isJoin() && isSplitJob()) || (!factory.isJoin() && isSplittable())) - break; - // Stop, if this task is part of processing this job, and not complete - if (factory.isParticipant(this) && !factory.isJobComplete(this)) - break; - } - catch (IOException e) - { - error(e.getMessage()); - return false; - } - - i++; - } - - if (i < progression.length) - { - if (factory.isJoin() && isSplitJob()) - { - setActiveTaskId(factory.getId(), false); // ID is just a marker for state machine - join(); - return false; - } - else if (!factory.isJoin() && isSplittable()) - { - setActiveTaskId(factory.getId(), false); // ID is just a marker for state machine - split(); - return false; - } - - // Set next task to be run - if (!setActiveTaskId(factory.getActiveId(this))) - { - return false; - } - - // If it is local, then it can be run - return isActiveTaskLocal(); - } - else - { - // Job is complete - if (isSplitJob()) - { - setActiveTaskId(null, false); - join(); - } - else - { - setActiveTaskId(null); - } - return false; - } - } - - public boolean isAutoRetry() - { - TaskFactory factory = 
getActiveTaskFactory(); - return null != factory && _activeTaskRetries < factory.getAutoRetry() && factory.isAutoRetryEnabled(this); - } - - public boolean autoRetry() - { - try - { - if (isAutoRetry()) - { - info("Attempting to auto-retry"); - PipelineJobService.get().getJobStore().retry(getJobGUID()); - // Retry has been queued - return true; - } - } - catch (IOException | NoSuchJobException e) - { - warn("Failed to start automatic retry.", e); - } - return false; - } - - /** - * Subclasses that override this method instead of defining a task pipeline are responsible for setting the job's - * status at the end of their execution to either COMPLETE or ERROR - */ - @Override @Trace - public void run() - { - assert ThreadContext.isEmpty(); // Prevent/detect leaks - // Connect log messages with the active trace and span - ThreadContext.put(CorrelationIdentifier.getTraceIdKey(), CorrelationIdentifier.getTraceId()); - ThreadContext.put(CorrelationIdentifier.getSpanIdKey(), CorrelationIdentifier.getSpanId()); - - try - { - // The act of queueing the job runs the state machine for the first time. 
- do - { - try - { - runActiveTask(); - } - catch (IOException | PipelineJobException e) - { - error(e.getMessage(), e); - } - catch (CancelledException e) - { - throw e; - } - catch (RuntimeException e) - { - error(e.getMessage(), e); - ExceptionUtil.logExceptionToMothership(null, e); - // Rethrow to let the standard Mule exception handler fire and deal with the job state - throw e; - } - } - while (runStateMachine()); - } - catch (CancelledException e) - { - _activeTaskStatus = TaskStatus.cancelled; - // Don't need to do anything else, job has already been set to CANCELLED - } - finally - { - PipelineService.get().getPipelineQueue().almostDone(this); - - ThreadContext.remove(CorrelationIdentifier.getTraceIdKey()); - ThreadContext.remove(CorrelationIdentifier.getSpanIdKey()); - } - } - - // Should be called in run()'s finally by any class that overrides run(), if class uses LocalDirectory - protected void finallyCleanUpLocalDirectory() - { - if (null != _localDirectory && isDone()) - { - try - { - Path remoteLogFilePath = _localDirectory.cleanUpLocalDirectory(); - - //Update job log entry's log location to remote path - if (null != remoteLogFilePath) - { - //NOTE: any errors here can't be recorded to job log as it may no longer be local and writable - setLogFile(remoteLogFilePath); - setStatus(getActiveTaskStatus()); // Force writing to statusFiles - } - } - catch (JobLogInaccessibleException e) - { - // Can't write to job log as the log file is either null or inaccessible. - ExceptionUtil.logExceptionToMothership(null, e); - } - catch (Exception e) - { - // Attempt to record the error to the log. Move failed, so log should still be local and writable. - error("Error trying to move log file", e); - } - } - } - - /** - * Override and return true for job that may be split. Also, override - * the createSplitJobs() method to return the sub-jobs. 
- * - * @return true if the job may be split - */ - public boolean isSplittable() - { - return false; - } - - /** - * @return true if this is a split job, as determined by whether it has a parent. - */ - public boolean isSplitJob() - { - return getParentGUID() != null; - } - - /** - * @return true if this is a join job waiting for split jobs to complete. - */ - public boolean isSplitWaiting() - { - // Return false, if this job cannot be split. - if (!isSplittable()) - return false; - - // A join job with an active task that is not a join task, - // is waiting for a split to complete. - TaskFactory factory = getActiveTaskFactory(); - return (factory != null && !factory.isJoin()); - } - - /** - * Override and return instances of sub-jobs for a splittable job. - * - * @return sub-jobs requiring separate processing - */ - public List createSplitJobs() - { - return Collections.singletonList(this); - } - - /** - * Handles merging accumulated changes from split jobs into this job, which - * is a joined job. - * - * @param job the split job that has run to completion - */ - public void mergeSplitJob(PipelineJob job) - { - // Add experiment actions recorded. - _actionSet.add(job.getActionSet()); - - // Add any errors that happened in the split job. 
- _errors += job._errors; - } - - public void store() throws NoSuchJobException - { - PipelineJobService.get().getJobStore().storeJob(this); - } - - private void split() - { - try - { - PipelineJobService.get().getJobStore().split(this); - } - catch (IOException e) - { - error(e.getMessage(), e); - } - } - - private void join() - { - try - { - PipelineJobService.get().getJobStore().join(this); - } - catch (IOException | NoSuchJobException e) - { - error(e.getMessage(), e); - } - } - - ///////////////////////////////////////////////////////////////////////// - // Support for running processes - - @Nullable - private PrintWriter createPrintWriter(@Nullable File outputFile, boolean append) throws PipelineJobException - { - if (outputFile == null) - return null; - - try - { - return new PrintWriter(new BufferedWriter(new FileWriter(outputFile, append))); - } - catch (IOException e) - { - throw new PipelineJobException("Could not create the " + outputFile + " file.", e); - } - } - - public void runSubProcess(ProcessBuilder pb, File dirWork) throws PipelineJobException - { - runSubProcess(pb, dirWork, null, 0, false); - } - - /** - * If logLineInterval is greater than 1, the first logLineInterval lines of output will be written to the - * job's main log file. 
- */ - public void runSubProcess(ProcessBuilder pb, File dirWork, File outputFile, int logLineInterval, boolean append) - throws PipelineJobException - { - runSubProcess(pb, dirWork, outputFile, logLineInterval, append, 0, null); - } - - public void runSubProcess(ProcessBuilder pb, File dirWork, File outputFile, int logLineInterval, boolean append, long timeout, TimeUnit timeoutUnit) - throws PipelineJobException - { - Process proc; - - String commandName = pb.command().get(0); - commandName = commandName.substring( - Math.max(commandName.lastIndexOf('/'), commandName.lastIndexOf('\\')) + 1); - header(commandName + " output"); - - // Update PATH environment variable to make sure all files in the tools - // directory and the directory of the executable or on the path. - String toolDir = PipelineJobService.get().getAppProperties().getToolsDirectory(); - if (!StringUtils.isEmpty(toolDir)) - { - String path = System.getenv("PATH"); - if (path == null) - { - path = toolDir; - } - else - { - path = toolDir + File.pathSeparatorChar + path; - } - - // If the command has a path, then prepend its parent directory to the PATH - // environment variable as well. 
- String exePath = pb.command().get(0); - if (exePath != null && !exePath.isEmpty() && exePath.indexOf(File.separatorChar) != -1) - { - File fileExe = new File(exePath); - String exeDir = fileExe.getParent(); - if (!exeDir.equals(toolDir) && fileExe.exists()) - path = fileExe.getParent() + File.pathSeparatorChar + path; - } - - pb.environment().put("PATH", path); - - String dyld = System.getenv("DYLD_LIBRARY_PATH"); - if (dyld == null) - { - dyld = toolDir; - } - else - { - dyld = toolDir + File.pathSeparatorChar + dyld; - } - pb.environment().put("DYLD_LIBRARY_PATH", dyld); - } - - // tell more modern TPP tools to run headless (so no perl calls etc) bpratt 4-14-09 - pb.environment().put("XML_ONLY", "1"); - // tell TPP tools not to mess with tmpdirs, we handle this at higher level - pb.environment().put("WEBSERVER_TMP",""); - - try - { - pb.directory(dirWork); - - // TODO: Errors should go to log even when output is redirected to a file. - pb.redirectErrorStream(true); - - info("Working directory is " + dirWork.getAbsolutePath()); - info("running: " + StringUtils.join(pb.command().iterator(), " ")); - - proc = pb.start(); - } - catch (SecurityException se) - { - throw new PipelineJobException("Failed starting process '" + pb.command() + "'. 
Permissions do not allow execution.", se); - } - catch (IOException eio) - { - throw new PipelineJobException("Failed starting process '" + pb.command() + "'", eio); - } - - - try (QuietCloser ignored = PipelineJobService.get().trackForCancellation(proc)) - { - // create thread pool for collecting the process output - ExecutorService pool = Executors.newSingleThreadExecutor(); - - try (PrintWriter fileWriter = createPrintWriter(outputFile, append)) - { - // collect output using separate thread so we can enforce a timeout on the process - Future output = pool.submit(() -> { - try (BufferedReader procReader = Readers.getReader(proc.getInputStream())) - { - String line; - int count = 0; - while ((line = procReader.readLine()) != null) - { - count++; - if (fileWriter == null) - info(line); - else - { - if (logLineInterval > 0 && count < logLineInterval) - info(line); - else if (count == logLineInterval) - info("Writing additional tool output lines to " + outputFile.getName()); - fileWriter.println(line); - } - } - return count; - } - }); - - try - { - if (timeout > 0) - { - if (!proc.waitFor(timeout, timeoutUnit)) - { - proc.destroyForcibly().waitFor(); - - error("Process killed after exceeding timeout of " + timeout + " " + timeoutUnit.name().toLowerCase()); - } - } - else - { - proc.waitFor(); - } - - int result = proc.exitValue(); - if (result != 0) - { - throw new ToolExecutionException("Failed running " + pb.command().get(0) + ", exit code " + result, result); - } - - int count = output.get(); - if (fileWriter != null) - info(count + " lines written total to " + outputFile.getName()); - } - catch (InterruptedException ei) - { - throw new PipelineJobException("Interrupted process for '" + dirWork.getPath() + "'.", ei); - } - catch (ExecutionException e) - { - // Exception thrown in output collecting thread - Throwable cause = e.getCause(); - if (cause instanceof IOException) - throw new PipelineJobException("Failed writing output for process in '" + 
dirWork.getPath() + "'.", cause); - - throw new PipelineJobException(cause); - } - } - finally - { - pool.shutdownNow(); - } - } - } - - public String getLogLevel() - { - return _loggerLevel; - } - - public void setLogLevel(String level) - { - if (!_loggerLevel.equals(level)) - { - _loggerLevel = level; - _logger = null; // Reset the logger - } - } - - public Logger getClassLogger() - { - return _log; - } - - private static class OutputLogger extends SimpleLogger - { - private final PipelineJob _job; - private boolean _isSettingStatus; - private final Path _file; - private final String LINE_SEP = System.lineSeparator(); - private final String datePattern = "dd MMM yyyy HH:mm:ss,SSS"; - - protected OutputLogger(PipelineJob job, Path file, String name, Level level) - { - super(name, level, false, false, false, false, "", null, new PropertiesUtil(PropertiesUtil.getSystemProperties()), null); - _job = job; - _file = file; - } - - // called from LogOutputStream.flush() - @Override - public void log(Level level, String message) - { - _job.getClassLogger().log(level, message); - write(message, null, level.toString()); - } - - private String getSystemLogMessage(Object message) - { - StringBuilder sb = new StringBuilder(); - sb.append("(from pipeline job log file "); - sb.append(_job.getLogFile().toString()); - if (message != null) - { - sb.append(": "); - String stringMessage = message.toString(); - // Limit the maximum line length - final int maxLength = 10000; - if (stringMessage.length() > maxLength) - { - stringMessage = stringMessage.substring(0, maxLength) + "..."; - } - sb.append(stringMessage); - } - sb.append(")"); - return sb.toString(); - } - - public void setErrorStatus(Object message) - { - if (_isSettingStatus || _job._activeTaskStatus == TaskStatus.cancelled) - return; - - _isSettingStatus = true; - try - { - _job.setStatus(TaskStatus.error, message == null ? 
"ERROR" : message.toString()); - } - finally - { - _isSettingStatus = false; - } - } - - @Override - public void logMessage(String fqcn, Level mgsLevel, Marker marker, Message msg, Throwable throwable) - { - if (_job.getClassLogger().isEnabled(mgsLevel, marker)) - { - _job.getClassLogger().log(mgsLevel, marker, new Message() - { - @Override - public String getFormattedMessage() - { - return getSystemLogMessage(msg.getFormattedMessage()); - } - - @Override - public Object[] getParameters() - { - return msg.getParameters(); - } - - @Override - public Throwable getThrowable() - { - return msg.getThrowable(); - } - }, throwable); - } - - // Write to the job's log before setting the error status, which may end up throwing a CancelledException - // to signal that we need to bail out right away - write(msg.getFormattedMessage(), throwable, mgsLevel.getStandardLevel().name()); - - if (mgsLevel.isMoreSpecificThan(Level.ERROR)) - { - setErrorStatus(msg.getFormattedMessage()); - } - } - - private void write(String message, @Nullable Throwable t, String level) - { - String formattedDate = DateUtil.formatDateTime(new Date(), datePattern); - - try (PrintWriter writer = new PrintWriter(Files.newBufferedWriter(_file, StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.APPEND))) - { - var line = formattedDate + " " + - String.format("%-5s", level) + - ": " + - message; - writer.write(line); - writer.write(LINE_SEP); - if (null != t) - { - t.printStackTrace(writer); - } - } - catch (IOException e) - { - Path parentFile = _file.getParent(); - if (parentFile != null && !NetworkDrive.exists(parentFile)) - { - try - { - FileUtil.createDirectories(parentFile); - write(message, t, level); - } - catch (IOException dirE) - { - _log.error("Failed appending to file. 
Unable to create parent directories", e); - } - } - else - _log.error("Failed appending to file.", e); - } - } - } - - public static class JobLogInaccessibleException extends IllegalStateException - { - public JobLogInaccessibleException(String message) - { - super(message); - } - } - - // Multiple threads log messages, so synchronize to make sure that no one gets a partially initialized logger - public synchronized Logger getLogger() - { - if (_logger == null) - { - if (null == _logFile || FileUtil.hasCloudScheme(_logFile)) - throw new JobLogInaccessibleException("LogFile null or cloud."); - - // Create appending logger. - String loggerName = PipelineJob.class.getSimpleName() + ".Logger." + _logFile.toString(); - _logger = new OutputLogger(this, _logFile, loggerName, Level.toLevel(_loggerLevel)); - } - - return _logger; - } - - public void error(String message) - { - error(message, null); - } - - public void error(String message, @Nullable Throwable t) - { - setErrors(getErrors() + 1); - if (getLogger() != null) - getLogger().error(message, t); - } - - public void debug(String message) - { - debug(message, null); - } - - public void debug(String message, @Nullable Throwable t) - { - if (getLogger() != null) - getLogger().debug(message, t); - } - - public void warn(String message) - { - warn(message, null); - } - - public void warn(String message, @Nullable Throwable t) - { - if (getLogger() != null) - getLogger().warn(message, t); - } - - public void info(String message) - { - info(message, null); - } - - public void info(String message, @Nullable Throwable t) - { - if (getLogger() != null) - getLogger().info(message, t); - } - - public void header(String message) - { - info(message); - info("======================================="); - } - - ///////////////////////////////////////////////////////////////////////// - // ViewBackgroundInfo access - // WARNING: Some access of ViewBackgroundInfo is not supported when - // the job is running outside the LabKey Server. 
- - /** - * Gets the container ID from the ViewBackgroundInfo. - * - * @return the ID for the container in which the job was started - */ - public String getContainerId() - { - return getInfo().getContainerId(); - } - - /** - * Gets the User instance from the ViewBackgroundInfo. - * WARNING: Not supported if job is not running in the LabKey web server. - * - * @return the user who started the job - * @throws IllegalStateException if invoked on a remote pipeline server - */ - @Override - public User getUser() - { - if (!PipelineJobService.get().isWebServer()) - { - throw new IllegalStateException("User lookup not available on remote pipeline servers"); - } - return getInfo().getUser(); - } - - /** - * Gets the Container instance from the ViewBackgroundInfo. - * WARNING: Not supported if job is not running in the LabKey web server. - * - * @return the container in which the job was started - * @throws IllegalStateException if invoked on a remote pipeline server - */ - @Override - public Container getContainer() - { - if (!PipelineJobService.get().isWebServer()) - { - throw new IllegalStateException("User lookup not available on remote pipeline servers"); - } - return getInfo().getContainer(); - } - - /** - * Gets the ActionURL instance from the ViewBackgroundInfo. - * WARNING: Not supported if job is not running in the LabKey Server. - * - * @return the URL of the request that started the job - */ - public ActionURL getActionURL() - { - return getInfo().getURL(); - } - - /** - * Gets the ViewBackgroundInfo associated with this job in its contstructor. - * WARNING: Although this function is supported outside the LabKey Server, certain - * accessors on the ViewBackgroundInfo itself are not. 
- * - * @return information from the starting request, for use in background processing - */ - public ViewBackgroundInfo getInfo() - { - return _info; - } - - ///////////////////////////////////////////////////////////////////////// - // Scheduling interface - // TODO: Figure out how these apply to the Enterprise Pipeline - - protected boolean canInterrupt() - { - return false; - } - - public synchronized boolean interrupt() - { - PipelineJobService.get().cancelForJob(getJobGUID()); - if (!canInterrupt()) - return false; - _interrupted = true; - return true; - } - - public synchronized boolean checkInterrupted() - { - return _interrupted; - } - - public boolean allowMultipleSimultaneousJobs() - { - return false; - } - - synchronized public void setSubmitted() - { - _submitted = true; - notifyAll(); - } - - synchronized private boolean isSubmitted() - { - return _submitted; - } - - synchronized private void waitUntilSubmitted() - { - while (!_submitted) - { - try - { - wait(); - } - catch (InterruptedException ignored) {} - } - } - - ///////////////////////////////////////////////////////////////////////// - // JobRunner.Job interface - - @Override - public Object get() throws InterruptedException, ExecutionException - { - waitUntilSubmitted(); - return super.get(); - } - - @Override - public Object get(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException - { - return get(); - } - - @Override - protected void starting(Thread thread) - { - _queue.starting(this, thread); - } - - @Override - public boolean cancel(boolean mayInterruptIfRunning) - { - if (isSubmitted()) - { - PipelineJobService.get().cancelForJob(getJobGUID()); - return super.cancel(mayInterruptIfRunning); - } - return true; - } - - @Override - public boolean isDone() - { - if (!isSubmitted()) - return false; - return super.isDone(); - } - - @Override - public boolean isCancelled() - { - if (!isSubmitted()) - return false; - return super.isCancelled(); - } - - @Override - public 
void done(Throwable throwable) - { - if (null != throwable) - { - try - { - error("Uncaught exception in PipelineJob: " + this, throwable); - } - catch (Exception ignored) {} - } - if (_queue != null) - { - _queue.done(this); - } - - PipelineJobNotificationProvider notificationProvider = PipelineService.get().getPipelineJobNotificationProvider(getJobNotificationProvider(), this); - if (notificationProvider != null) - notificationProvider.onJobDone(this); - - finallyCleanUpLocalDirectory(); //Since this potentially contains the job log, it should be run after the notifications tasks are executed - } - - protected String getJobNotificationProvider() - { - return null; - } - - protected String getNotificationType(PipelineJob.TaskStatus status) - { - return status.getNotificationType(); - } - - public String serializeJob(boolean ensureDeserialize) - { - return PipelineJobService.get().getJobStore().serializeToJSON(this, ensureDeserialize); - } - - public static String getClassNameFromJson(String serialized) - { - // Expect [ "org.labkey....", {.... 
- if (StringUtils.startsWith(serialized, "[")) - { - return StringUtils.substringBetween(serialized, "\""); - } - else - { - throw new RuntimeException("Unexpected serialized JSON"); - } - } - - @Nullable - public static PipelineJob deserializeJob(@NotNull String serialized) - { - try - { - String className = PipelineJob.getClassNameFromJson(serialized); - return PipelineJobService.get().getJobStore().deserializeFromJSON(serialized, (Class)Class.forName(className)); - } - catch (ClassNotFoundException e) - { - _log.error("Deserialized class not found.", e); - } - return null; - } - - public static ObjectMapper createObjectMapper() - { - ObjectMapper mapper = JsonUtil.DEFAULT_MAPPER.copy() - .setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.NONE) - .setVisibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY) - .disable(SerializationFeature.FAIL_ON_EMPTY_BEANS) - .enableDefaultTyping(ObjectMapper.DefaultTyping.NON_FINAL); - - SimpleModule module = new SimpleModule(); - module.addSerializer(new SqlTimeSerialization.SqlTimeSerializer()); - module.addDeserializer(Time.class, new SqlTimeSerialization.SqlTimeDeserializer()); - module.addDeserializer(AtomicLong.class, new AtomicLongDeserializer()); - module.addSerializer(NullSafeBindException.class, new NullSafeBindExceptionSerializer()); - module.addSerializer(QueryKey.class, new QueryKeySerialization.Serializer()); - module.addDeserializer(SchemaKey.class, new QueryKeySerialization.SchemaKeyDeserializer()); - module.addDeserializer(FieldKey.class, new QueryKeySerialization.FieldKeyDeserializer()); - module.addSerializer(Path.class, new PathSerialization.Serializer()); - module.addDeserializer(Path.class, new PathSerialization.Deserializer()); - module.addSerializer(CronExpression.class, new CronExpressionSerialization.Serializer()); - module.addDeserializer(CronExpression.class, new CronExpressionSerialization.Deserializer()); - module.addSerializer(URI.class, new URISerialization.Serializer()); - 
module.addDeserializer(URI.class, new URISerialization.Deserializer()); - module.addSerializer(File.class, new FileSerialization.Serializer()); - module.addDeserializer(File.class, new FileSerialization.Deserializer()); - module.addDeserializer(Filter.class, new FilterDeserializer()); - - mapper.registerModule(module); - return mapper; - } - - public abstract static class TestSerialization extends org.junit.Assert - { - public void testSerialize(PipelineJob job, @Nullable Logger log) - { - PipelineStatusFile.JobStore jobStore = PipelineJobService.get().getJobStore(); - try - { - if (null != log) - log.info("Hi Logger is here!"); - String json = jobStore.serializeToJSON(job, true); - if (null != log) - log.info(json); - PipelineJob job2 = jobStore.deserializeFromJSON(json, job.getClass()); - if (null != log) - log.info(job2.toString()); - - List errors = job.compareJobs(job2); - if (!errors.isEmpty()) - { - fail("Pipeline objects don't match: " + StringUtils.join(errors, ",")); - } - } - catch (Exception e) - { - if (null != log) - log.error("Class not found", e); - } - } - } - - @Override - public boolean equals(Object o) - { - // Fix issue 35876: Second run of a split XTandem pipeline job not completing - don't rely on the job being - // represented in memory as a single object - if (this == o) return true; - if (!(o instanceof PipelineJob that)) return false; - return Objects.equals(_jobGUID, that._jobGUID); - } - - @Override - public int hashCode() - { - return Objects.hash(_jobGUID); - } - - public List compareJobs(PipelineJob job2) - { - PipelineJob job1 = this; - List errors = new ArrayList<>(); - if (!PropertyUtil.nullSafeEquals(job1._activeTaskId, job2._activeTaskId)) - errors.add("_activeTaskId"); - if (job1._activeTaskRetries != job2._activeTaskRetries) - errors.add("_activeTaskRetries"); - if (!PropertyUtil.nullSafeEquals(job1._activeTaskStatus, job2._activeTaskStatus)) - errors.add("_activeTaskStatus"); - if (job1._errors != job2._errors) - 
errors.add("_errors"); - if (job1._interrupted != job2._interrupted) - errors.add("_interrupted"); - if (!PropertyUtil.nullSafeEquals(job1._jobGUID, job2._jobGUID)) - errors.add("_jobGUID"); - if (!PropertyUtil.nullSafeEquals(job1._logFile, job2._logFile)) - { - if (null == job1._logFile || null == job2._logFile) - errors.add("_logFile"); - else if (!FileUtil.getAbsoluteCaseSensitiveFile(job1._logFile.toFile()).getAbsolutePath().equalsIgnoreCase(FileUtil.getAbsoluteCaseSensitiveFile(job2._logFile.toFile()).getAbsolutePath())) - errors.add("_logFile"); - } - if (!PropertyUtil.nullSafeEquals(job1._parentGUID, job2._parentGUID)) - errors.add("_parentGUID"); - if (!PropertyUtil.nullSafeEquals(job1._provider, job2._provider)) - errors.add("_provider"); - if (job1._submitted != job2._submitted) - errors.add("_submitted"); - - return errors; - } - - /** - * @return Path String for a local working directory, temporary if root is cloud based - */ - protected Path getWorkingDirectoryString() - { - return !getPipeRoot().isCloudRoot() ? getPipeRoot().getRootNioPath() : FileUtil.getTempDirectory().toPath(); - } - - /** - * Generate a LocalDirectory and log file, temporary if need be, for use by the job - * Note: Override getDefaultLocalDirectoryString if piperoot isn't the desired local directory - * - * @param pipeRoot Pipeline's root directory - * @param moduleName supplying the pipeline - * @param baseLogFileName base name of the log file - */ - protected final void setupLocalDirectoryAndJobLog(PipeRoot pipeRoot, String moduleName, String baseLogFileName) - { - LocalDirectory localDirectory = LocalDirectory.create(pipeRoot, moduleName, baseLogFileName, getWorkingDirectoryString()); - setLocalDirectory(localDirectory); - setLogFile(localDirectory.determineLogFile()); - } -} +/* + * Copyright (c) 2008-2019 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.labkey.api.pipeline; + +import com.fasterxml.jackson.annotation.JsonAutoDetect; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.PropertyAccessor; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializationFeature; +import com.fasterxml.jackson.databind.module.SimpleModule; +import datadog.trace.api.CorrelationIdentifier; +import datadog.trace.api.Trace; +import org.apache.commons.io.FileUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.Marker; +import org.apache.logging.log4j.ThreadContext; +import org.apache.logging.log4j.message.Message; +import org.apache.logging.log4j.simple.SimpleLogger; +import org.apache.logging.log4j.util.PropertiesUtil; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.labkey.api.action.NullSafeBindException; +import org.labkey.api.assay.AssayFileWriter; +import org.labkey.api.data.Container; +import org.labkey.api.exp.api.ExpRun; +import org.labkey.api.gwt.client.util.PropertyUtil; +import org.labkey.api.pipeline.file.FileAnalysisJobSupport; +import org.labkey.api.query.FieldKey; +import org.labkey.api.query.QueryKey; +import org.labkey.api.query.SchemaKey; +import org.labkey.api.reader.Readers; +import org.labkey.api.security.User; +import org.labkey.api.util.DateUtil; +import 
org.labkey.api.util.ExceptionUtil; +import org.labkey.api.util.FileType; +import org.labkey.api.util.FileUtil; +import org.labkey.api.util.GUID; +import org.labkey.api.util.Job; +import org.labkey.api.util.JsonUtil; +import org.labkey.api.util.NetworkDrive; +import org.labkey.api.util.QuietCloser; +import org.labkey.api.util.URLHelper; +import org.labkey.api.util.logging.LogHelper; +import org.labkey.api.view.ActionURL; +import org.labkey.api.view.ViewBackgroundInfo; +import org.labkey.api.writer.ContainerUser; +import org.labkey.api.writer.PrintWriters; +import org.labkey.remoteapi.query.Filter; +import org.labkey.vfs.FileLike; +import org.labkey.vfs.FileSystemLike; +import org.quartz.CronExpression; + +import java.io.BufferedReader; +import java.io.BufferedWriter; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.io.InputStream; +import java.io.PrintWriter; +import java.io.Serializable; +import java.net.URI; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardOpenOption; +import java.sql.Time; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.Date; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicLong; + +/** + * A job represents the invocation of a pipeline on a certain set of inputs. It can be monolithic (a single run() method) + * or be comprised of multiple tasks ({@link Task}) that can be checkpointed and restarted individually. + */ +@JsonIgnoreProperties(value={"_logFilePathName"}, allowGetters = true) //Property removed. 
Added here for backwards compatibility +abstract public class PipelineJob extends Job implements Serializable, ContainerUser +{ + public static final FileType FT_LOG = new FileType(Arrays.asList(".log"), ".log", Arrays.asList("text/plain")); + + public static final String PIPELINE_EMAIL_ADDRESS_PARAM = "pipeline, email address"; + public static final String PIPELINE_USERNAME_PARAM = "pipeline, username"; + public static final String PIPELINE_PROTOCOL_NAME_PARAM = "pipeline, protocol name"; + public static final String PIPELINE_PROTOCOL_DESCRIPTION_PARAM = "pipeline, protocol description"; + public static final String PIPELINE_LOAD_FOLDER_PARAM = "pipeline, load folder"; + public static final String PIPELINE_JOB_INFO_PARAM = "pipeline, jobInfo"; + public static final String PIPELINE_TASK_INFO_PARAM = "pipeline, taskInfo"; + public static final String PIPELINE_TASK_OUTPUT_PARAMS_PARAM = "pipeline, taskOutputParams"; + + protected static Logger _log = LogHelper.getLogger(PipelineJob.class, "Execution and queuing of pipeline jobs"); + // Send start/stop messages to a separate logger because the default logger for this class is set to + // only write ERROR level events to the system log + private static final Logger _logJobStopStart = LogManager.getLogger(Job.class); + + public static Logger getJobLogger(Class clazz) + { + return LogManager.getLogger(PipelineJob.class.getName() + ".." 
+ clazz.getName()); + } + + public RecordedActionSet getActionSet() + { + return _actionSet; + } + + /** + * Clear out the set of recorded actions + * @param run run that represents the previous set of recorded actions + */ + public void clearActionSet(ExpRun run) + { + _actionSet = new RecordedActionSet(); + } + + public FileLike getLogFileLike() + { + return FileSystemLike.wrapFile(getLogFilePath()); + } + + public enum TaskStatus + { + /** Job is in the queue, waiting for its turn to run */ + waiting + { + @Override + public boolean isActive() { return true; } + + @Override + public boolean matches(String statusText) + { + if (statusText == null) + return false; + else if (!TaskStatus.splitWaiting.matches(statusText) && statusText.toLowerCase().endsWith("waiting")) + return true; + return super.matches(statusText); + } + }, + /** Job is doing its work */ + running + { + @Override + public boolean isActive() { return true; } + }, + /** Terminal state, job is finished and completed without errors */ + complete + { + @Override + public boolean isActive() { return false; } + }, + /** Terminal state (but often retryable), job is done running and completed with error(s) */ + error + { + @Override + public boolean isActive() { return false; } + }, + /** Job is in the process of being cancelled, but may still be running or queued at the moment */ + cancelling + { + @Override + public boolean isActive() { return true; } + }, + /** Terminal state, indicating that a user cancelled the job before it completed or errored */ + cancelled + { + @Override + public boolean isActive() { return false; } + }, + splitWaiting + { + @Override + public boolean isActive() { return false; } + + @Override + public String toString() { return "SPLIT WAITING"; } + }; + + /** @return whether this step is considered to be actively running */ + public abstract boolean isActive(); + + public String toString() + { + return super.toString().toUpperCase(); + } + + public boolean matches(String 
statusText) + { + return toString().equalsIgnoreCase(statusText); + } + + public final String getNotificationType() + { + return getClass().getName() + "." + name(); + } + } + + /** + * Implements a runnable to complete a part of the + * processing associated with a particular PipelineJob. This is often the execution of an external tool, + * the importing of files into the database, etc. + */ + abstract static public class Task + { + private final PipelineJob _job; + protected FactoryType _factory; + + public Task(FactoryType factory, PipelineJob job) + { + _job = job; + _factory = factory; + } + + public PipelineJob getJob() + { + return _job; + } + + /** + * Do the work of the task. The task should not set the status of the job to complete - this will be handled + * by the caller. + * @return the files used as inputs and generated as outputs, and the steps that operated on them + * @throws PipelineJobException if something went wrong during the execution of the job. The caller will + * handle setting the job's status to ERROR. 
+ */ + @NotNull + public abstract RecordedActionSet run() throws PipelineJobException; + } + + /* + * JMS message header names + */ + private static final String HEADER_PREFIX = "LABKEY_"; + public static final String LABKEY_JOBTYPE_PROPERTY = HEADER_PREFIX + "JOBTYPE"; + public static final String LABKEY_JOBID_PROPERTY = HEADER_PREFIX + "JOBID"; + public static final String LABKEY_CONTAINERID_PROPERTY = HEADER_PREFIX + "CONTAINERID"; + public static final String LABKEY_TASKPIPELINE_PROPERTY = HEADER_PREFIX + "TASKPIPELINE"; + public static final String LABKEY_TASKID_PROPERTY = HEADER_PREFIX + "TASKID"; + public static final String LABKEY_TASKSTATUS_PROPERTY = HEADER_PREFIX + "TASKSTATUS"; + /** The execution location to which the job's current task is assigned */ + public static final String LABKEY_LOCATION_PROPERTY = HEADER_PREFIX + "LOCATION"; + + private String _provider; + private ViewBackgroundInfo _info; + private String _jobGUID; + private String _parentGUID; + private TaskId _activeTaskId; + @NotNull + private TaskStatus _activeTaskStatus; + private int _activeTaskRetries; + @NotNull + private PipeRoot _pipeRoot; + volatile private boolean _interrupted; + private boolean _submitted; + private int _errors; + private RecordedActionSet _actionSet = new RecordedActionSet(); + + private String _loggerLevel = Level.DEBUG.toString(); + + // Don't save these + protected transient Logger _logger; + private transient boolean _settingStatus; + private transient PipelineQueue _queue; + + private Path _logFile; + private LocalDirectory _localDirectory; + + // Default constructor for serialization + protected PipelineJob() + { + } + + /** Although having a null provider is legal, it is recommended that one be used + * so that it can respond to events as needed */ + public PipelineJob(@Nullable String provider, ViewBackgroundInfo info, @NotNull PipeRoot root) + { + _info = info; + _provider = provider; + _jobGUID = GUID.makeGUID(); + _activeTaskStatus = 
TaskStatus.waiting; + + + _pipeRoot = root; + + _actionSet = new RecordedActionSet(); + } + + public PipelineJob(PipelineJob job) + { + // Not yet queued + _queue = null; + + // New ID + _jobGUID = GUID.makeGUID(); + + // Copy everything else + _info = job._info; + _provider = job._provider; + _parentGUID = job._jobGUID; + _pipeRoot = job._pipeRoot; + _interrupted = job._interrupted; + _submitted = job._submitted; + _errors = job._errors; + _loggerLevel = job._loggerLevel; + _logger = job._logger; + _logFile = job._logFile; + + _activeTaskId = job._activeTaskId; + _activeTaskStatus = job._activeTaskStatus; + + _actionSet = new RecordedActionSet(job.getActionSet()); + _localDirectory = job._localDirectory; + } + + public String getProvider() + { + return _provider; + } + + @Deprecated + public void setProvider(String provider) + { + _provider = provider; + } + + public int getErrors() + { + return _errors; + } + + public void setErrors(int errors) + { + if (errors > 0) + _activeTaskStatus = TaskStatus.error; + + _errors = errors; + } + + /** + * This job has been restored from a checkpoint for the purpose of + * a retry. Record retry information before it is checkpointed again. 
+ */ + public void retryUpdate() + { + _errors++; + _activeTaskRetries++; + } + + public Map getParameters() + { + return Collections.emptyMap(); + } + + public String getJobGUID() + { + return _jobGUID; + } + + public String getParentGUID() + { + return _parentGUID; + } + + @Nullable + public TaskId getActiveTaskId() + { + return _activeTaskId; + } + + public boolean setActiveTaskId(@Nullable TaskId activeTaskId) + { + return setActiveTaskId(activeTaskId, true); + } + + public boolean setActiveTaskId(@Nullable TaskId activeTaskId, boolean updateStatus) + { + if (activeTaskId == null || !activeTaskId.equals(_activeTaskId)) + { + _activeTaskId = activeTaskId; + _activeTaskRetries = 0; + } + if (_activeTaskId == null) + _activeTaskStatus = TaskStatus.complete; + else + _activeTaskStatus = TaskStatus.waiting; + + return !updateStatus || updateStatusForTask(); + } + + @NotNull + public TaskStatus getActiveTaskStatus() + { + return _activeTaskStatus; + } + + /** @return whether the status was set successfully */ + public boolean setActiveTaskStatus(@NotNull TaskStatus activeTaskStatus) + { + _activeTaskStatus = activeTaskStatus; + return updateStatusForTask(); + } + + public TaskFactory getActiveTaskFactory() + { + if (getActiveTaskId() == null) + return null; + + return PipelineJobService.get().getTaskFactory(getActiveTaskId()); + } + + @NotNull + public PipeRoot getPipeRoot() + { + return _pipeRoot; + } + + @Deprecated //Please switch to the FileLike version + public void setLogFile(File logFile) + { + setLogFile(logFile.toPath()); + } + + public void setLogFile(FileLike logFile) + { + setLogFile(logFile.toNioPathForWrite()); + } + + @Deprecated //Please switch to the FileLike version + public void setLogFile(Path logFile) + { + // Set Log file path and clear/reset logger + _logFile = logFile.toAbsolutePath().normalize(); + _logger = null; //This should trigger getting the new Logger next time getLogger is called + } + + public File getLogFile() + { + Path logFilePath 
 = getLogFilePath();
        // Only return a java.io.File for local paths; cloud-backed logs have no File form
        if (null != logFilePath && !FileUtil.hasCloudScheme(logFilePath))
            return logFilePath.toFile();
        return null;
    }

    /** @return the log file path as set via setLogFile(), possibly cloud-backed; may be null */
    public Path getLogFilePath()
    {
        return _logFile;
    }

    /**
     * Get the remote log path (if local dir set) else return getLogFilePath
     *
     * TODO: Better name getStatusKeyPath? or similar
     */
    public Path getRemoteLogPath()
    {
        LocalDirectory dir = getLocalDirectory();
        if (dir == null)
            return getLogFilePath();

        return dir.getRemoteLogFilePath();
    }

    /** Finds a file name that hasn't been used yet, appending ".1", ".2", etc as needed */
    public static File findUniqueLogFile(File primaryFile, String baseName)
    {
        String validBaseName = FileUtil.makeLegalName(baseName);
        // need to look in current and archived dirs for any unused log file names (issue 20987)
        File fileLog = FT_LOG.newFile(primaryFile.getParentFile(), validBaseName);
        File archivedDir = FileUtil.appendName(primaryFile.getParentFile(), AssayFileWriter.ARCHIVED_DIR_NAME);
        File fileLogArchived = FT_LOG.newFile(archivedDir, validBaseName);

        int index = 1;
        while (NetworkDrive.exists(fileLog) || NetworkDrive.exists(fileLogArchived))
        {
            // Both candidates are rebuilt with the same suffix; index advances once per pass
            // (the increment lives in the second call below)
            fileLog = FT_LOG.newFile(primaryFile.getParentFile(), validBaseName + "." + (index));
            fileLogArchived = FT_LOG.newFile(archivedDir, validBaseName + "."
+ (index++)); + } + + return fileLog; + } + + + public LocalDirectory getLocalDirectory() + { + return _localDirectory; + } + + protected void setLocalDirectory(LocalDirectory localDirectory) + { + _localDirectory = localDirectory; + } + + public static PipelineJob readFromFile(File file) throws IOException, PipelineJobException + { + StringBuilder serializedJob = new StringBuilder(); + try (InputStream fIn = new FileInputStream(file)) + { + BufferedReader reader = Readers.getReader(fIn); + String line; + while ((line = reader.readLine()) != null) + { + serializedJob.append(line); + } + } + + PipelineJob job = PipelineJob.deserializeJob(serializedJob.toString()); + if (null == job) + { + throw new PipelineJobException("Unable to deserialize job"); + } + return job; + } + + + public void writeToFile(File file) throws IOException + { + File newFile = new File(file.getPath() + ".new"); + File origFile = new File(file.getPath() + ".orig"); + + String serializedJob = serializeJob(true); + + try (FileOutputStream fOut = new FileOutputStream(newFile)) + { + PrintWriter writer = PrintWriters.getPrintWriter(fOut); + writer.write(serializedJob); + writer.flush(); + } + + if (NetworkDrive.exists(file)) + { + if (origFile.exists()) + { + // Might be left over from some bad previous run + origFile.delete(); + } + // Don't use File.renameTo() because it doesn't always work depending on the underlying file system + FileUtils.moveFile(file, origFile); + FileUtils.moveFile(newFile, file); + origFile.delete(); + } + else + { + FileUtils.moveFile(newFile, file); + } + PipelineJobService.get().getWorkDirFactory().setPermissions(file); + } + + public boolean updateStatusForTask() + { + TaskFactory factory = getActiveTaskFactory(); + TaskStatus status = getActiveTaskStatus(); + + if (factory != null && !TaskStatus.error.equals(status) && !TaskStatus.cancelled.equals(status)) + return setStatus(factory.getStatusName() + " " + status.toString().toUpperCase()); + else + return 
setStatus(status);
    }

    /** Used for setting status to one of the standard states */
    public boolean setStatus(@NotNull TaskStatus status)
    {
        return setStatus(status.toString());
    }

    /**
     * Used for setting status to a custom state, which is considered to be equivalent to TaskStatus.running
     * unless it matches one of the standard states
     * @throws CancelledException if the job was cancelled by a user and should stop execution
     */
    public boolean setStatus(@NotNull String status)
    {
        return setStatus(status, null);
    }

    /**
     * Used for setting status to one of the standard states
     * @param info more verbose detail on the job's status, such as a percent complete
     * @throws CancelledException if the job was cancelled by a user and should stop execution
     */
    public boolean setStatus(@NotNull TaskStatus status, @Nullable String info)
    {
        return setStatus(status.toString(), info);
    }

    /**
     * Used for setting status to a custom state, which is considered to be equivalent to TaskStatus.running
     * unless it matches one of the standard states
     * @param info more verbose detail on the job's status, such as a percent complete
     * @throws CancelledException if the job was cancelled by a user and should stop execution
     */
    public boolean setStatus(@NotNull String status, @Nullable String info)
    {
        return setStatus(status, info, false);
    }

    /**
     * Used for setting status to a custom state, which is considered to be equivalent to TaskStatus.running
     * unless it matches one of the standard states
     * @param allowInsert if true, the status writer may create a new status record for this job
     * @throws CancelledException if the job was cancelled by a user and should stop execution
     */
    public boolean setStatus(@NotNull String status, @Nullable String info, boolean allowInsert)
    {
        // _settingStatus guards against reentrancy: a failed write triggers
        // setActiveTaskStatus(error), which would otherwise call back into here
        if (_settingStatus)
            return true;

        _settingStatus = true;
        try
        {
            boolean statusSet = PipelineJobService.get().getStatusWriter().setStatus(this, status, info, allowInsert);
            if (!statusSet)
            {
                setActiveTaskStatus(TaskStatus.error);
            }
            return statusSet;
        }
        // Rethrow so it doesn't get handled like other RuntimeExceptions
        catch (CancelledException e)
        {
            _activeTaskStatus =
TaskStatus.cancelled; + throw e; + } + catch (RuntimeException e) + { + Path f = this.getLogFilePath(); + error("Failed to set status to '" + status + "' for '" + + (f == null ? "" : f.toString()) + "'.", e); + throw e; + } + catch (Exception e) + { + Path f = this.getLogFilePath(); + error("Failed to set status to '" + status + "' for '" + + (f == null ? "" : f.toString()) + "'.", e); + } + finally + { + _settingStatus = false; + } + return false; + } + + public void restoreQueue(PipelineQueue queue) + { + // Recursive split and join combinations may cause the queue + // to be restored to a job with a queue already. Would be good + // to have better safe-guards against double-queueing of jobs. + if (queue == _queue) + return; + if (null != _queue) + throw new IllegalStateException(); + _queue = queue; + } + + public void restoreLocalDirectory() + { + if (null != _localDirectory) + setLogFile(_localDirectory.restore()); + } + + public void validateParameters() throws PipelineValidationException + { + TaskPipeline taskPipeline = getTaskPipeline(); + if (taskPipeline != null) + { + for (TaskId taskId : taskPipeline.getTaskProgression()) + { + TaskFactory taskFactory = PipelineJobService.get().getTaskFactory(taskId); + if (taskFactory == null) + throw new PipelineValidationException("Task '" + taskId + "' not found"); + taskFactory.validateParameters(this); + } + } + } + + public boolean setQueue(PipelineQueue queue, TaskStatus initialState) + { + return setQueue(queue, initialState.toString()); + } + + public boolean setQueue(PipelineQueue queue, String initialState) + { + restoreQueue(queue); + + // Initialize the task pipeline + TaskPipeline taskPipeline = getTaskPipeline(); + if (taskPipeline != null) + { + // Save the current job state marshalled to XML, in case of error. + String serializedJob = serializeJob(true); + + // Note runStateMachine returns false, if the job cannot be run locally. 
+ // The job may still need to be put on a JMS queue for remote processing. + // Therefore, the return value cannot be used to determine whether the + // job should be queued. + runStateMachine(); + + // If an error occurred trying to find the first runnable state, then + // store the original job state to allow retry. + if (getActiveTaskStatus() == TaskStatus.error) + { + try + { + PipelineJob originalJob = PipelineJob.deserializeJob(serializedJob); + if (null != originalJob) + originalJob.store(); + else + warn("Failed to checkpoint '" + getDescription() + "' job."); + + } + catch (Exception e) + { + warn("Failed to checkpoint '" + getDescription() + "' job.", e); + } + return false; + } + + // If initialization put this job into a state where it is + // waiting, then it should not be put on the queue. + return !isSplitWaiting(); + } + // Initialize status for non-task pipeline jobs. + else if (_logFile != null) + { + setStatus(initialState); + try + { + store(); + } + catch (Exception e) + { + warn("Failed to checkpoint '" + getDescription() + "' job before queuing.", e); + } + } + + return true; + } + + public void clearQueue() + { + _queue = null; + } + + abstract public URLHelper getStatusHref(); + + abstract public String getDescription(); + + public String toString() + { + return super.toString() + " " + StringUtils.trimToEmpty(getDescription()); + } + + public T getJobSupport(Class inter) + { + if (inter.isInstance(this)) + return (T) this; + + throw new UnsupportedOperationException("Job type " + getClass().getName() + + " does not implement " + inter.getName()); + } + + /** + * Override to provide a TaskPipeline with the option of + * running some tasks remotely. Override the run() function + * to implement the job as a single monolithic task. 
+ * + * @return a task pipeline to run for this job + */ + @Nullable + public TaskPipeline getTaskPipeline() + { + return null; + } + + public boolean isActiveTaskLocal() + { + TaskFactory factory = getActiveTaskFactory(); + return (factory != null && + TaskFactory.WEBSERVER.equalsIgnoreCase(factory.getExecutionLocation())); + } + + public void runActiveTask() throws IOException, PipelineJobException + { + TaskFactory factory = getActiveTaskFactory(); + if (factory == null) + return; + + if (!factory.isJobComplete(this)) + { + Task task = factory.createTask(this); + if (task == null) + return; // Bad task key. + + if (!setActiveTaskStatus(TaskStatus.running)) + { + // The user has deleted (cancelled) the job. + // Throwing this exception will cause the job to go to the ERROR state and stop running + throw new PipelineJobException("Job no longer in database - aborting"); + } + + WorkDirectory workDirectory = null; + RecordedActionSet actions; + + boolean success = false; + try + { + logStartStopInfo("Starting to run task '" + factory.getId() + "' for job '" + this + "' with log file " + getLogFilePath()); + getLogger().info("Starting to run task '" + factory.getId() + "' at location '" + factory.getExecutionLocation() + "'"); + if (PipelineJobService.get().getLocationType() != PipelineJobService.LocationType.WebServer) + { + PipelineJobService.RemoteServerProperties remoteProps = PipelineJobService.get().getRemoteServerProperties(); + if (remoteProps != null) + { + getLogger().info("on host: '" + remoteProps.getHostName() + "'"); + } + } + + if (task instanceof WorkDirectoryTask wdTask) + { + workDirectory = factory.createWorkDirectory(getJobGUID(), getJobSupport(FileAnalysisJobSupport.class), getLogger()); + wdTask.setWorkDirectory(workDirectory); + } + + actions = task.run(); + success = true; + } + finally + { + getLogger().info((success ? "Successfully completed" : "Failed to complete") + " task '" + factory.getId() + "'"); + logStartStopInfo((success ? 
"Successfully completed" : "Failed to complete") + " task '" + factory.getId() + "' for job '" + this + "' with log file " + getLogFile()); + + try + { + if (workDirectory != null) + { + workDirectory.remove(success); + ((WorkDirectoryTask)task).setWorkDirectory(null); + } + } + catch (IOException e) + { + // Don't let this cleanup error mask an original error that causes the job to fail + if (success) + { + // noinspection ThrowFromFinallyBlock + throw e; + } + else + { + if (e.getMessage() != null) + { + error(e.getMessage()); + } + else + { + error("Failed to clean up work directory after error condition, see full error information below.", e); + } + } + } + } + _actionSet.add(actions); + + // An error occurred running the task. Do not complete. + if (TaskStatus.error.equals(getActiveTaskStatus())) + return; + } + else + { + logStartStopInfo("Skipping already completed task '" + factory.getId() + "' for job '" + this + "' with log file " + getLogFile()); + getLogger().info("Skipping already completed task '" + factory.getId() + "' at location '" + factory.getExecutionLocation() + "'"); + } + + if (getActiveTaskStatus() != TaskStatus.complete && getActiveTaskStatus() != TaskStatus.cancelled) + setActiveTaskStatus(TaskStatus.complete); + } + + public static void logStartStopInfo(String message) + { + _logJobStopStart.info(message); + } + + public boolean runStateMachine() + { + TaskPipeline pipeline = getTaskPipeline(); + if (pipeline == null) + { + assert false : "Either override getTaskPipeline() or run() for " + getClass(); + + // Best we can do is to complete the job. 
+ setActiveTaskId(null); + return false; + } + + TaskId[] progression = pipeline.getTaskProgression(); + int i = 0; + if (_activeTaskId != null) + { + i = indexOfActiveTask(progression); + if (i == -1) + { + error("Active task " + _activeTaskId + " not found in task pipeline."); + return false; + } + } + + switch (_activeTaskStatus) + { + case waiting: + return findRunnableTask(progression, i); + + case complete: + // See if the job has already completed. + if (_activeTaskId == null) + return false; + + return findRunnableTask(progression, i + 1); + + case error: + // Make sure the status is in error state, so that any auto-retry that + // may occur will record the error. And, if no retry occurs, then this + // job must be in error state. + try + { + PipelineJobService.get().getStatusWriter().ensureError(this); + } + catch (Exception e) + { + warn("Failed to ensure error status on task error.", e); + } + + // Run auto-retry, and retry if appropriate. + autoRetry(); + return false; + + case running: + case cancelled: + case cancelling: + default: + return false; // Do not run the active task. 
        }
    }

    /**
     * Locates the active task within the pipeline's task progression.
     * Matches either the factory's declared id or its job-specific active id.
     * @return the index into the progression, or -1 if the active task is not found
     */
    private int indexOfActiveTask(TaskId[] progression)
    {
        for (int i = 0; i < progression.length; i++)
        {
            TaskFactory factory = PipelineJobService.get().getTaskFactory(progression[i]);
            if (factory == null)
            {
                throw new IllegalStateException("Could not find factory for " + progression[i]);
            }
            if (factory.getId().equals(_activeTaskId) ||
                factory.getActiveId(this).equals(_activeTaskId))
                return i;
        }
        return -1;
    }

    /**
     * Advances the state machine starting at index i: skips tasks that are complete or not
     * participating, handles split/join transitions, and selects the next task to run.
     * @return true only if the selected task can be run locally on this server
     */
    private boolean findRunnableTask(TaskId[] progression, int i)
    {
        // Search for next task that is not already complete
        TaskFactory factory = null;
        while (i < progression.length)
        {
            try
            {
                factory = PipelineJobService.get().getTaskFactory(progression[i]);
                if (factory == null)
                {
                    throw new IllegalStateException("Could not find factory for " + progression[i]);
                }
                // Stop, if this task requires a change in join state
                if ((factory.isJoin() && isSplitJob()) || (!factory.isJoin() && isSplittable()))
                    break;
                // Stop, if this task is part of processing this job, and not complete
                if (factory.isParticipant(this) && !factory.isJobComplete(this))
                    break;
            }
            catch (IOException e)
            {
                error(e.getMessage());
                return false;
            }

            i++;
        }

        if (i < progression.length)
        {
            // A join task reached by a split job: hand control back to the parent join job
            if (factory.isJoin() && isSplitJob())
            {
                setActiveTaskId(factory.getId(), false); // ID is just a marker for state machine
                join();
                return false;
            }
            // A non-join task reached by a splittable job: fan out into sub-jobs
            else if (!factory.isJoin() && isSplittable())
            {
                setActiveTaskId(factory.getId(), false); // ID is just a marker for state machine
                split();
                return false;
            }

            // Set next task to be run
            if (!setActiveTaskId(factory.getActiveId(this)))
            {
                return false;
            }

            // If it is local, then it can be run
            return isActiveTaskLocal();
        }
        else
        {
            // Job is complete
            if (isSplitJob())
            {
                setActiveTaskId(null, false);
                join();
            }
            else
            {
                setActiveTaskId(null);
            }
            return false;
        }
    }

    /**
     * @return true if the active task has remaining auto-retries configured and enabled
     */
    public boolean isAutoRetry()
    {
        TaskFactory factory =
getActiveTaskFactory(); + return null != factory && _activeTaskRetries < factory.getAutoRetry() && factory.isAutoRetryEnabled(this); + } + + public boolean autoRetry() + { + try + { + if (isAutoRetry()) + { + info("Attempting to auto-retry"); + PipelineJobService.get().getJobStore().retry(getJobGUID()); + // Retry has been queued + return true; + } + } + catch (IOException | NoSuchJobException e) + { + warn("Failed to start automatic retry.", e); + } + return false; + } + + /** + * Subclasses that override this method instead of defining a task pipeline are responsible for setting the job's + * status at the end of their execution to either COMPLETE or ERROR + */ + @Override @Trace + public void run() + { + assert ThreadContext.isEmpty(); // Prevent/detect leaks + // Connect log messages with the active trace and span + ThreadContext.put(CorrelationIdentifier.getTraceIdKey(), CorrelationIdentifier.getTraceId()); + ThreadContext.put(CorrelationIdentifier.getSpanIdKey(), CorrelationIdentifier.getSpanId()); + + try + { + // The act of queueing the job runs the state machine for the first time. 
+ do + { + try + { + runActiveTask(); + } + catch (IOException | PipelineJobException e) + { + error(e.getMessage(), e); + } + catch (CancelledException e) + { + throw e; + } + catch (RuntimeException e) + { + error(e.getMessage(), e); + ExceptionUtil.logExceptionToMothership(null, e); + // Rethrow to let the standard Mule exception handler fire and deal with the job state + throw e; + } + } + while (runStateMachine()); + } + catch (CancelledException e) + { + _activeTaskStatus = TaskStatus.cancelled; + // Don't need to do anything else, job has already been set to CANCELLED + } + finally + { + PipelineService.get().getPipelineQueue().almostDone(this); + + ThreadContext.remove(CorrelationIdentifier.getTraceIdKey()); + ThreadContext.remove(CorrelationIdentifier.getSpanIdKey()); + } + } + + // Should be called in run()'s finally by any class that overrides run(), if class uses LocalDirectory + protected void finallyCleanUpLocalDirectory() + { + if (null != _localDirectory && isDone()) + { + try + { + Path remoteLogFilePath = _localDirectory.cleanUpLocalDirectory(); + + //Update job log entry's log location to remote path + if (null != remoteLogFilePath) + { + //NOTE: any errors here can't be recorded to job log as it may no longer be local and writable + setLogFile(remoteLogFilePath); + setStatus(getActiveTaskStatus()); // Force writing to statusFiles + } + } + catch (JobLogInaccessibleException e) + { + // Can't write to job log as the log file is either null or inaccessible. + ExceptionUtil.logExceptionToMothership(null, e); + } + catch (Exception e) + { + // Attempt to record the error to the log. Move failed, so log should still be local and writable. + error("Error trying to move log file", e); + } + } + } + + /** + * Override and return true for job that may be split. Also, override + * the createSplitJobs() method to return the sub-jobs. 
+ * + * @return true if the job may be split + */ + public boolean isSplittable() + { + return false; + } + + /** + * @return true if this is a split job, as determined by whether it has a parent. + */ + public boolean isSplitJob() + { + return getParentGUID() != null; + } + + /** + * @return true if this is a join job waiting for split jobs to complete. + */ + public boolean isSplitWaiting() + { + // Return false, if this job cannot be split. + if (!isSplittable()) + return false; + + // A join job with an active task that is not a join task, + // is waiting for a split to complete. + TaskFactory factory = getActiveTaskFactory(); + return (factory != null && !factory.isJoin()); + } + + /** + * Override and return instances of sub-jobs for a splittable job. + * + * @return sub-jobs requiring separate processing + */ + public List createSplitJobs() + { + return Collections.singletonList(this); + } + + /** + * Handles merging accumulated changes from split jobs into this job, which + * is a joined job. + * + * @param job the split job that has run to completion + */ + public void mergeSplitJob(PipelineJob job) + { + // Add experiment actions recorded. + _actionSet.add(job.getActionSet()); + + // Add any errors that happened in the split job. 
 _errors += job._errors;
    }

    /** Checkpoints this job's current state via the job store. */
    public void store() throws NoSuchJobException
    {
        PipelineJobService.get().getJobStore().storeJob(this);
    }

    /** Fans this job out into sub-jobs via the job store; failures are logged, not thrown. */
    private void split()
    {
        try
        {
            PipelineJobService.get().getJobStore().split(this);
        }
        catch (IOException e)
        {
            error(e.getMessage(), e);
        }
    }

    /** Reports this split job back to its parent join job; failures are logged, not thrown. */
    private void join()
    {
        try
        {
            PipelineJobService.get().getJobStore().join(this);
        }
        catch (IOException | NoSuchJobException e)
        {
            error(e.getMessage(), e);
        }
    }

    /////////////////////////////////////////////////////////////////////////
    // Support for running processes

    /**
     * Opens a writer over the tool-output capture file, or returns null if no capture
     * file was requested.
     * NOTE(review): FileWriter uses the platform default charset here — confirm whether
     * captured tool output should be forced to UTF-8.
     * @throws PipelineJobException if the file cannot be created
     */
    @Nullable
    private PrintWriter createPrintWriter(@Nullable File outputFile, boolean append) throws PipelineJobException
    {
        if (outputFile == null)
            return null;

        try
        {
            return new PrintWriter(new BufferedWriter(new FileWriter(outputFile, append)));
        }
        catch (IOException e)
        {
            throw new PipelineJobException("Could not create the " + outputFile + " file.", e);
        }
    }

    /** Runs the external process with all output captured to the job log. */
    public void runSubProcess(ProcessBuilder pb, File dirWork) throws PipelineJobException
    {
        runSubProcess(pb, dirWork, null, 0, false);
    }

    /**
     * If logLineInterval is greater than 0, the first logLineInterval lines of output will be written to the
     * job's main log file.
+ */ + public void runSubProcess(ProcessBuilder pb, File dirWork, File outputFile, int logLineInterval, boolean append) + throws PipelineJobException + { + runSubProcess(pb, dirWork, outputFile, logLineInterval, append, 0, null); + } + + public void runSubProcess(ProcessBuilder pb, File dirWork, File outputFile, int logLineInterval, boolean append, long timeout, TimeUnit timeoutUnit) + throws PipelineJobException + { + Process proc; + + String commandName = pb.command().get(0); + commandName = commandName.substring( + Math.max(commandName.lastIndexOf('/'), commandName.lastIndexOf('\\')) + 1); + header(commandName + " output"); + + // Update PATH environment variable to make sure all files in the tools + // directory and the directory of the executable or on the path. + String toolDir = PipelineJobService.get().getAppProperties().getToolsDirectory(); + if (!StringUtils.isEmpty(toolDir)) + { + String path = System.getenv("PATH"); + if (path == null) + { + path = toolDir; + } + else + { + path = toolDir + File.pathSeparatorChar + path; + } + + // If the command has a path, then prepend its parent directory to the PATH + // environment variable as well. 
+ String exePath = pb.command().get(0); + if (exePath != null && !exePath.isEmpty() && exePath.indexOf(File.separatorChar) != -1) + { + File fileExe = new File(exePath); + String exeDir = fileExe.getParent(); + if (!exeDir.equals(toolDir) && fileExe.exists()) + path = fileExe.getParent() + File.pathSeparatorChar + path; + } + + pb.environment().put("PATH", path); + + String dyld = System.getenv("DYLD_LIBRARY_PATH"); + if (dyld == null) + { + dyld = toolDir; + } + else + { + dyld = toolDir + File.pathSeparatorChar + dyld; + } + pb.environment().put("DYLD_LIBRARY_PATH", dyld); + } + + // tell more modern TPP tools to run headless (so no perl calls etc) bpratt 4-14-09 + pb.environment().put("XML_ONLY", "1"); + // tell TPP tools not to mess with tmpdirs, we handle this at higher level + pb.environment().put("WEBSERVER_TMP",""); + + try + { + pb.directory(dirWork); + + // TODO: Errors should go to log even when output is redirected to a file. + pb.redirectErrorStream(true); + + info("Working directory is " + dirWork.getAbsolutePath()); + info("running: " + StringUtils.join(pb.command().iterator(), " ")); + + proc = pb.start(); + } + catch (SecurityException se) + { + throw new PipelineJobException("Failed starting process '" + pb.command() + "'. 
Permissions do not allow execution.", se); + } + catch (IOException eio) + { + throw new PipelineJobException("Failed starting process '" + pb.command() + "'", eio); + } + + + try (QuietCloser ignored = PipelineJobService.get().trackForCancellation(proc)) + { + // create thread pool for collecting the process output + ExecutorService pool = Executors.newSingleThreadExecutor(); + + try (PrintWriter fileWriter = createPrintWriter(outputFile, append)) + { + // collect output using separate thread so we can enforce a timeout on the process + Future output = pool.submit(() -> { + try (BufferedReader procReader = Readers.getReader(proc.getInputStream())) + { + String line; + int count = 0; + while ((line = procReader.readLine()) != null) + { + count++; + if (fileWriter == null) + info(line); + else + { + if (logLineInterval > 0 && count < logLineInterval) + info(line); + else if (count == logLineInterval) + info("Writing additional tool output lines to " + outputFile.getName()); + fileWriter.println(line); + } + } + return count; + } + }); + + try + { + if (timeout > 0) + { + if (!proc.waitFor(timeout, timeoutUnit)) + { + proc.destroyForcibly().waitFor(); + + error("Process killed after exceeding timeout of " + timeout + " " + timeoutUnit.name().toLowerCase()); + } + } + else + { + proc.waitFor(); + } + + int result = proc.exitValue(); + if (result != 0) + { + throw new ToolExecutionException("Failed running " + pb.command().get(0) + ", exit code " + result, result); + } + + int count = output.get(); + if (fileWriter != null) + info(count + " lines written total to " + outputFile.getName()); + } + catch (InterruptedException ei) + { + throw new PipelineJobException("Interrupted process for '" + dirWork.getPath() + "'.", ei); + } + catch (ExecutionException e) + { + // Exception thrown in output collecting thread + Throwable cause = e.getCause(); + if (cause instanceof IOException) + throw new PipelineJobException("Failed writing output for process in '" + 
dirWork.getPath() + "'.", cause); + + throw new PipelineJobException(cause); + } + } + finally + { + pool.shutdownNow(); + } + } + } + + public String getLogLevel() + { + return _loggerLevel; + } + + public void setLogLevel(String level) + { + if (!_loggerLevel.equals(level)) + { + _loggerLevel = level; + _logger = null; // Reset the logger + } + } + + public Logger getClassLogger() + { + return _log; + } + + private static class OutputLogger extends SimpleLogger + { + private final PipelineJob _job; + private boolean _isSettingStatus; + private final Path _file; + private final String LINE_SEP = System.lineSeparator(); + private final String datePattern = "dd MMM yyyy HH:mm:ss,SSS"; + + protected OutputLogger(PipelineJob job, Path file, String name, Level level) + { + super(name, level, false, false, false, false, "", null, new PropertiesUtil(PropertiesUtil.getSystemProperties()), null); + _job = job; + _file = file; + } + + // called from LogOutputStream.flush() + @Override + public void log(Level level, String message) + { + _job.getClassLogger().log(level, message); + write(message, null, level.toString()); + } + + private String getSystemLogMessage(Object message) + { + StringBuilder sb = new StringBuilder(); + sb.append("(from pipeline job log file "); + sb.append(_job.getLogFile().toString()); + if (message != null) + { + sb.append(": "); + String stringMessage = message.toString(); + // Limit the maximum line length + final int maxLength = 10000; + if (stringMessage.length() > maxLength) + { + stringMessage = stringMessage.substring(0, maxLength) + "..."; + } + sb.append(stringMessage); + } + sb.append(")"); + return sb.toString(); + } + + public void setErrorStatus(Object message) + { + if (_isSettingStatus || _job._activeTaskStatus == TaskStatus.cancelled) + return; + + _isSettingStatus = true; + try + { + _job.setStatus(TaskStatus.error, message == null ? 
"ERROR" : message.toString()); + } + finally + { + _isSettingStatus = false; + } + } + + @Override + public void logMessage(String fqcn, Level mgsLevel, Marker marker, Message msg, Throwable throwable) + { + if (_job.getClassLogger().isEnabled(mgsLevel, marker)) + { + _job.getClassLogger().log(mgsLevel, marker, new Message() + { + @Override + public String getFormattedMessage() + { + return getSystemLogMessage(msg.getFormattedMessage()); + } + + @Override + public Object[] getParameters() + { + return msg.getParameters(); + } + + @Override + public Throwable getThrowable() + { + return msg.getThrowable(); + } + }, throwable); + } + + // Write to the job's log before setting the error status, which may end up throwing a CancelledException + // to signal that we need to bail out right away + write(msg.getFormattedMessage(), throwable, mgsLevel.getStandardLevel().name()); + + if (mgsLevel.isMoreSpecificThan(Level.ERROR)) + { + setErrorStatus(msg.getFormattedMessage()); + } + } + + private void write(String message, @Nullable Throwable t, String level) + { + String formattedDate = DateUtil.formatDateTime(new Date(), datePattern); + + try (PrintWriter writer = new PrintWriter(Files.newBufferedWriter(_file, StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.APPEND))) + { + var line = formattedDate + " " + + String.format("%-5s", level) + + ": " + + message; + writer.write(line); + writer.write(LINE_SEP); + if (null != t) + { + t.printStackTrace(writer); + } + } + catch (IOException e) + { + Path parentFile = _file.getParent(); + if (parentFile != null && !NetworkDrive.exists(parentFile)) + { + try + { + FileUtil.createDirectories(parentFile); + write(message, t, level); + } + catch (IOException dirE) + { + _log.error("Failed appending to file. 
Unable to create parent directories", e); + } + } + else + _log.error("Failed appending to file.", e); + } + } + } + + public static class JobLogInaccessibleException extends IllegalStateException + { + public JobLogInaccessibleException(String message) + { + super(message); + } + } + + // Multiple threads log messages, so synchronize to make sure that no one gets a partially initialized logger + public synchronized Logger getLogger() + { + if (_logger == null) + { + if (null == _logFile || FileUtil.hasCloudScheme(_logFile)) + throw new JobLogInaccessibleException("LogFile null or cloud."); + + // Create appending logger. + String loggerName = PipelineJob.class.getSimpleName() + ".Logger." + _logFile.toString(); + _logger = new OutputLogger(this, _logFile, loggerName, Level.toLevel(_loggerLevel)); + } + + return _logger; + } + + public void error(String message) + { + error(message, null); + } + + public void error(String message, @Nullable Throwable t) + { + setErrors(getErrors() + 1); + if (getLogger() != null) + getLogger().error(message, t); + } + + public void debug(String message) + { + debug(message, null); + } + + public void debug(String message, @Nullable Throwable t) + { + if (getLogger() != null) + getLogger().debug(message, t); + } + + public void warn(String message) + { + warn(message, null); + } + + public void warn(String message, @Nullable Throwable t) + { + if (getLogger() != null) + getLogger().warn(message, t); + } + + public void info(String message) + { + info(message, null); + } + + public void info(String message, @Nullable Throwable t) + { + if (getLogger() != null) + getLogger().info(message, t); + } + + public void header(String message) + { + info(message); + info("======================================="); + } + + ///////////////////////////////////////////////////////////////////////// + // ViewBackgroundInfo access + // WARNING: Some access of ViewBackgroundInfo is not supported when + // the job is running outside the LabKey Server. 
+ + /** + * Gets the container ID from the ViewBackgroundInfo. + * + * @return the ID for the container in which the job was started + */ + public String getContainerId() + { + return getInfo().getContainerId(); + } + + /** + * Gets the User instance from the ViewBackgroundInfo. + * WARNING: Not supported if job is not running in the LabKey web server. + * + * @return the user who started the job + * @throws IllegalStateException if invoked on a remote pipeline server + */ + @Override + public User getUser() + { + if (!PipelineJobService.get().isWebServer()) + { + throw new IllegalStateException("User lookup not available on remote pipeline servers"); + } + return getInfo().getUser(); + } + + /** + * Gets the Container instance from the ViewBackgroundInfo. + * WARNING: Not supported if job is not running in the LabKey web server. + * + * @return the container in which the job was started + * @throws IllegalStateException if invoked on a remote pipeline server + */ + @Override + public Container getContainer() + { + if (!PipelineJobService.get().isWebServer()) + { + throw new IllegalStateException("User lookup not available on remote pipeline servers"); + } + return getInfo().getContainer(); + } + + /** + * Gets the ActionURL instance from the ViewBackgroundInfo. + * WARNING: Not supported if job is not running in the LabKey Server. + * + * @return the URL of the request that started the job + */ + public ActionURL getActionURL() + { + return getInfo().getURL(); + } + + /** + * Gets the ViewBackgroundInfo associated with this job in its contstructor. + * WARNING: Although this function is supported outside the LabKey Server, certain + * accessors on the ViewBackgroundInfo itself are not. 
+ * + * @return information from the starting request, for use in background processing + */ + public ViewBackgroundInfo getInfo() + { + return _info; + } + + ///////////////////////////////////////////////////////////////////////// + // Scheduling interface + // TODO: Figure out how these apply to the Enterprise Pipeline + + protected boolean canInterrupt() + { + return false; + } + + public synchronized boolean interrupt() + { + PipelineJobService.get().cancelForJob(getJobGUID()); + if (!canInterrupt()) + return false; + _interrupted = true; + return true; + } + + public synchronized boolean checkInterrupted() + { + return _interrupted; + } + + public boolean allowMultipleSimultaneousJobs() + { + return false; + } + + synchronized public void setSubmitted() + { + _submitted = true; + notifyAll(); + } + + synchronized private boolean isSubmitted() + { + return _submitted; + } + + synchronized private void waitUntilSubmitted() + { + while (!_submitted) + { + try + { + wait(); + } + catch (InterruptedException ignored) {} + } + } + + ///////////////////////////////////////////////////////////////////////// + // JobRunner.Job interface + + @Override + public Object get() throws InterruptedException, ExecutionException + { + waitUntilSubmitted(); + return super.get(); + } + + @Override + public Object get(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException + { + return get(); + } + + @Override + protected void starting(Thread thread) + { + _queue.starting(this, thread); + } + + @Override + public boolean cancel(boolean mayInterruptIfRunning) + { + if (isSubmitted()) + { + PipelineJobService.get().cancelForJob(getJobGUID()); + return super.cancel(mayInterruptIfRunning); + } + return true; + } + + @Override + public boolean isDone() + { + if (!isSubmitted()) + return false; + return super.isDone(); + } + + @Override + public boolean isCancelled() + { + if (!isSubmitted()) + return false; + return super.isCancelled(); + } + + @Override + public 
void done(Throwable throwable) + { + if (null != throwable) + { + try + { + error("Uncaught exception in PipelineJob: " + this, throwable); + } + catch (Exception ignored) {} + } + if (_queue != null) + { + _queue.done(this); + } + + PipelineJobNotificationProvider notificationProvider = PipelineService.get().getPipelineJobNotificationProvider(getJobNotificationProvider(), this); + if (notificationProvider != null) + notificationProvider.onJobDone(this); + + finallyCleanUpLocalDirectory(); //Since this potentially contains the job log, it should be run after the notifications tasks are executed + } + + protected String getJobNotificationProvider() + { + return null; + } + + protected String getNotificationType(PipelineJob.TaskStatus status) + { + return status.getNotificationType(); + } + + public String serializeJob(boolean ensureDeserialize) + { + return PipelineJobService.get().getJobStore().serializeToJSON(this, ensureDeserialize); + } + + public static String getClassNameFromJson(String serialized) + { + // Expect [ "org.labkey....", {.... 
+ if (StringUtils.startsWith(serialized, "[")) + { + return StringUtils.substringBetween(serialized, "\""); + } + else + { + throw new RuntimeException("Unexpected serialized JSON"); + } + } + + @Nullable + public static PipelineJob deserializeJob(@NotNull String serialized) + { + try + { + String className = PipelineJob.getClassNameFromJson(serialized); + return PipelineJobService.get().getJobStore().deserializeFromJSON(serialized, (Class)Class.forName(className)); + } + catch (ClassNotFoundException e) + { + _log.error("Deserialized class not found.", e); + } + return null; + } + + public static ObjectMapper createObjectMapper() + { + ObjectMapper mapper = JsonUtil.DEFAULT_MAPPER.copy() + .setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.NONE) + .setVisibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY) + .disable(SerializationFeature.FAIL_ON_EMPTY_BEANS) + .enableDefaultTyping(ObjectMapper.DefaultTyping.NON_FINAL); + + SimpleModule module = new SimpleModule(); + module.addSerializer(new SqlTimeSerialization.SqlTimeSerializer()); + module.addDeserializer(Time.class, new SqlTimeSerialization.SqlTimeDeserializer()); + module.addDeserializer(AtomicLong.class, new AtomicLongDeserializer()); + module.addSerializer(NullSafeBindException.class, new NullSafeBindExceptionSerializer()); + module.addSerializer(QueryKey.class, new QueryKeySerialization.Serializer()); + module.addDeserializer(SchemaKey.class, new QueryKeySerialization.SchemaKeyDeserializer()); + module.addDeserializer(FieldKey.class, new QueryKeySerialization.FieldKeyDeserializer()); + module.addSerializer(Path.class, new PathSerialization.Serializer()); + module.addDeserializer(Path.class, new PathSerialization.Deserializer()); + module.addSerializer(CronExpression.class, new CronExpressionSerialization.Serializer()); + module.addDeserializer(CronExpression.class, new CronExpressionSerialization.Deserializer()); + module.addSerializer(URI.class, new URISerialization.Serializer()); + 
module.addDeserializer(URI.class, new URISerialization.Deserializer()); + module.addSerializer(File.class, new FileSerialization.Serializer()); + module.addDeserializer(File.class, new FileSerialization.Deserializer()); + module.addDeserializer(Filter.class, new FilterDeserializer()); + + mapper.registerModule(module); + return mapper; + } + + public abstract static class TestSerialization extends org.junit.Assert + { + public void testSerialize(PipelineJob job, @Nullable Logger log) + { + PipelineStatusFile.JobStore jobStore = PipelineJobService.get().getJobStore(); + try + { + if (null != log) + log.info("Hi Logger is here!"); + String json = jobStore.serializeToJSON(job, true); + if (null != log) + log.info(json); + PipelineJob job2 = jobStore.deserializeFromJSON(json, job.getClass()); + if (null != log) + log.info(job2.toString()); + + List errors = job.compareJobs(job2); + if (!errors.isEmpty()) + { + fail("Pipeline objects don't match: " + StringUtils.join(errors, ",")); + } + } + catch (Exception e) + { + if (null != log) + log.error("Class not found", e); + } + } + } + + @Override + public boolean equals(Object o) + { + // Fix issue 35876: Second run of a split XTandem pipeline job not completing - don't rely on the job being + // represented in memory as a single object + if (this == o) return true; + if (!(o instanceof PipelineJob that)) return false; + return Objects.equals(_jobGUID, that._jobGUID); + } + + @Override + public int hashCode() + { + return Objects.hash(_jobGUID); + } + + public List compareJobs(PipelineJob job2) + { + PipelineJob job1 = this; + List errors = new ArrayList<>(); + if (!PropertyUtil.nullSafeEquals(job1._activeTaskId, job2._activeTaskId)) + errors.add("_activeTaskId"); + if (job1._activeTaskRetries != job2._activeTaskRetries) + errors.add("_activeTaskRetries"); + if (!PropertyUtil.nullSafeEquals(job1._activeTaskStatus, job2._activeTaskStatus)) + errors.add("_activeTaskStatus"); + if (job1._errors != job2._errors) + 
errors.add("_errors"); + if (job1._interrupted != job2._interrupted) + errors.add("_interrupted"); + if (!PropertyUtil.nullSafeEquals(job1._jobGUID, job2._jobGUID)) + errors.add("_jobGUID"); + if (!PropertyUtil.nullSafeEquals(job1._logFile, job2._logFile)) + { + if (null == job1._logFile || null == job2._logFile) + errors.add("_logFile"); + else if (!FileUtil.getAbsoluteCaseSensitiveFile(job1._logFile.toFile()).getAbsolutePath().equalsIgnoreCase(FileUtil.getAbsoluteCaseSensitiveFile(job2._logFile.toFile()).getAbsolutePath())) + errors.add("_logFile"); + } + if (!PropertyUtil.nullSafeEquals(job1._parentGUID, job2._parentGUID)) + errors.add("_parentGUID"); + if (!PropertyUtil.nullSafeEquals(job1._provider, job2._provider)) + errors.add("_provider"); + if (job1._submitted != job2._submitted) + errors.add("_submitted"); + + return errors; + } + + /** + * @return Path String for a local working directory, temporary if root is cloud based + */ + protected Path getWorkingDirectoryString() + { + return !getPipeRoot().isCloudRoot() ? 
getPipeRoot().getRootNioPath() : FileUtil.getTempDirectory().toPath(); + } + + /** + * Generate a LocalDirectory and log file, temporary if need be, for use by the job + * Note: Override getDefaultLocalDirectoryString if piperoot isn't the desired local directory + * + * @param pipeRoot Pipeline's root directory + * @param moduleName supplying the pipeline + * @param baseLogFileName base name of the log file + */ + protected final void setupLocalDirectoryAndJobLog(PipeRoot pipeRoot, String moduleName, String baseLogFileName) + { + LocalDirectory localDirectory = LocalDirectory.create(pipeRoot, baseLogFileName, getWorkingDirectoryString()); + setLocalDirectory(localDirectory); + setLogFile(localDirectory.determineLogFile()); + } +} diff --git a/api/src/org/labkey/api/pipeline/PipelineProtocol.java b/api/src/org/labkey/api/pipeline/PipelineProtocol.java index f9700804715..c1fc87fd905 100644 --- a/api/src/org/labkey/api/pipeline/PipelineProtocol.java +++ b/api/src/org/labkey/api/pipeline/PipelineProtocol.java @@ -20,16 +20,13 @@ import org.fhcrc.cpas.pipeline.protocol.xml.PipelineProtocolPropsDocument; import org.labkey.api.util.FileUtil; import org.labkey.api.util.NetworkDrive; +import org.labkey.api.writer.PrintWriters; +import org.labkey.vfs.FileLike; import java.beans.PropertyDescriptor; -import java.io.BufferedWriter; -import java.io.File; import java.io.IOException; +import java.io.PrintWriter; import java.lang.reflect.InvocationTargetException; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.StandardOpenOption; import java.util.HashMap; import java.util.Map; @@ -46,10 +43,6 @@ public abstract class PipelineProtocol private String name; private String template; - public PipelineProtocol() - { - } - public PipelineProtocol(String name) { this.name = name; @@ -65,7 +58,7 @@ public void setName(String name) this.name = name; } - public abstract PipelineProtocolFactory getFactory(); + public 
abstract PipelineProtocolFactory getFactory(); public void validateToSave(PipeRoot root, boolean validateName, boolean abortOnExists) throws PipelineValidationException { @@ -100,7 +93,7 @@ else if (!getFactory().isValidProtocolName(name)) throw new PipelineValidationException("The name '" + name + "' is not a valid protocol name."); } - public Path getDefinitionFile(PipeRoot root) + public FileLike getDefinitionFile(PipeRoot root) { return getFactory().getProtocolFile(root, name, false); } @@ -152,17 +145,11 @@ protected Map getSaveProperties() return propMap; } - @Deprecated - public void save(File file) throws IOException - { - save(file.toPath()); - } - - private void ensureDir(Path dir) throws IOException + private void ensureDir(FileLike dir) throws IOException { try { - if (!Files.exists(dir)) + if (!dir.exists()) { FileUtil.createDirectories(dir); } @@ -173,9 +160,9 @@ private void ensureDir(Path dir) throws IOException } } - public void save(Path file) throws IOException + public void save(FileLike file) throws IOException { - Path dir = file.getParent(); + FileLike dir = file.getParent(); try { ensureDir(dir); @@ -210,9 +197,9 @@ public void save(Path file) throws IOException XmlOptions opts = new XmlOptions() .setSavePrettyPrint() .setSaveImplicitNamespaces(mapNS); - try (BufferedWriter bfw = Files.newBufferedWriter(file, StandardCharsets.UTF_8, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE)) + try (PrintWriter pw = PrintWriters.getPrintWriter(file.toNioPathForWrite())) { - doc.save(bfw, opts); + doc.save(pw, opts); } } diff --git a/api/src/org/labkey/api/pipeline/PipelineProtocolFactory.java b/api/src/org/labkey/api/pipeline/PipelineProtocolFactory.java index 97e4d4dcba6..94dced631af 100644 --- a/api/src/org/labkey/api/pipeline/PipelineProtocolFactory.java +++ b/api/src/org/labkey/api/pipeline/PipelineProtocolFactory.java @@ -21,14 +21,15 @@ import org.fhcrc.cpas.pipeline.protocol.xml.PipelineProtocolPropsDocument; import 
org.labkey.api.util.FileUtil; import org.labkey.api.util.NetworkDrive; +import org.labkey.vfs.FileLike; import java.io.File; import java.io.IOException; import java.nio.file.Files; -import java.nio.file.Path; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; +import java.util.List; import java.util.Map; /** @@ -44,10 +45,10 @@ public abstract class PipelineProtocolFactory private static final Logger LOG = LogManager.getLogger(PipelineProtocolFactory.class); - public static Path getProtocolRootDir(PipeRoot root) + public static FileLike getProtocolRootDir(PipeRoot root) { - Path systemDir = root.ensureSystemDirectoryPath(); - return systemDir.resolve(_pipelineProtocolDir); + FileLike systemDir = root.ensureSystemDirectory(); + return systemDir.resolveChild(_pipelineProtocolDir); } public static File locateProtocolRootDir(File rootDir, File systemDir) @@ -63,7 +64,7 @@ public static File locateProtocolRootDir(File rootDir, File systemDir) public T load(PipeRoot root, String name, boolean archived) throws IOException { - Path file = getProtocolFile(root, name, archived); + FileLike file = getProtocolFile(root, name, archived); try { Map mapNS = new HashMap<>(); @@ -71,7 +72,7 @@ public T load(PipeRoot root, String name, boolean archived) throws IOException XmlOptions opts = new XmlOptions().setLoadSubstituteNamespaces(mapNS); PipelineProtocolPropsDocument doc = - PipelineProtocolPropsDocument.Factory.parse(Files.newInputStream(file), opts); + PipelineProtocolPropsDocument.Factory.parse(file.openInputStream(), opts); PipelineProtocolPropsDocument.PipelineProtocolProps ppp = doc.getPipelineProtocolProps(); String type = ppp.getType(); @@ -104,7 +105,7 @@ public T load(PipeRoot root, String name, boolean archived) throws IOException } catch (Exception e) { - throw new IOException("Failed to load protocol document " + file.toAbsolutePath() + ".", e); + throw new IOException("Failed to load protocol document " + file + ".", e); } } @@ -115,48 
+116,42 @@ public boolean isValidProtocolName(String name) public boolean exists(PipeRoot root, String name, boolean archived) { - return Files.exists(getProtocolFile(root, name, archived)); + return getProtocolFile(root, name, archived).exists(); } - public Path getProtocolDir(PipeRoot root, boolean archived) + public FileLike getProtocolDir(PipeRoot root, boolean archived) { - Path protocolDir = getProtocolRootDir(root).resolve(getName()); + FileLike protocolDir = getProtocolRootDir(root).resolveChild(getName()); if (archived) - protocolDir = protocolDir.resolve(_archivedProtocolDir); + protocolDir = protocolDir.resolveChild(_archivedProtocolDir); return protocolDir; } - public Path getProtocolFile(PipeRoot root, String name, boolean archived) + public FileLike getProtocolFile(PipeRoot root, String name, boolean archived) { - return FileUtil.appendName(getProtocolDir(root, archived), name + ".xml"); + return getProtocolDir(root, archived).resolveChild(name + ".xml"); } /** @return sorted list of protocol names */ - public String[] getProtocolNames(PipeRoot root, Path dirData, boolean archived) + public String[] getProtocolNames(PipeRoot root, FileLike dirData, boolean archived) { HashSet setNames = new HashSet<>(); // Add .xml files - File[] files = getProtocolDir(root, archived).toFile().listFiles(f -> f.getName().endsWith(".xml") && !f.isDirectory()); - if (files != null) + List files = getProtocolDir(root, archived).getChildren(f -> f.getName().endsWith(".xml") && !f.isDirectory()); + for (FileLike file : files) { - for (File file : files) - { - final String name = file.getName(); - setNames.add(name.substring(0, name.lastIndexOf('.'))); - } + final String name = file.getName(); + setNames.add(name.substring(0, name.lastIndexOf('.'))); } // Add all directories that already exist in the analysis root. 
if (dirData != null && !archived) { - files = dirData.resolve(getName()).toFile().listFiles(File::isDirectory); + files = dirData.resolveChild(getName()).getChildren(FileLike::isDirectory); - if (files != null) - { - for (File file : files) - setNames.add(file.getName()); - } + for (FileLike file : files) + setNames.add(file.getName()); } String[] vals = setNames.toArray(new String[0]); @@ -179,22 +174,22 @@ public boolean changeArchiveStatus(PipeRoot root, String name, boolean moveToArc { if (moveToArchive) { - Path archiveDir = getProtocolDir(root, true); - if (!Files.exists(archiveDir)) + FileLike archiveDir = getProtocolDir(root, true); + if (!archiveDir.exists()) { FileUtil.createDirectories(archiveDir); } - else if (Files.isRegularFile(archiveDir)) + else if (archiveDir.isFile()) { LOG.error("Unable to create archived directory because a file with that name exists in the protocol directory: " - + getProtocolDir(root, false).toAbsolutePath()); + + getProtocolDir(root, false)); return false; } } try { - Files.move(getProtocolFile(root, name, !moveToArchive), getProtocolFile(root, name, moveToArchive)); + Files.move(getProtocolFile(root, name, !moveToArchive).toNioPathForWrite(), getProtocolFile(root, name, moveToArchive).toNioPathForWrite()); } catch (IOException e) { @@ -215,25 +210,25 @@ else if (Files.isRegularFile(archiveDir)) */ public boolean deleteProtocolFile(PipeRoot root, String name) { - Path protocolFile = getProtocolFile(root, name, false); + FileLike protocolFile = getProtocolFile(root, name, false); //If it doesn't exist, check archive - if (!Files.exists(protocolFile)) + if (!protocolFile.exists()) protocolFile = getProtocolFile(root, name, true); //If it still doesn't exist, move on - if (!Files.exists(protocolFile)) + if (!protocolFile.exists()) { return true; // We don't care if the file doesn't exist } try { - return Files.deleteIfExists(protocolFile); + return protocolFile.delete(); } catch (IOException e) { - 
LogManager.getLogger(PipelineProtocolFactory.class).debug("Error attempting to delete protocol file " + protocolFile, e); + LOG.debug("Error attempting to delete protocol file " + protocolFile, e); return false; } } diff --git a/api/src/org/labkey/api/pipeline/PipelineProvider.java b/api/src/org/labkey/api/pipeline/PipelineProvider.java index a5cadc1a1a3..618b46eed5a 100644 --- a/api/src/org/labkey/api/pipeline/PipelineProvider.java +++ b/api/src/org/labkey/api/pipeline/PipelineProvider.java @@ -208,13 +208,6 @@ public String getName() * @param rootDir the pipeline root directory on disk * @param systemDir the system directory itself */ - @Deprecated - public void initSystemDirectory(File rootDir, File systemDir) - { - if (null != rootDir && null != systemDir) - initSystemDirectory(rootDir.toPath(), systemDir.toPath()); - } - public void initSystemDirectory(Path rootDir, Path systemDir) { } diff --git a/api/src/org/labkey/api/pipeline/PipelineService.java b/api/src/org/labkey/api/pipeline/PipelineService.java index 0bd346a4781..f87384fb70d 100644 --- a/api/src/org/labkey/api/pipeline/PipelineService.java +++ b/api/src/org/labkey/api/pipeline/PipelineService.java @@ -33,6 +33,7 @@ import org.labkey.api.view.HttpView; import org.labkey.api.view.ViewBackgroundInfo; import org.labkey.api.view.ViewContext; +import org.labkey.vfs.FileLike; import org.springframework.validation.BindException; import java.io.File; @@ -195,7 +196,7 @@ void rememberLastProtocolSetting(PipelineProtocolFactory factory, Container c TableInfo getJobsTable(User user, Container container, @Nullable ContainerFilter cf); - boolean runFolderImportJob(Container c, User user, ActionURL url, Path folderXml, String originalFilename, PipeRoot pipelineRoot, ImportOptions options); + boolean runFolderImportJob(Container c, User user, ActionURL url, FileLike folderXml, String originalFilename, PipeRoot pipelineRoot, ImportOptions options); /** * Register a folder archive source implementation. 
A FolderArchiveSource creates folder artifacts that can be @@ -224,10 +225,10 @@ void rememberLastProtocolSetting(PipelineProtocolFactory factory, Container c class PathAnalysisProperties { private final PipeRoot _pipeRoot; - private final Path _dirData; + private final FileLike _dirData; private final AbstractFileAnalysisProtocolFactory _factory; - public PathAnalysisProperties(PipeRoot pipeRoot, Path dirData, AbstractFileAnalysisProtocolFactory factory) + public PathAnalysisProperties(PipeRoot pipeRoot, FileLike dirData, AbstractFileAnalysisProtocolFactory factory) { _pipeRoot = pipeRoot; _dirData = dirData; @@ -240,7 +241,7 @@ public PipeRoot getPipeRoot() } @Nullable - public Path getDirData() + public FileLike getDirData() { return _dirData; } diff --git a/api/src/org/labkey/api/pipeline/PipelineStatusFile.java b/api/src/org/labkey/api/pipeline/PipelineStatusFile.java index e5337df65a8..63a656e2ccc 100644 --- a/api/src/org/labkey/api/pipeline/PipelineStatusFile.java +++ b/api/src/org/labkey/api/pipeline/PipelineStatusFile.java @@ -17,6 +17,7 @@ import org.jetbrains.annotations.Nullable; import org.labkey.api.data.Container; +import org.labkey.vfs.FileLike; import java.io.File; import java.io.IOException; @@ -37,6 +38,10 @@ interface StatusReader @Deprecated PipelineStatusFile getStatusFile(File logFile); PipelineStatusFile getStatusFile(Container container, Path logFile); + default PipelineStatusFile getStatusFile(Container container, FileLike logFile) + { + return getStatusFile(container, logFile.toNioPathForRead()); + } PipelineStatusFile getStatusFile(long rowId); diff --git a/api/src/org/labkey/api/pipeline/PipelineUrls.java b/api/src/org/labkey/api/pipeline/PipelineUrls.java index e292f64de05..c0c098db078 100644 --- a/api/src/org/labkey/api/pipeline/PipelineUrls.java +++ b/api/src/org/labkey/api/pipeline/PipelineUrls.java @@ -22,6 +22,7 @@ import org.labkey.api.data.Container; import org.labkey.api.util.URLHelper; import org.labkey.api.view.ActionURL; 
+import org.labkey.vfs.FileLike; import java.nio.file.Path; @@ -41,7 +42,7 @@ public interface PipelineUrls extends UrlProvider ActionURL urlActions(Container container); - ActionURL urlStartFolderImport(Container container, @NotNull Path archiveFile, @Nullable ImportOptions options, boolean fromTemplateSourceFolder); + ActionURL urlStartFolderImport(Container container, @NotNull FileLike archiveFile, @Nullable ImportOptions options, boolean fromTemplateSourceFolder); ActionURL urlCreatePipelineTrigger(Container container, String pipelineId, @Nullable ActionURL returnUrl); diff --git a/api/src/org/labkey/api/pipeline/RecordedAction.java b/api/src/org/labkey/api/pipeline/RecordedAction.java index c895b3681ff..23abf52bcdb 100644 --- a/api/src/org/labkey/api/pipeline/RecordedAction.java +++ b/api/src/org/labkey/api/pipeline/RecordedAction.java @@ -21,6 +21,7 @@ import org.labkey.api.exp.PropertyType; import org.labkey.api.util.FileUtil; import org.labkey.api.util.Pair; +import org.labkey.vfs.FileLike; import java.io.File; import java.io.Serializable; @@ -93,6 +94,11 @@ public void addInput(File input, String role) addInput(input.toURI(), role); } + public void addInput(FileLike input, String role) + { + addInput(input.toNioPathForRead().toFile(), role); + } + private boolean uriExists(URI toTest, Set set) { for (DataFile df : set) diff --git a/api/src/org/labkey/api/pipeline/RecordedActionSet.java b/api/src/org/labkey/api/pipeline/RecordedActionSet.java index cc03d80a71f..d0496a22f9e 100644 --- a/api/src/org/labkey/api/pipeline/RecordedActionSet.java +++ b/api/src/org/labkey/api/pipeline/RecordedActionSet.java @@ -19,9 +19,9 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import org.labkey.vfs.FileLike; import java.net.URI; -import java.nio.file.Path; import java.util.Arrays; import java.util.Collections; import 
java.util.LinkedHashMap; @@ -84,9 +84,9 @@ public Map getOtherInputs() return _otherInputs; } - public void add(Path inputFile, String inputRole) + public void add(FileLike inputFile, String inputRole) { - _otherInputs.put(inputFile.toUri(), inputRole); + _otherInputs.put(inputFile.toNioPathForRead().toUri(), inputRole); } public void add(RecordedActionSet set) diff --git a/api/src/org/labkey/api/pipeline/WorkDirectory.java b/api/src/org/labkey/api/pipeline/WorkDirectory.java index be806a421e9..58b90294576 100644 --- a/api/src/org/labkey/api/pipeline/WorkDirectory.java +++ b/api/src/org/labkey/api/pipeline/WorkDirectory.java @@ -17,6 +17,7 @@ import org.labkey.api.pipeline.cmd.TaskPath; import org.labkey.api.util.FileType; +import org.labkey.vfs.FileLike; import java.io.File; import java.io.IOException; @@ -66,6 +67,12 @@ enum Function { */ File inputFile(File fileInput, boolean forceCopy) throws IOException; + default File inputFile(FileLike fileInput, boolean forceCopy) throws IOException + { + return inputFile(fileInput.toNioPathForRead().toFile(), forceCopy); + } + + /** * Indicates that a file is to be used as input. The implementation can choose whether it needs to be copied, unless * forceCopy is true (in which case it will always be copied to the work directory. 
This version of the method allows the caller diff --git a/api/src/org/labkey/api/pipeline/browse/PipelinePathForm.java b/api/src/org/labkey/api/pipeline/browse/PipelinePathForm.java index b037848edf1..7db45135681 100644 --- a/api/src/org/labkey/api/pipeline/browse/PipelinePathForm.java +++ b/api/src/org/labkey/api/pipeline/browse/PipelinePathForm.java @@ -21,13 +21,11 @@ import org.labkey.api.pipeline.PipeRoot; import org.labkey.api.pipeline.PipelineService; import org.labkey.api.security.permissions.ReadPermission; -import org.labkey.api.util.FileUtil; import org.labkey.api.util.NetworkDrive; import org.labkey.api.view.NotFoundException; import org.labkey.api.view.ViewForm; +import org.labkey.vfs.FileLike; -import java.io.File; -import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; import java.util.List; @@ -91,16 +89,16 @@ public void setFileIds(int[] fileIds) * For ExpData IDs provided, ensures the files exists and the user has read permission on the associated container. The files do not need to be located in the same directory. * Throws NotFoundException if no files are specified, invalid files are specified, there's no pipeline root, etc. 
*/ - public List getValidatedFiles(Container c) + public List getValidatedFiles(Container c) { return getValidatedFiles(c, false); } - public List getValidatedFiles(Container c, boolean allowNonExistentFiles) + public List getValidatedFiles(Container c, boolean allowNonExistentFiles) { PipeRoot pr = getPipeRoot(c); - File dir = pr.resolvePath(getPath()); + FileLike dir = pr.resolvePathToFileLike(getPath()); if (dir == null || !dir.exists()) throw new NotFoundException("Could not find path " + getPath()); @@ -109,10 +107,10 @@ public List getValidatedFiles(Container c, boolean allowNonExistentFiles) throw new NotFoundException("No files specified"); } - List result = new ArrayList<>(); + List result = new ArrayList<>(); for (String fileName : _file) { - File f = pr.resolvePath(getPath() + "/" + fileName); + FileLike f = pr.resolvePathToFileLike(getPath() + "/" + fileName); if (!allowNonExistentFiles && !NetworkDrive.exists(f)) { throw new NotFoundException("Could not find file '" + fileName + "' in '" + getPath() + "'"); @@ -136,7 +134,7 @@ public List getValidatedFiles(Container c, boolean allowNonExistentFiles) throw new NotFoundException("Insufficient permissions for file '" + data.getFile()); } - File file = data.getFile(); + FileLike file = data.getFileLike(); if (!allowNonExistentFiles && !NetworkDrive.exists(file)) { throw new NotFoundException("Could not find file '" + file + "'"); @@ -150,55 +148,12 @@ public List getValidatedFiles(Container c, boolean allowNonExistentFiles) public List getValidatedPaths(Container c, boolean allowNonExistentFiles) { - PipeRoot pr = getPipeRoot(c); - - Path dir = pr.resolveToNioPath(getPath()); - if (dir == null || !Files.exists(dir)) - throw new NotFoundException("Could not find path " + getPath()); - - if ((getFile() == null || getFile().length == 0) && (getFileIds() == null || getFileIds().length == 0)) - { - throw new NotFoundException("No files specified"); - } - + List files = getValidatedFiles(c, 
allowNonExistentFiles); List result = new ArrayList<>(); - for (String fileName : _file) + for (FileLike file : files) { - Path path = pr.resolveToNioPath(getPath() + "/" + fileName); - if (!allowNonExistentFiles && (null == path || !Files.exists(path))) - { - throw new NotFoundException("Could not find file '" + fileName + "' in '" + getPath() + "'"); - } - if (null != path) - result.add(path); - } - - ExperimentService es = ExperimentService.get(); - if (_fileIds != null) - { - for (int fileId : _fileIds) - { - ExpData data = es.getExpData(fileId); - if(data == null) - { - throw new NotFoundException("Could not find file associated with Data Id: '" + fileId); - } - - if (!data.getContainer().hasPermission(getUser(), ReadPermission.class)) - { - throw new NotFoundException("Insufficient permissions for file '" + data.getFile()); - } - - Path path = pr.resolveToNioPath(data.getDataFileURI().getPath()); - if (!allowNonExistentFiles && (null == path || !Files.exists(path))) - { - throw new NotFoundException("Could not find file '" + FileUtil.getFileName(path) + "'"); - } - if (null != path) - result.add(path); - } + result.add(file.toNioPathForRead()); } - return result; } @@ -211,13 +166,18 @@ public PipeRoot getPipeRoot(Container c) } /** Verifies that only a single file was selected and returns it, throwing an exception if there isn't exactly one */ - @Deprecated //prefer the nio.Path version: getValidatedSinglePath - public File getValidatedSingleFile(Container c) + public FileLike getValidatedSingleFile(Container c) { - return getValidatedSinglePath(c).toFile(); + List files = getValidatedFiles(c); + if (files.size() != 1) + { + throw new IllegalArgumentException("Expected a single file but got " + files.size()); + } + return files.get(0); } /** Verifies that only a single file was selected and returns it, throwing an exception if there isn't exactly one */ + @Deprecated // use the FileLike version public Path getValidatedSinglePath(Container c) { List files = 
getValidatedPaths(c, false); diff --git a/api/src/org/labkey/api/pipeline/file/AbstractFileAnalysisJob.java b/api/src/org/labkey/api/pipeline/file/AbstractFileAnalysisJob.java index 766e2421a95..6ff08013dc0 100644 --- a/api/src/org/labkey/api/pipeline/file/AbstractFileAnalysisJob.java +++ b/api/src/org/labkey/api/pipeline/file/AbstractFileAnalysisJob.java @@ -15,10 +15,10 @@ */ package org.labkey.api.pipeline.file; +import io.micrometer.common.util.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; import org.labkey.api.exp.PropertyType; import org.labkey.api.exp.api.ExpRun; import org.labkey.api.exp.api.ExperimentService; @@ -36,10 +36,10 @@ import org.labkey.api.util.PageFlowUtil; import org.labkey.api.view.ActionURL; import org.labkey.api.view.ViewBackgroundInfo; +import org.labkey.vfs.FileLike; import java.io.File; import java.io.IOException; -import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; import java.util.Collections; @@ -61,11 +61,10 @@ abstract public class AbstractFileAnalysisJob extends PipelineJob implements Fil private String _protocolName; private String _joinedBaseName; private String _baseName; - private Path _dirData; - private Path _dirAnalysis; - private Path _fileParameters; - private Path _fileJobInfo; - private List _filesInput; + private FileLike _dirData; + private FileLike _dirAnalysis; + private FileLike _fileParameters; + private List _filesInput; private List _inputTypes; private boolean _splittable = true; @@ -77,39 +76,14 @@ abstract public class AbstractFileAnalysisJob extends PipelineJob implements Fil // For serialization protected AbstractFileAnalysisJob() {} - @Deprecated //Prefer the Path version, retained for backwards compatbility - public AbstractFileAnalysisJob(AbstractFileAnalysisProtocol protocol, + public AbstractFileAnalysisJob(@NotNull 
AbstractFileAnalysisProtocol protocol, String providerName, ViewBackgroundInfo info, PipeRoot root, String protocolName, - File fileParameters, - List filesInput, - boolean splittable, - boolean writeJobInfoFile) throws IOException - { - this( - protocol, - providerName, - info, - root, - protocolName, - fileParameters.toPath(), - filesInput.stream().map(File::toPath).collect(Collectors.toList()), - splittable, - writeJobInfoFile - ); - } - - public AbstractFileAnalysisJob(@NotNull AbstractFileAnalysisProtocol protocol, - String providerName, - ViewBackgroundInfo info, - PipeRoot root, - String protocolName, - Path fileParameters, - List filesInput, - boolean splittable, - boolean writeJobInfoFile) throws IOException + FileLike fileParameters, + List filesInput, + boolean splittable) throws IOException { super(providerName, info, root); @@ -127,13 +101,13 @@ public AbstractFileAnalysisJob(@NotNull AbstractFileAnalysisProtocol protocol, // Check for explicitly set default parameters. Otherwise use the default. String paramDefaults = _parametersOverrides.get("list path, default parameters"); - Path fileDefaults; + FileLike fileDefaults; if (paramDefaults != null) - fileDefaults = getPipeRoot().resolveToNioPath(paramDefaults); + fileDefaults = getPipeRoot().resolvePathToFileLike(paramDefaults); else fileDefaults = protocol.getFactory().getDefaultParametersFile(root); - _parametersDefaults = fileDefaults != null && Files.exists(fileDefaults) ? + _parametersDefaults = fileDefaults != null && fileDefaults.exists() ? 
getInputParameters(fileDefaults).getInputParameters() : Collections.emptyMap(); @@ -164,15 +138,15 @@ public AbstractFileAnalysisJob(@NotNull AbstractFileAnalysisProtocol protocol, @Override protected Path getWorkingDirectoryString() { - return _dirAnalysis.toAbsolutePath(); + return _dirAnalysis.toNioPathForWrite().toAbsolutePath(); } - public AbstractFileAnalysisJob(AbstractFileAnalysisJob job, File fileInput) + public AbstractFileAnalysisJob(AbstractFileAnalysisJob job, FileLike fileInput) { this(job, Collections.singletonList(fileInput)); } - public AbstractFileAnalysisJob(AbstractFileAnalysisJob job, List filesInput) + public AbstractFileAnalysisJob(AbstractFileAnalysisJob job, List filesInput) { super(job); @@ -188,7 +162,7 @@ public AbstractFileAnalysisJob(AbstractFileAnalysisJob job, List filesInpu _joinedBaseName = job._joinedBaseName; // Change parameters which are specific to the fraction job. - _filesInput = filesInput.stream().map(File::toPath).collect(Collectors.toList()); + _filesInput = new ArrayList<>(filesInput); _inputTypes = FileType.findTypes(job._inputTypes, _filesInput); _baseName = (_inputTypes.isEmpty() ? 
filesInput.get(0).getName() : _inputTypes.get(0).getBaseName(filesInput.get(0))); @@ -222,7 +196,7 @@ public List createSplitJobs() return super.createSplitJobs(); ArrayList jobs = new ArrayList<>(); - for (File file : getInputFiles()) + for (FileLike file : _filesInput) jobs.add(createSingleFileJob(file)); return Collections.unmodifiableList(jobs); } @@ -235,7 +209,7 @@ public TaskPipeline getTaskPipeline() abstract public TaskId getTaskPipelineId(); - abstract public AbstractFileAnalysisJob createSingleFileJob(File file); + abstract public AbstractFileAnalysisJob createSingleFileJob(FileLike file); @Override public String getProtocolName() @@ -259,7 +233,7 @@ public String getJoinedBaseName() public List getSplitBaseNames() { ArrayList baseNames = new ArrayList<>(); - for (Path fileInput : _filesInput) + for (FileLike fileInput : _filesInput) { for (FileType ft : _inputTypes) { @@ -278,7 +252,7 @@ public String getBaseNameForFileType(FileType fileType) { if (fileType != null) { - for (Path fileInput : _filesInput) + for (FileLike fileInput : _filesInput) { if (fileType.isType(fileInput)) return fileType.getBaseName(fileInput); @@ -289,13 +263,7 @@ public String getBaseNameForFileType(FileType fileType) } @Override - public File getDataDirectory() - { - return _dirData.toFile(); - } - - @Override - public Path getDataDirectoryPath() + public FileLike getDataDirectoryFileLike() { return _dirData; } @@ -303,13 +271,7 @@ public Path getDataDirectoryPath() @Override public File getAnalysisDirectory() { - return _dirAnalysis.toFile(); - } - - @Override - public Path getAnalysisDirectoryPath() - { - return _dirAnalysis; + return _dirAnalysis.toNioPathForWrite().toFile(); } @Override @@ -357,28 +319,13 @@ public List getInputFiles() @Override public List getInputFilePaths() { - return _filesInput; - } - - @Override - @Nullable - public File getJobInfoFile() - { - return _fileJobInfo.toFile(); - } - - - @Override - @Nullable - public Path getJobInfoFilePath() - { - return 
_fileJobInfo; + return _filesInput.stream().map(FileLike::toNioPathForRead).toList(); } @Override public File getParametersFile() { - return _fileParameters.toFile(); + return _fileParameters.toNioPathForRead().toFile(); } @Override @@ -407,10 +354,10 @@ public ParamParser getInputParameters() throws IOException return getInputParameters(_fileParameters); } - public ParamParser getInputParameters(Path parametersFile) throws IOException + public ParamParser getInputParameters(FileLike parametersFile) throws IOException { ParamParser parser = createParamParser(); - parser.parse(Files.newInputStream(parametersFile)); + parser.parse(parametersFile.openInputStream()); if (parser.getErrors() != null) { ParamParser.Error err = parser.getErrors()[0]; @@ -428,7 +375,7 @@ public ParamParser getInputParameters(Path parametersFile) throws IOException return parser; } - private void logParameters(String description, Path file, Map parameters) + private void logParameters(String description, FileLike file, Map parameters) { _log.debug(description + " " + parameters.size() + " parameters (" + file + "):"); for (Map.Entry entry : new TreeMap<>(parameters).entrySet()) @@ -445,7 +392,7 @@ public ParamParser createParamParser() @Override public String getDescription() { - return getDataDescription(getDataDirectoryPath(), getBaseName(), getJoinedBaseName(), getProtocolName(), getInputFilePaths()); + return getDataDescription(getDataDirectoryFileLike(), getBaseName(), getJoinedBaseName(), getProtocolName(), _filesInput); } @Override @@ -460,25 +407,19 @@ public ActionURL getStatusHref() return null; } - @Deprecated //prefer Path version - public static String getDataDescription(File dirData, String baseName, String joinedBaseName, String protocolName) - { - return getDataDescription(dirData.toPath(), baseName, joinedBaseName, protocolName, Collections.emptyList()); - } - - public static String getDataDescription(Path dirData, String baseName, String joinedBaseName, String protocolName, 
List inputFiles) + public static String getDataDescription(FileLike dirData, String baseName, String joinedBaseName, String protocolName, List inputFiles) { String dataName = ""; if (dirData != null) { - dataName = dirData.getFileName().toString(); + dataName = dirData.getName(); // Can't remember why we would ever need the "xml" check. We may get an extra "." in the path, // so check for that and remove it. if (".".equals(dataName) || "xml".equals(dataName)) { dirData = dirData.getParent(); if (dirData != null) - dataName = dirData.getFileName().toString(); + dataName = dirData.getName(); } } @@ -490,14 +431,17 @@ public static String getDataDescription(Path dirData, String baseName, String jo description.append("/"); description.append(baseName); } - description.append(" (").append(protocolName).append(")"); + if (!StringUtils.isEmpty(protocolName)) + { + description.append(" (").append(protocolName).append(")"); + } // input files if (!inputFiles.isEmpty()) { description.append(" ("); //p.getFileName returns the full S3 path -- S3fs bug? 
- description.append(inputFiles.stream().map(FileUtil::getFileName).collect(Collectors.joining(","))); + description.append(inputFiles.stream().map(FileLike::getName).collect(Collectors.joining(","))); description.append(")"); } return description.toString(); diff --git a/api/src/org/labkey/api/pipeline/file/AbstractFileAnalysisProtocol.java b/api/src/org/labkey/api/pipeline/file/AbstractFileAnalysisProtocol.java index a75a2b01925..14c20e885f1 100644 --- a/api/src/org/labkey/api/pipeline/file/AbstractFileAnalysisProtocol.java +++ b/api/src/org/labkey/api/pipeline/file/AbstractFileAnalysisProtocol.java @@ -32,6 +32,7 @@ import org.labkey.api.util.XmlBeansUtil; import org.labkey.api.view.ViewBackgroundInfo; import org.labkey.api.writer.PrintWriters; +import org.labkey.vfs.FileLike; import org.xml.sax.InputSource; import javax.xml.parsers.DocumentBuilder; @@ -47,7 +48,6 @@ import java.io.StringReader; import java.io.StringWriter; import java.nio.charset.Charset; -import java.nio.file.Files; import java.nio.file.Path; import java.util.HashMap; import java.util.List; @@ -141,32 +141,21 @@ public String getJoinedBaseName() return getName(); } - public String getBaseName(File file) - { - return getBaseName(file.toPath()); - } - - public String getBaseName(Path file) + public String getBaseName(FileLike file) { FileType ft = findInputType(file); if (ft == null) - return file.getFileName().toString(); + return file.getName(); return ft.getBaseName(file); } - public File getAnalysisDir(File dirData, PipeRoot root) - { - return getFactory().getAnalysisDir(dirData.toPath(), getName(), root).toFile(); - } - - @Deprecated //Prefer Path version - public File getParametersFile(File dirData, PipeRoot root) + public FileLike getAnalysisDir(FileLike dirData, PipeRoot root) { - return getParametersFile(dirData.toPath(), root).toFile(); + return getFactory().getAnalysisDir(dirData, getName(), root); } - public Path getParametersFile(Path dirData, PipeRoot root) + public FileLike 
getParametersFile(FileLike dirData, PipeRoot root) { return getFactory().getParametersFile(dirData, getName(), root); } @@ -177,20 +166,14 @@ public void saveDefinition(PipeRoot root) throws IOException save(getFactory().getProtocolFile(root, getName(), false), null, null); } - @Deprecated //Prefer Path version - public void saveInstance(File file, Container c) throws IOException - { - saveInstance(file.toPath(), c); - } - - public void saveInstance(Path file, Container c) throws IOException + public void saveInstance(FileLike file, Container c) throws IOException { Map addParams = new HashMap<>(); addParams.put(PipelineJob.PIPELINE_EMAIL_ADDRESS_PARAM, email); save(file, null, addParams); } - protected void save(Path file, Map addParams, Map instanceParams) throws IOException + protected void save(FileLike file, Map addParams, Map instanceParams) throws IOException { if (xml == null || xml.isEmpty()) { @@ -210,8 +193,8 @@ protected void save(Path file, Map addParams, Map addParams, Map getFactory(); public abstract JOB createPipelineJob(ViewBackgroundInfo info, - PipeRoot root, List filesInput, - Path fileParameters, @Nullable Map variableMap) throws IOException; + PipeRoot root, List filesInput, + FileLike fileParameters, @Nullable Map variableMap) throws IOException; public boolean timestampLog() { diff --git a/api/src/org/labkey/api/pipeline/file/AbstractFileAnalysisProtocolFactory.java b/api/src/org/labkey/api/pipeline/file/AbstractFileAnalysisProtocolFactory.java index 28fe1bf96e7..da9df32ae1b 100644 --- a/api/src/org/labkey/api/pipeline/file/AbstractFileAnalysisProtocolFactory.java +++ b/api/src/org/labkey/api/pipeline/file/AbstractFileAnalysisProtocolFactory.java @@ -25,28 +25,23 @@ import org.labkey.api.pipeline.PipelineJob; import org.labkey.api.pipeline.PipelineJobService; import org.labkey.api.pipeline.PipelineProtocolFactory; -import org.labkey.api.pipeline.PipelineProvider; -import org.labkey.api.pipeline.PipelineService; -import 
org.labkey.api.pipeline.TaskPipeline; +import org.labkey.api.reader.Readers; import org.labkey.api.util.FileUtil; import org.labkey.api.util.NetworkDrive; import org.labkey.api.util.PageFlowUtil; import org.labkey.api.util.logging.LogHelper; +import org.labkey.api.writer.PrintWriters; +import org.labkey.vfs.FileLike; import java.io.BufferedReader; -import java.io.BufferedWriter; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; +import java.io.PrintWriter; import java.io.Reader; import java.io.StringReader; -import java.nio.charset.Charset; -import java.nio.file.Files; import java.nio.file.InvalidPathException; -import java.nio.file.Path; -import java.nio.file.StandardOpenOption; -import java.util.List; /** * Base class for protocol factories that are primarily focused on analyzing data files (as opposed to other types of resources) @@ -96,13 +91,13 @@ public String getLegacyDefaultParametersFileName() * @param root pipeline root under which the files are stored * @return analysis directory */ - public Path getAnalysisDir(Path dirData, String protocolName, PipeRoot root) + public FileLike getAnalysisDir(FileLike dirData, String protocolName, PipeRoot root) { - Path defaultFile = dirData.resolve(getName()).resolve(protocolName); + FileLike defaultFile = dirData.resolveChild(getName()).resolveChild(protocolName); // Check if the pipeline root wants us to write somewhere else, because the source file might be in a read-only // pipeline location String relativePath = root.relativePath(defaultFile); - return root.resolveToNioPath(relativePath); + return root.resolvePathToFileLike(relativePath); } /** @@ -113,12 +108,6 @@ public boolean isProtocolTypeFile(File file) return NetworkDrive.exists(new File(file.getParent(), getParametersFileName())); } - @Deprecated - public File getParametersFile(@Nullable File dirData, String protocolName, PipeRoot root) - { - Path result = getParametersFile(dirData == null? 
null : dirData.toPath(), protocolName, root); - return result != null ? result.toFile() : null; - } /** * Get the parameters file location, given a directory containing the mass spec data. * @@ -128,17 +117,17 @@ public File getParametersFile(@Nullable File dirData, String protocolName, PipeR * @return parameters file */ @Nullable - public Path getParametersFile(@Nullable Path dirData, String protocolName, PipeRoot root) + public FileLike getParametersFile(@Nullable FileLike dirData, String protocolName, PipeRoot root) { if (dirData == null) { return null; } - Path defaultFile = getAnalysisDir(dirData, protocolName, root).resolve(getParametersFileName()); + FileLike defaultFile = getAnalysisDir(dirData, protocolName, root).resolveChild(getParametersFileName()); // Check if the pipeline root wants us to write somewhere else, because the source file might be in a read-only // pipeline location String relativePath = root.relativePath(defaultFile); - return root.resolveToNioPath(relativePath); + return root.resolvePathToFileLike(relativePath); } /** @@ -147,9 +136,9 @@ public Path getParametersFile(@Nullable Path dirData, String protocolName, PipeR * @param root pipeline root directory * @return default parameters file */ - public Path getDefaultParametersFile(PipeRoot root) + public FileLike getDefaultParametersFile(PipeRoot root) { - return getProtocolDir(root, false).resolve(getDefaultParametersFileName()); + return getProtocolDir(root, false).resolveChild(getDefaultParametersFileName()); } /** @@ -164,7 +153,7 @@ public void ensureDefaultParameters(PipeRoot root) throws IOException } @Override - public String[] getProtocolNames(PipeRoot root, Path dirData, boolean archived) + public String[] getProtocolNames(PipeRoot root, FileLike dirData, boolean archived) { String[] protocolNames = super.getProtocolNames(root, dirData, archived); @@ -224,10 +213,10 @@ public T load(PipeRoot root, String name, boolean archived) throws IOException return instance; } - public T 
loadInstance(Path file, Container container) throws IOException + public T loadInstance(FileLike file, Container container) throws IOException { ParamParser parser = createParamParser(); - try (InputStream is = Files.newInputStream(file)) + try (InputStream is = file.openInputStream()) { parser.parse(is); if (parser.getErrors() != null) @@ -251,8 +240,8 @@ public T loadInstance(Path file, Container container) throws IOException public String getDefaultParametersXML(PipeRoot root) throws IOException { - Path fileDefault = getDefaultParametersFile(root); - if (!Files.exists(fileDefault)) + FileLike fileDefault = getDefaultParametersFile(root); + if (!fileDefault.exists()) return null; return new FileDefaultsReader(fileDefault).readXML(); @@ -260,9 +249,9 @@ public String getDefaultParametersXML(PipeRoot root) throws IOException protected static class FileDefaultsReader extends DefaultsReader { - private final Path _fileDefaults; + private final FileLike _fileDefaults; - public FileDefaultsReader(Path fileDefaults) + public FileDefaultsReader(FileLike fileDefaults) { _fileDefaults = fileDefaults; } @@ -270,7 +259,7 @@ public FileDefaultsReader(Path fileDefaults) @Override public Reader createReader() throws IOException { - return Files.newBufferedReader(_fileDefaults, Charset.defaultCharset()); + return Readers.getReader(_fileDefaults.openInputStream()); } } @@ -313,10 +302,10 @@ public void setDefaultParametersXML(PipeRoot root, String xml) throws IOExceptio throw new IllegalArgumentException("Line " + err.getLine() + ": " + err.getMessage()); } - Path fileDefault = getDefaultParametersFile(root); + FileLike fileDefault = getDefaultParametersFile(root); FileUtil.createDirectories(fileDefault.getParent()); - try (BufferedWriter writer = Files.newBufferedWriter(fileDefault, Charset.defaultCharset(), StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE)) + try (PrintWriter writer = PrintWriters.getPrintWriter(fileDefault.openOutputStream())) { writer.write(xml, 0, 
xml.length()); } @@ -327,31 +316,12 @@ public void setDefaultParametersXML(PipeRoot root, String xml) throws IOExceptio } } - public static >, F extends AbstractFileAnalysisProtocolFactory> - F fromFile(Class clazz, File file) - { - List providers = PipelineService.get().getPipelineProviders(); - for (PipelineProvider provider : providers) - { - if (!(clazz.isInstance(provider))) - continue; - - T mprovider = (T) provider; - F factory = mprovider.getProtocolFactory(file); - if (factory != null) - return factory; - } - - // TODO: Return some default? - return null; - } - @Nullable - public AbstractFileAnalysisProtocol getProtocol(PipeRoot root, Path dirData, String protocolName, boolean archived) + public AbstractFileAnalysisProtocol getProtocol(PipeRoot root, FileLike dirData, String protocolName, boolean archived) { try { - Path protocolFile = getParametersFile(dirData, protocolName, root); + FileLike protocolFile = getParametersFile(dirData, protocolName, root); AbstractFileAnalysisProtocol result; if (NetworkDrive.exists(protocolFile)) { @@ -362,9 +332,16 @@ public AbstractFileAnalysisProtocol getProtocol(PipeRoot root, Path dirData, } else { - protocolFile = getProtocolFile(root, protocolName, archived); - if (protocolFile == null || !Files.exists(protocolFile)) + try + { + protocolFile = getProtocolFile(root, protocolName, archived); + if (protocolFile == null || !protocolFile.exists()) + return null; + } + catch (InvalidPathException e) + { return null; + } result = load(root, protocolName, archived); } diff --git a/api/src/org/labkey/api/pipeline/file/FileAnalysisJobSupport.java b/api/src/org/labkey/api/pipeline/file/FileAnalysisJobSupport.java index 52c9752230a..0a23361774e 100644 --- a/api/src/org/labkey/api/pipeline/file/FileAnalysisJobSupport.java +++ b/api/src/org/labkey/api/pipeline/file/FileAnalysisJobSupport.java @@ -19,8 +19,12 @@ import org.jetbrains.annotations.Nullable; import org.labkey.api.pipeline.ParamParser; import 
org.labkey.api.util.FileType; +import org.labkey.api.util.UnexpectedException; +import org.labkey.vfs.FileLike; +import org.labkey.vfs.FileSystemLike; import java.io.File; +import java.io.IOException; import java.nio.file.Path; import java.util.List; import java.util.Map; @@ -63,26 +67,26 @@ public interface FileAnalysisJobSupport /** * @return the directory in which the original input file resides. */ - @Deprecated //Prefer the getDataDirectoryPath version as File return type doesn't support full URIs very well - File getDataDirectory(); - default Path getDataDirectoryPath() + @Deprecated //Prefer the getDataDirectoryFileLike version as File return type doesn't support full URIs very well + default File getDataDirectory() { - // TODO This needs implementation in derived classes... - // This is typically safe but may cause an error if FileSystem provider isn't configured - return getDataDirectory().toPath(); + return getDataDirectoryFileLike().toNioPathForWrite().toFile(); } + FileLike getDataDirectoryFileLike(); + /** * @return the directory where the input files reside, and where the * final analysis should end up. */ - @Deprecated // Please use getAnalysisDirectoryPath instead, as File objects may have issues with full URIs + @Deprecated // Please use getAnalysisDirectoryFileLike File getAnalysisDirectory(); - default Path getAnalysisDirectoryPath() + + default FileLike getAnalysisDirectoryFileLike() { // TODO This needs implementation in derived classes... // This is typically safe but may cause an error if FileSystem provider isn't configured - return getAnalysisDirectory().toPath(); + return FileSystemLike.wrapFile(getAnalysisDirectory()); } /** @@ -90,14 +94,31 @@ default Path getAnalysisDirectoryPath() * This allows the task definitions to name files they require as input, * and the pipeline definition to specify where those files should come from. 
*/ - @Deprecated // Please use findInputPath instead, as File objects may have issues with full URIs + @Deprecated // Please use findInputFileLike instead, as File objects may have issues with full URIs File findInputFile(String name); + @Deprecated // Please use findInputFileLike instead, as File objects may have issues with full URIs default Path findInputPath(String filepath) { // TODO This needs implementation in derived classes... // This is typically safe but may cause an error if FileSystem provider isn't configured return findInputFile(filepath).toPath(); } + default FileLike findInputFileLike(String filepath) + { + File file = findInputFile(filepath); + if (file != null) + { + try + { + return FileSystemLike.wrapFile(getDataDirectory(), file); + } + catch (IOException e) + { + throw UnexpectedException.wrap(e); + } + } + return null; + } /** * Returns a file for use as output in the pipeline, given its name. @@ -142,40 +163,12 @@ default Path findOutputPath(@NotNull String outputDir, @NotNull String filename) @Deprecated //Use Path based versions File getParametersFile(); - /** - * @return the job info file used to provide the external executable or script task with input file context. - */ - @Nullable - @Deprecated //Use Path based versions - File getJobInfoFile(); - /** * @return a list of all input files analyzed. */ @Deprecated List getInputFiles(); - - /** - * @return the parameters input file used to drive the pipeline. - */ - @Nullable - default Path getParametersFilePath() - { - //Implemented as such for backwards compatibility - return getParametersFile() == null ? null : getParametersFile().toPath(); - } - - /** - * @return the job info file used to provide the external executable or script task with input file context. - */ - @Nullable - default Path getJobInfoFilePath() - { - //Implemented as such for backwards compatibility - return getJobInfoFile() == null? 
null : getJobInfoFile().toPath(); - } - default List getInputFilePaths() { //Implemented as such for backwards compatibility diff --git a/api/src/org/labkey/api/pipeline/file/FileAnalysisTaskPipeline.java b/api/src/org/labkey/api/pipeline/file/FileAnalysisTaskPipeline.java index 04360c27cba..469338e5879 100644 --- a/api/src/org/labkey/api/pipeline/file/FileAnalysisTaskPipeline.java +++ b/api/src/org/labkey/api/pipeline/file/FileAnalysisTaskPipeline.java @@ -24,6 +24,7 @@ import org.labkey.api.util.FileType; import org.labkey.api.util.ReturnURLString; import org.labkey.api.util.URLHelper; +import org.labkey.vfs.FileLike; import java.io.File; import java.io.FileFilter; @@ -31,6 +32,7 @@ import java.nio.file.Path; import java.util.List; import java.util.Map; +import java.util.function.Predicate; /** * FileAnalysisTaskPipeline @@ -38,13 +40,19 @@ */ public interface FileAnalysisTaskPipeline extends TaskPipeline { - interface FilePathFilter extends FileFilter, DirectoryStream.Filter + interface FilePathFilter extends FileFilter, DirectoryStream.Filter, Predicate { @Override boolean accept(File file); @Override boolean accept(Path path); + + @Override + default boolean test(FileLike fileLike) + { + return accept(fileLike.toNioPathForRead()); + } } diff --git a/api/src/org/labkey/api/reports/report/r/RReportJob.java b/api/src/org/labkey/api/reports/report/r/RReportJob.java index 79c220902a5..8090a2bada0 100644 --- a/api/src/org/labkey/api/reports/report/r/RReportJob.java +++ b/api/src/org/labkey/api/reports/report/r/RReportJob.java @@ -100,7 +100,7 @@ protected void init(@NotNull String executingContainerId) { _jobIdentifier.set(getJobGUID()); FileLike logFile = report.getReportDirFileLike(executingContainerId).resolveChild(LOG_FILE_NAME); - setLogFile(logFile.toNioPathForWrite()); + setLogFile(logFile); } } diff --git a/api/src/org/labkey/api/study/SpecimenTransform.java b/api/src/org/labkey/api/study/SpecimenTransform.java index d00b7731f19..e62f3af113e 100644 --- 
a/api/src/org/labkey/api/study/SpecimenTransform.java +++ b/api/src/org/labkey/api/study/SpecimenTransform.java @@ -23,6 +23,7 @@ import org.labkey.api.security.User; import org.labkey.api.util.FileType; import org.labkey.api.view.ActionURL; +import org.labkey.vfs.FileLike; import java.io.File; import java.nio.file.Path; @@ -75,7 +76,7 @@ public interface SpecimenTransform * @param importConfig configuration object * @param inputArchive the file to write the externally sourced data into */ - void importFromExternalSource(@Nullable PipelineJob job, ExternalImportConfig importConfig, File inputArchive) throws PipelineJobException; + void importFromExternalSource(@Nullable PipelineJob job, ExternalImportConfig importConfig, FileLike inputArchive) throws PipelineJobException; interface ExternalImportConfig { diff --git a/api/src/org/labkey/api/util/FileType.java b/api/src/org/labkey/api/util/FileType.java index 680795aafaa..d874803027c 100644 --- a/api/src/org/labkey/api/util/FileType.java +++ b/api/src/org/labkey/api/util/FileType.java @@ -334,6 +334,11 @@ public String getName(File parentDir, String basename) return getName(parentDir.toPath(), basename); } + public String getName(FileLike parentDir, String basename) + { + return getName(parentDir.toNioPathForRead(), basename); + } + public String getName(Path parentDir, String basename) { if (_suffixes.size() > 1) @@ -430,6 +435,11 @@ public String getBaseName(File file) return getBaseName(file.toPath()); } + public String getBaseName(FileLike file) + { + return getBaseName(file.toNioPathForRead()); + } + public String getBaseName(@NotNull java.nio.file.Path file) { String fileName = file.getFileName().toString(); @@ -469,6 +479,11 @@ public File newFile(File parent, String basename) return FileUtil.appendName(parent, getName(parent, basename)); } + public FileLike newFile(FileLike parent, String basename) + { + return parent.resolveChild(getName(parent, basename)); + } + public Path newFile(Path parent, String 
basename) { return FileUtil.appendName(parent, getName(parent, basename)); @@ -657,15 +672,15 @@ public String toString() } @NotNull - public static List findTypes(@NotNull List types, @NotNull List files) + public static List findTypes(@NotNull List types, @NotNull List files) { ArrayList foundTypes = new ArrayList<>(); // This O(n*m), but these are usually very short lists. for (FileType type : types) { - for (Path file : files) + for (FileLike file : files) { - if (type.isType(file.getFileName().toString())) + if (type.isType(file.getName())) { foundTypes.add(type); break; diff --git a/api/src/org/labkey/api/util/FileUtil.java b/api/src/org/labkey/api/util/FileUtil.java index d7675c3092d..fd358042a19 100644 --- a/api/src/org/labkey/api/util/FileUtil.java +++ b/api/src/org/labkey/api/util/FileUtil.java @@ -33,9 +33,11 @@ import org.labkey.api.cloud.CloudStoreService; import org.labkey.api.data.Container; import org.labkey.api.files.FileContentService; +import org.labkey.api.pipeline.PipelineService; import org.labkey.api.security.Crypt; import org.labkey.api.settings.AppProps; import org.labkey.api.util.logging.LogHelper; +import org.labkey.api.view.NotFoundException; import org.labkey.api.view.UnauthorizedException; import org.labkey.vfs.FileLike; import org.labkey.vfs.FileSystemLike; @@ -212,15 +214,24 @@ public static boolean deleteDir(File dir) return deleteDir(dir, null); } + public static boolean deleteDir(@NotNull FileLike dir) + { + return deleteDir(dir.toNioPathForWrite(), null); + } + + public static boolean deleteDir(@NotNull FileLike dir, @Nullable Logger log) + { + return deleteDir(dir.toNioPathForWrite(), log); + } @Deprecated - public static boolean deleteDir(@NotNull File dir, Logger log) + public static boolean deleteDir(@NotNull File dir, @Nullable Logger log) { return deleteDir(dir.toPath(), log); } - public static boolean deleteDir(Path dir, Logger log) + public static boolean deleteDir(@NotNull Path dir, @Nullable Logger log) { //TODO seems 
like this could be reworked to use Files.walkFileTree log = log == null ? LOG : log; @@ -450,6 +461,12 @@ public static boolean mkdirs(FileLike file, boolean checkFileName) throws IOExce } + public static FileLike createDirectory(FileLike path) throws IOException + { + createDirectory(path.toNioPathForWrite(), AppProps.getInstance().isInvalidFilenameBlocked()); + return path; + } + public static Path createDirectory(Path path) throws IOException { return createDirectory(path, AppProps.getInstance().isInvalidFilenameBlocked()); @@ -477,8 +494,7 @@ public static void createDirectories(FileLike file) throws IOException { if (!file.getFileSystem().canWriteFiles()) throw new UnauthorizedException(); - File target = toFileForWrite(file); - createDirectories(target.toPath(), AppProps.getInstance().isInvalidFilenameBlocked()); + createDirectories(file.toNioPathForWrite(), AppProps.getInstance().isInvalidFilenameBlocked()); } @@ -606,6 +622,11 @@ public static String getBaseName(File file, int dots) return getBaseName(file.getName(), dots); } + public static String getBaseName(FileLike file, int dots) + { + return getBaseName(file.toNioPathForRead().toFile(), dots); + } + /** * Remove text right of and including the last period in a file's name. 
@@ -617,6 +638,11 @@ public static String getBaseName(File file) return getBaseName(file, 1); } + public static String getBaseName(FileLike file) + { + return getBaseName(file, 1); + } + /** * Returns the file name extension without the dot, null if there @@ -926,7 +952,17 @@ public static Path stringToPath(Container container, String str) public static Path stringToPath(Container container, String str, boolean isEncoded) { if (!FileUtil.hasCloudScheme(str)) - return new File(createUri(str, isEncoded)).toPath(); + { + URI uri = createUri(str, isEncoded); + if (!uri.isAbsolute()) + { + return PipelineService.get().findPipelineRoot(container).resolveToNioPath(str); + } + else + { + return new File(uri).toPath(); + } + } else return Objects.requireNonNull(CloudStoreService.get()).getPathFromUrl(container, PageFlowUtil.decode(str)/*decode everything not just the space*/); } @@ -1085,6 +1121,16 @@ public static String matchPathLists(List home, List file) return path.toString(); } + public static void copyFile(FileLike src, FileLike dst) throws IOException + { + try (InputStream in = src.openInputStream(); + OutputStream out = dst.openOutputStream()) + { + copyData(in, out); + } + } + + public static void copyFile(File src, File dst) throws IOException { try (FileInputStream is = new FileInputStream(src); diff --git a/api/src/org/labkey/api/util/MaintenancePipelineJob.java b/api/src/org/labkey/api/util/MaintenancePipelineJob.java index eaee550dffe..ae0d00215ef 100644 --- a/api/src/org/labkey/api/util/MaintenancePipelineJob.java +++ b/api/src/org/labkey/api/util/MaintenancePipelineJob.java @@ -42,7 +42,7 @@ protected MaintenancePipelineJob(@JsonProperty("_tasks") Collection tasks) { super("SystemMaintenance", info, pipeRoot); - setLogFile(pipeRoot.getLogDirectoryFileLike(true).resolveChild(FileUtil.makeFileNameWithTimestamp("system_maintenance", "log")).toNioPathForWrite()); + 
setLogFile(pipeRoot.getLogDirectoryFileLike(true).resolveChild(FileUtil.makeFileNameWithTimestamp("system_maintenance", "log"))); _tasks = tasks; } diff --git a/study/src/org/labkey/study/writer/OutputStreamWrapper.java b/api/src/org/labkey/api/util/OutputStreamWrapper.java similarity index 79% rename from study/src/org/labkey/study/writer/OutputStreamWrapper.java rename to api/src/org/labkey/api/util/OutputStreamWrapper.java index 8506020f233..f076893ac67 100644 --- a/study/src/org/labkey/study/writer/OutputStreamWrapper.java +++ b/api/src/org/labkey/api/util/OutputStreamWrapper.java @@ -1,91 +1,89 @@ -/* - * Copyright (c) 2009-2018 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.labkey.study.writer; - -import java.io.IOException; -import java.io.OutputStream; - -/* -* User: adam -* Date: Sep 5, 2009 -* Time: 6:30:12 PM -*/ - -// Allows overriding arbitrary OutputStreams. A little strange, since OutputStream is an abstract class not an interface... 
-public class OutputStreamWrapper extends OutputStream -{ - private final OutputStream _out; - - public OutputStreamWrapper(OutputStream out) - { - _out = out; - } - - @Override - public void write(int b) throws IOException - { - _out.write(b); - } - - @Override - public void write(byte[] b) throws IOException - { - _out.write(b); - } - - @Override - public void write(byte[] b, int off, int len) throws IOException - { - _out.write(b, off, len); - } - - @Override - public void flush() throws IOException - { - _out.flush(); - } - - @Override - public void close() throws IOException - { - _out.close(); - } - - @Override - public int hashCode() - { - return _out.hashCode(); - } - - @Override - public boolean equals(Object obj) - { - return _out.equals(obj); - } - - @Override - protected Object clone() throws CloneNotSupportedException - { - throw new CloneNotSupportedException(); - } - - @Override - public String toString() - { - return _out.toString(); - } -} +/* + * Copyright (c) 2009-2018 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.labkey.api.util; + +import org.jetbrains.annotations.NotNull; + +import java.io.IOException; +import java.io.OutputStream; + +/** + * Allows overriding arbitrary OutputStreams. A little strange, since OutputStream is an abstract class not an interface... 
+ */ +public class OutputStreamWrapper extends OutputStream +{ + private final OutputStream _out; + + public OutputStreamWrapper(OutputStream out) + { + _out = out; + } + + @Override + public void write(int b) throws IOException + { + _out.write(b); + } + + @Override + public void write(byte @NotNull[] b) throws IOException + { + _out.write(b); + } + + @Override + public void write(byte @NotNull[] b, int off, int len) throws IOException + { + _out.write(b, off, len); + } + + @Override + public void flush() throws IOException + { + _out.flush(); + } + + @Override + public void close() throws IOException + { + _out.close(); + } + + @Override + public int hashCode() + { + return _out.hashCode(); + } + + @Override + public boolean equals(Object obj) + { + return _out.equals(obj); + } + + @Override + protected Object clone() throws CloneNotSupportedException + { + throw new CloneNotSupportedException(); + } + + @Override + public String toString() + { + return _out.toString(); + } +} diff --git a/api/src/org/labkey/api/util/PossiblyGZIPpedFileInputStreamFactory.java b/api/src/org/labkey/api/util/PossiblyGZIPpedFileInputStreamFactory.java index f43864a88dc..ec063643fc1 100644 --- a/api/src/org/labkey/api/util/PossiblyGZIPpedFileInputStreamFactory.java +++ b/api/src/org/labkey/api/util/PossiblyGZIPpedFileInputStreamFactory.java @@ -15,9 +15,9 @@ */ package org.labkey.api.util; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileNotFoundException; +import org.labkey.vfs.FileLike; + +import java.io.IOException; import java.io.InputStream; import java.util.zip.GZIPInputStream; @@ -36,9 +36,9 @@ abstract public class PossiblyGZIPpedFileInputStreamFactory { private static final int STREAM_BUFFER_SIZE = 128 * 1024; - static public InputStream getStream(File f) throws FileNotFoundException + static public InputStream getStream(FileLike f) throws IOException { - FileInputStream fis = new FileInputStream(f); + InputStream fis = f.openInputStream(); try { 
return new GZIPInputStream(fis, STREAM_BUFFER_SIZE); @@ -54,7 +54,7 @@ static public InputStream getStream(File f) throws FileNotFoundException { // seems unlikely at this point } - return new FileInputStream(f); + return f.openInputStream(); } } } diff --git a/api/src/org/labkey/api/writer/FileSystemFile.java b/api/src/org/labkey/api/writer/FileSystemFile.java index 298c2f0b42f..2ec36c79d15 100644 --- a/api/src/org/labkey/api/writer/FileSystemFile.java +++ b/api/src/org/labkey/api/writer/FileSystemFile.java @@ -23,6 +23,7 @@ import org.labkey.api.util.MinorConfigurationException; import org.labkey.api.util.XmlBeansUtil; import org.labkey.api.util.XmlValidationException; +import org.labkey.vfs.FileLike; import java.io.BufferedInputStream; import java.io.File; @@ -59,6 +60,11 @@ public FileSystemFile(File root) this(root.toPath()); } + public FileSystemFile(FileLike root) + { + this(root.toNioPathForWrite()); + } + public FileSystemFile(Path root) { try diff --git a/api/src/org/labkey/api/writer/ZipUtil.java b/api/src/org/labkey/api/writer/ZipUtil.java index f486ff24e3d..5bafbf1c8c4 100644 --- a/api/src/org/labkey/api/writer/ZipUtil.java +++ b/api/src/org/labkey/api/writer/ZipUtil.java @@ -23,6 +23,9 @@ import org.labkey.api.util.ResponseHelper; import jakarta.servlet.http.HttpServletResponse; +import org.labkey.vfs.FileLike; +import org.labkey.vfs.FileSystemLike; + import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.File; @@ -46,16 +49,21 @@ */ public class ZipUtil { - // Unzip a zipped file archive to the specified directory - @Deprecated - public static List unzipToDirectory(File zipFile, File unzipDir) throws IOException + public static List unzipToDirectory(Path zipFile, Path unzipDir) throws IOException { - return unzipToDirectory(zipFile.toPath(), unzipDir.toPath()).stream().map(Path::toFile).collect(Collectors.toList()); + return unzipToDirectory(zipFile, unzipDir, null); } - public static List unzipToDirectory(Path 
zipFile, Path unzipDir) throws IOException + public static List unzipToDirectory(FileLike zipFile, FileLike unzipDir) throws IOException { - return unzipToDirectory(zipFile, unzipDir, null); + List paths = unzipToDirectory(zipFile.toNioPathForRead(), unzipDir.toNioPathForWrite(), null); + File rootFile = unzipDir.toNioPathForRead().toFile(); + List result = new ArrayList<>(); + for (Path path : paths) + { + result.add(FileSystemLike.wrapFile(rootFile, path.toFile())); + } + return result; } @Deprecated @@ -69,12 +77,6 @@ public static List unzipToDirectory(Path zipFile, Path unzipDir, @Nullable return unzipToDirectory(zipFile, unzipDir, log, false); } - @Deprecated - public static List unzipToDirectory(File zipFile, File unzipDir, @Nullable Logger log, boolean includeFolder) throws IOException - { - return unzipToDirectory(zipFile.toPath(), unzipDir.toPath(), log, includeFolder).stream().map(Path::toFile).collect(Collectors.toList()); - } - // Unzip an archive to the specified directory; log each file if Logger is non-null public static List unzipToDirectory(Path zipFile, Path unzipDir, @Nullable Logger log, boolean includeFolder) throws IOException { diff --git a/api/src/org/labkey/vfs/AbstractFileLike.java b/api/src/org/labkey/vfs/AbstractFileLike.java index 1d12f19baf0..be250592b04 100644 --- a/api/src/org/labkey/vfs/AbstractFileLike.java +++ b/api/src/org/labkey/vfs/AbstractFileLike.java @@ -1,10 +1,15 @@ package org.labkey.vfs; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; import org.labkey.api.util.Path; import org.labkey.api.view.UnauthorizedException; import java.io.IOException; +@JsonSerialize(using = FileLike.FileLikeSerializer.class) +@JsonDeserialize(using = FileLike.FileLikeDeserializer.class) abstract public class AbstractFileLike implements FileLike { final Path path; @@ -115,4 +120,14 @@ public String toString() { 
return toURI().toString(); } + + protected void _serialize(JsonGenerator gen) throws IOException + { + FileSystemLike fs = getFileSystem(); + gen.writeStringField("fs", fs.getClass().getSimpleName()); + gen.writeStringField("rootUri", fs.getURI().toString()); + gen.writeBooleanField("canReadFiles", fs.canReadFiles()); + gen.writeBooleanField("canWriteFiles", fs.canWriteFiles()); + gen.writeStringField("path", getPath().toString()); + } } diff --git a/api/src/org/labkey/vfs/FileLike.java b/api/src/org/labkey/vfs/FileLike.java index 0466976cfd7..e615817b6f0 100644 --- a/api/src/org/labkey/vfs/FileLike.java +++ b/api/src/org/labkey/vfs/FileLike.java @@ -20,7 +20,9 @@ import java.io.InputStream; import java.io.OutputStream; import java.net.URI; +import java.nio.file.InvalidPathException; import java.util.List; +import java.util.function.Predicate; @JsonSerialize(using = FileLike.FileLikeSerializer.class) @JsonDeserialize(using = FileLike.FileLikeDeserializer.class) @@ -75,10 +77,10 @@ default FileLike resolveChild(Path.Part name) default FileLike resolveChild(String name) { if (".".equals(name) || "..".equals(name)) - throw new IllegalArgumentException("Cannot resolve child '" + name + "'"); + throw new InvalidPathException(name, "Cannot resolve child"); Path path = Path.parse(name); if (1 != path.size()) - throw new IllegalArgumentException("Cannot resolve child '" + name + "'"); + throw new InvalidPathException(name, "Cannot resolve child"); return resolveFile(path); } @@ -87,6 +89,12 @@ default FileLike resolveChild(String name) @NotNull List getChildren(); + @NotNull + default List getChildren(Predicate filter) + { + return getChildren().stream().filter(filter).toList(); + } + /** * Does not create parent directories */ @@ -119,43 +127,31 @@ default FileLike resolveChild(String name) InputStream openInputStream() throws IOException; - - class FileLikeSerializer extends StdSerializer + class FileLikeSerializer extends StdSerializer { public FileLikeSerializer() { 
this(null); } - public FileLikeSerializer(Class t) + public FileLikeSerializer(Class t) { super(t); } - public void _serialize(FileLike value, JsonGenerator gen, SerializerProvider provider) throws IOException - { - FileSystemLike fs = value.getFileSystem(); - gen.writeStringField("rootUri", fs.getURI().toString()); - gen.writeBooleanField("canReadFiles", fs.canReadFiles()); - gen.writeBooleanField("canWriteFiles", fs.canWriteFiles()); - gen.writeStringField("path", value.getPath().toString()); - if (fs instanceof FileSystemVFS) - gen.writeBooleanField("vfs", true); - } - @Override - public void serialize(FileLike value, JsonGenerator gen, SerializerProvider provider) throws IOException + public void serialize(AbstractFileLike value, JsonGenerator gen, SerializerProvider provider) throws IOException { gen.writeStartObject(); - _serialize(value, gen, provider); + value._serialize(gen); gen.writeEndObject(); } @Override - public void serializeWithType(FileLike value, JsonGenerator gen, SerializerProvider provider, TypeSerializer typeSer) throws IOException + public void serializeWithType(AbstractFileLike value, JsonGenerator gen, SerializerProvider provider, TypeSerializer typeSer) throws IOException { WritableTypeId typeIdDef = typeSer.writeTypePrefix(gen, typeSer.typeId(value, JsonToken.START_OBJECT)); - _serialize(value, gen, provider); + value._serialize(gen); typeSer.writeTypeSuffix(gen, typeIdDef); } } @@ -190,6 +186,11 @@ else if (canReadFiles) b.readonly(); if (vfs) b.vfs(); + // for cloud config + if (node.has("containerId")) + b.container(node.get("containerId").asText()); + if (node.has("configName")) + b.config(node.get("configName").asText()); return b.build().resolveFile(Path.parse(path)); } } diff --git a/api/src/org/labkey/vfs/FileSystemLike.java b/api/src/org/labkey/vfs/FileSystemLike.java index 953c002553e..9e635b55155 100644 --- a/api/src/org/labkey/vfs/FileSystemLike.java +++ b/api/src/org/labkey/vfs/FileSystemLike.java @@ -1,7 +1,9 @@ package 
org.labkey.vfs; +import org.labkey.api.cloud.CloudStoreService; import org.labkey.api.collections.CaseInsensitiveHashMap; import org.labkey.api.data.Container; +import org.labkey.api.data.ContainerManager; import org.labkey.api.pipeline.PipeRoot; import org.labkey.api.pipeline.PipelineService; import org.labkey.api.util.FileUtil; @@ -116,6 +118,8 @@ class Builder boolean canDeleteRoot = false; boolean memCheck = true; boolean caching = false; + String containerId = null; + String configName = null; public Builder(URI uri) { @@ -170,14 +174,42 @@ public Builder noMemCheck() return this; } + public Builder container(String containerId) + { + this.containerId = containerId; + return this; + } + + public Builder config(String configName) + { + this.configName = configName; + return this; + } + + + public final String S3_SCHEME = "s3"; // S3FileSystemProvider.getScheme(); + public FileSystemLike build() { var scheme = defaultIfBlank(uri.getScheme(), FILE_SCHEME); + FileSystemLike ret; - if (defaultVfs || !FILE_SCHEME.equals(scheme)) + if (S3_SCHEME.equals(scheme)) + { + Container c = ContainerManager.getForId(containerId); + if (null == c) + throw new RuntimeException("Container not found: " + containerId); + ret = CloudStoreService.get().getFileSystemLike(c, configName); + } + else if (defaultVfs && !FILE_SCHEME.equals(scheme)) + { ret = new FileSystemVFS(uri, canReadFiles, canWriteFiles, canDeleteRoot); + } else + { ret = new FileSystemLocal(uri, canReadFiles, canWriteFiles, canDeleteRoot); + } + if (caching) ret = ret.getCachingFileSystem(); if (!memCheck) diff --git a/api/src/org/labkey/vfs/FileSystemVFS.java b/api/src/org/labkey/vfs/FileSystemVFS.java index 641b4863283..71fe21c406d 100644 --- a/api/src/org/labkey/vfs/FileSystemVFS.java +++ b/api/src/org/labkey/vfs/FileSystemVFS.java @@ -1,5 +1,6 @@ package org.labkey.vfs; +import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import 
com.fasterxml.jackson.databind.annotation.JsonSerialize; import org.apache.commons.lang3.StringUtils; @@ -309,5 +310,12 @@ public boolean equals(Object obj) return false; return vfs.getName().equals(other.vfs.getName()); } + + @Override + public void _serialize(JsonGenerator gen) throws IOException + { + super._serialize(gen); + gen.writeBooleanField("vfs", true); + } } } diff --git a/assay/api-src/org/labkey/api/assay/plate/AbstractPlateBasedAssayProvider.java b/assay/api-src/org/labkey/api/assay/plate/AbstractPlateBasedAssayProvider.java index 1e97a198f6a..e6e1b184ef0 100644 --- a/assay/api-src/org/labkey/api/assay/plate/AbstractPlateBasedAssayProvider.java +++ b/assay/api-src/org/labkey/api/assay/plate/AbstractPlateBasedAssayProvider.java @@ -22,7 +22,6 @@ import org.labkey.api.assay.AbstractTsvAssayProvider; import org.labkey.api.assay.AssayDataType; import org.labkey.api.assay.AssayProvider; -import org.labkey.api.assay.AssayRunCreator; import org.labkey.api.assay.AssayRunUploadContext; import org.labkey.api.assay.AssayService; import org.labkey.api.assay.actions.PlateUploadForm; @@ -121,9 +120,9 @@ public List>> createDefaultDomains(Cont } @Override - public AssayRunCreator getRunCreator() + public PlateBasedRunCreator getRunCreator() { - return new PlateBasedRunCreator(this); + return new PlateBasedRunCreator<>(this); } @Override @@ -385,7 +384,7 @@ public boolean collectPropertyOnUpload(AssayRunUploadContext uploadContext, S } } - public static class CurveFitTableInfo extends EnumTableInfo + public static class CurveFitTableInfo extends EnumTableInfo { PlateBasedAssayProvider _provider; diff --git a/assay/api-src/org/labkey/api/assay/plate/PlateBasedAssayProvider.java b/assay/api-src/org/labkey/api/assay/plate/PlateBasedAssayProvider.java index 4c716f2c652..0dd120dc38b 100644 --- a/assay/api-src/org/labkey/api/assay/plate/PlateBasedAssayProvider.java +++ b/assay/api-src/org/labkey/api/assay/plate/PlateBasedAssayProvider.java @@ -27,7 +27,6 @@ import 
org.labkey.api.study.assay.SampleMetadataInputFormat; import org.labkey.vfs.FileLike; -import java.io.File; import java.util.Collection; /** diff --git a/core/src/org/labkey/core/CoreController.java b/core/src/org/labkey/core/CoreController.java index 96068322b9a..5837de0c470 100644 --- a/core/src/org/labkey/core/CoreController.java +++ b/core/src/org/labkey/core/CoreController.java @@ -1936,7 +1936,8 @@ private List> getCloudArchiveImporters(FolderImporterForm fo private boolean isCloudArchive(FolderImporterForm form) { - return FileUtil.hasCloudScheme(form.getArchiveFilePath()); + String path = form.getArchiveFilePath(); + return StringUtils.isNotBlank(path) && FileUtil.hasCloudScheme(form.getArchiveFilePath()); } private List> getSelectableImporters(FolderImporterForm form, List registeredImporters) throws Exception diff --git a/core/src/org/labkey/core/admin/AdminController.java b/core/src/org/labkey/core/admin/AdminController.java index a24b9b65076..3bf1c68c95e 100644 --- a/core/src/org/labkey/core/admin/AdminController.java +++ b/core/src/org/labkey/core/admin/AdminController.java @@ -333,6 +333,7 @@ import org.labkey.data.xml.TablesDocument; import org.labkey.filters.ContentSecurityPolicyFilter; import org.labkey.security.xml.GroupEnumType; +import org.labkey.vfs.FileLike; import org.springframework.mock.web.MockHttpServletResponse; import org.springframework.validation.BindException; import org.springframework.validation.Errors; @@ -5336,7 +5337,7 @@ public boolean handlePost(ImportFolderForm form, BindException errors) throws Ex User user = getUser(); Container container = getContainer(); PipeRoot pipelineRoot; - Path pipelineUnzipDir; // Should be local & writable + FileLike pipelineUnzipDir; // Should be local & writable PipelineUrls pipelineUrlProvider; if (form.getOrigin() == null) @@ -5386,8 +5387,8 @@ public boolean handlePost(ImportFolderForm form, BindException errors) throws Ex } // get the folder.xml file from the unzipped import archive - Path 
archiveXml = pipelineUnzipDir.resolve("folder.xml"); - if (!Files.exists(archiveXml)) + FileLike archiveXml = pipelineUnzipDir.resolveChild("folder.xml"); + if (!archiveXml.exists()) { errors.reject("folderImport", "This archive doesn't contain a folder.xml file."); return false; @@ -5415,7 +5416,7 @@ public boolean handlePost(ImportFolderForm form, BindException errors) throws Ex return !errors.hasErrors(); } - private @Nullable FolderImportConfig getFolderFromZipArchive(Path pipelineUnzipDir, BindException errors) + private @Nullable FolderImportConfig getFolderFromZipArchive(FileLike pipelineUnzipDir, BindException errors) { // user chose to import from a zip file Map map = getFileMap(); @@ -5439,17 +5440,17 @@ public boolean handlePost(ImportFolderForm form, BindException errors) throws Ex // copy and unzip the uploaded import archive zip file to the pipeline unzip dir try { - Path pipelineUnzipFile = pipelineUnzipDir.resolve(originalFilename); + FileLike pipelineUnzipFile = pipelineUnzipDir.resolveFile(org.labkey.api.util.Path.parse(originalFilename)); // Check that the resolved file is under the pipelineUnzipDir - if (!pipelineUnzipFile.normalize().startsWith(pipelineUnzipDir.normalize())) + if (!pipelineUnzipFile.toNioPathForRead().normalize().startsWith(pipelineUnzipDir.toNioPathForRead().normalize())) { errors.reject("folderImport", "Invalid file path - must be within the unzip directory"); return null; } FileUtil.createDirectories(pipelineUnzipFile.getParent()); // Non-pipeline import sometimes fails here on Windows (shrug) - FileUtil.createFile(pipelineUnzipFile); - try (OutputStream os = Files.newOutputStream(pipelineUnzipFile)) + FileUtil.createNewFile(pipelineUnzipFile, true); + try (OutputStream os = pipelineUnzipFile.openOutputStream()) { FileUtil.copyData(zipFile.getInputStream(), os); } @@ -5476,7 +5477,7 @@ public boolean handlePost(ImportFolderForm form, BindException errors) throws Ex } } - private FolderImportConfig 
getFolderImportConfigFromTemplateFolder(final ImportFolderForm form, final Path pipelineUnzipDir, final BindException errors) throws Exception + private FolderImportConfig getFolderImportConfigFromTemplateFolder(final ImportFolderForm form, final FileLike pipelineUnzipDir, final BindException errors) throws Exception { // user choose to import from a template source folder Container sourceContainer = form.getSourceTemplateFolderContainer(); @@ -5488,7 +5489,12 @@ private FolderImportConfig getFolderImportConfigFromTemplateFolder(final ImportF PHI.NotPHI, false, false, false, new StaticLoggerGetter(LogManager.getLogger(FolderWriterImpl.class))); FolderWriterImpl writer = new FolderWriterImpl(); String zipFileName = FileUtil.makeFileNameWithTimestamp(sourceContainer.getName(), "folder.zip"); - try (ZipFile zip = new ZipFile(pipelineUnzipDir, zipFileName)) + FileLike implicitZipFile = pipelineUnzipDir.resolveChild(zipFileName); + if (!pipelineUnzipDir.isDirectory()) + pipelineUnzipDir.mkdirs(); + implicitZipFile.createFile(); + try (OutputStream out = implicitZipFile.openOutputStream(); + ZipFile zip = new ZipFile(out, false)) { writer.write(sourceContainer, ctx, zip); } @@ -5496,26 +5502,25 @@ private FolderImportConfig getFolderImportConfigFromTemplateFolder(final ImportF { errors.reject(SpringActionController.ERROR_MSG, e.getMessage()); } - Path implicitZipFile = pipelineUnzipDir.resolve(zipFileName); // To support the simple import option unzip the zip file to the pipeline unzip dir of the current container ZipUtil.unzipToDirectory(implicitZipFile, pipelineUnzipDir); return new FolderImportConfig( StringUtils.isNotEmpty(form.getSourceTemplateFolderId()), - implicitZipFile.getFileName().toString(), + implicitZipFile.getName(), implicitZipFile, null ); } private static class FolderImportConfig { - Path pipelineUnzipFile; + FileLike pipelineUnzipFile; String originalFileName; - Path archiveFile; + FileLike archiveFile; boolean fromTemplateSourceFolder; - public 
FolderImportConfig(boolean fromTemplateSourceFolder, String originalFileName, Path archiveFile, @Nullable Path pipelineUnzipFile) + public FolderImportConfig(boolean fromTemplateSourceFolder, String originalFileName, FileLike archiveFile, @Nullable FileLike pipelineUnzipFile) { this.originalFileName = originalFileName; this.archiveFile = archiveFile; diff --git a/core/src/org/labkey/core/admin/ValidateDomainsPipelineJob.java b/core/src/org/labkey/core/admin/ValidateDomainsPipelineJob.java index eff12e58b64..6cdc7d51b3f 100644 --- a/core/src/org/labkey/core/admin/ValidateDomainsPipelineJob.java +++ b/core/src/org/labkey/core/admin/ValidateDomainsPipelineJob.java @@ -23,8 +23,8 @@ import org.labkey.api.util.URLHelper; import org.labkey.api.util.UnexpectedException; import org.labkey.api.view.ViewBackgroundInfo; +import org.labkey.vfs.FileLike; -import java.io.File; import java.io.IOException; /** @@ -43,7 +43,7 @@ public ValidateDomainsPipelineJob(ViewBackgroundInfo info, PipeRoot root) try { - File logFile = FileUtil.createTempFile("validateDomains", ".log", root.ensureSystemDirectory()); + FileLike logFile = FileUtil.createTempFile("validateDomains", ".log", root.ensureSystemDirectory()); setLogFile(logFile); } catch (IOException e) diff --git a/experiment/src/org/labkey/experiment/CompressedInputStreamXarSource.java b/experiment/src/org/labkey/experiment/CompressedInputStreamXarSource.java index 485247f6930..58b2572578e 100644 --- a/experiment/src/org/labkey/experiment/CompressedInputStreamXarSource.java +++ b/experiment/src/org/labkey/experiment/CompressedInputStreamXarSource.java @@ -10,6 +10,7 @@ import org.labkey.api.security.User; import org.labkey.api.util.FileUtil; import org.labkey.api.util.XmlBeansUtil; +import org.labkey.vfs.FileLike; import java.io.BufferedInputStream; import java.io.BufferedOutputStream; @@ -19,7 +20,6 @@ import java.io.InputStream; import java.io.OutputStream; import java.nio.charset.StandardCharsets; -import java.nio.file.Path; 
import java.util.Map; import java.util.zip.ZipEntry; import java.util.zip.ZipInputStream; @@ -32,10 +32,10 @@ public class CompressedInputStreamXarSource extends AbstractFileXarSource { private final InputStream _xarInputStream; - private final Path _logFile; + private final FileLike _logFile; private String _xml; - public CompressedInputStreamXarSource(InputStream xarInputStream, Path xarFile, Path logFile, @Nullable PipelineJob job, User user, Container container, @Nullable Map substitutions) + public CompressedInputStreamXarSource(InputStream xarInputStream, FileLike xarFile, FileLike logFile, @Nullable PipelineJob job, User user, Container container, @Nullable Map substitutions) { super(job == null ? null : job.getDescription(), container, user, job, substitutions); _xarInputStream = xarInputStream; @@ -86,7 +86,7 @@ public ExperimentArchiveDocument getDocument() throws XmlException, IOException } @Override - public Path getLogFilePath() + public FileLike getLogFilePath() { return _logFile; } diff --git a/experiment/src/org/labkey/experiment/XarExportPipelineJob.java b/experiment/src/org/labkey/experiment/XarExportPipelineJob.java index bec82562dfc..660d6bc1ef2 100644 --- a/experiment/src/org/labkey/experiment/XarExportPipelineJob.java +++ b/experiment/src/org/labkey/experiment/XarExportPipelineJob.java @@ -73,7 +73,7 @@ public XarExportPipelineJob(ViewBackgroundInfo info, PipeRoot root, String fileN _exportFile = exportedXarsDir.resolveChild(_fileName).toNioPathForWrite().toFile(); - setLogFile(exportedXarsDir.resolveChild(fileName + ".log").toNioPathForWrite()); + setLogFile(exportedXarsDir.resolveChild(fileName + ".log")); header("Experiment export to " + _exportFile.getName()); } diff --git a/experiment/src/org/labkey/experiment/api/ExperimentServiceImpl.java b/experiment/src/org/labkey/experiment/api/ExperimentServiceImpl.java index 23ba91d58dc..768675f0b00 100644 --- a/experiment/src/org/labkey/experiment/api/ExperimentServiceImpl.java +++ 
b/experiment/src/org/labkey/experiment/api/ExperimentServiceImpl.java @@ -8164,9 +8164,9 @@ public List getExpProtocolsWithParameterValue( } @Override - public PipelineJob importXarAsync(ViewBackgroundInfo info, File file, String description, PipeRoot root) throws IOException + public PipelineJob importXarAsync(ViewBackgroundInfo info, FileLike file, String description, PipeRoot root) throws IOException { - ExperimentPipelineJob job = new ExperimentPipelineJob(info, file.toPath(), description, false, root); + ExperimentPipelineJob job = new ExperimentPipelineJob(info, file, description, false, root); try { PipelineService.get().queueJob(job); diff --git a/experiment/src/org/labkey/experiment/controllers/exp/ExperimentController.java b/experiment/src/org/labkey/experiment/controllers/exp/ExperimentController.java index 00644fd3577..bd3c8e9359b 100644 --- a/experiment/src/org/labkey/experiment/controllers/exp/ExperimentController.java +++ b/experiment/src/org/labkey/experiment/controllers/exp/ExperimentController.java @@ -23,9 +23,9 @@ import jakarta.servlet.http.HttpSession; import org.apache.commons.collections4.MapUtils; import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.Strings; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.apache.poi.openxml4j.exceptions.InvalidFormatException; import org.apache.poi.ss.usermodel.Workbook; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; @@ -4969,7 +4969,7 @@ private ActionURL exportXAR(@NotNull XarExportSelection selection, @Nullable LSI fileName = fixupExportName(fileName); String xarXmlFileName = null; - if (StringUtils.endsWithIgnoreCase(fileName, ".xar")) + if (Strings.CI.endsWith(fileName, ".xar")) xarXmlFileName = fileName + ".xml"; switch (exportType) @@ -6731,10 +6731,10 @@ public Object execute(Object o, BindException errors) throws Exception } PipeRoot pipeRoot = 
PipelineService.get().findPipelineRoot(getContainer()); - Path systemDir = pipeRoot.ensureSystemDirectoryPath(); - Path uploadDir = systemDir.resolve("UploadedXARs"); + FileLike systemDir = pipeRoot.ensureSystemDirectory(); + FileLike uploadDir = systemDir.resolveChild("UploadedXARs"); FileUtil.createDirectories(uploadDir); - if (!Files.isDirectory(uploadDir)) + if (!uploadDir.isDirectory()) { errors.reject(ERROR_MSG, "Unable to create a 'system/UploadedXARs' directory under the pipeline root"); return false; @@ -6744,18 +6744,18 @@ public Object execute(Object o, BindException errors) throws Exception { userDirName = GUEST_DIRECTORY_NAME; } - Path userDir = FileUtil.appendName(uploadDir, userDirName); + FileLike userDir = uploadDir.resolveChild(userDirName); FileUtil.createDirectories(userDir); - if (!Files.isDirectory(userDir)) + if (!userDir.isDirectory()) { errors.reject(ERROR_MSG, "Unable to create an 'UploadedXARs/" + userDirName + "' directory under the pipeline root"); return false; } - Path xarFile = FileUtil.appendName(userDir, formFile.getOriginalFilename()); + FileLike xarFile = userDir.resolveChild(formFile.getOriginalFilename()); // As this is multi-part will need to use finally to close, to prevent a stream closure exception - try (OutputStream out = new BufferedOutputStream(Files.newOutputStream(xarFile))) + try (OutputStream out = new BufferedOutputStream(xarFile.openOutputStream())) { out.write(bytes); } @@ -6786,17 +6786,17 @@ public void validateCommand(ImportXarForm target, Errors errors) @Override public boolean handlePost(ImportXarForm form, BindException errors) throws Exception { - for (File f : form.getValidatedFiles(getContainer())) + for (FileLike f : form.getValidatedFiles(getContainer())) { if (f.isFile()) { - ExperimentPipelineJob job = new ExperimentPipelineJob(getViewBackgroundInfo(), f.toPath(), "Experiment Import", false, form.getPipeRoot(getContainer())); + ExperimentPipelineJob job = new 
ExperimentPipelineJob(getViewBackgroundInfo(), f, "Experiment Import", false, form.getPipeRoot(getContainer())); // TODO: Configure module resources with the appropriate log location per container if (form.getModule() != null) { FileLike logFile = form.getPipeRoot(getContainer()).getLogDirectoryFileLike(true).resolveChild("module-resource-xar.log"); - job.setLogFile(logFile.toNioPathForWrite()); + job.setLogFile(logFile); } PipelineService.get().queueJob(job); @@ -6827,16 +6827,16 @@ public Object execute(ImportXarForm form, BindException errors) throws Exception ApiSimpleResponse response = new ApiSimpleResponse(); List> archives = new ArrayList<>(); - for (File f : form.getValidatedFiles(getContainer())) + for (FileLike f : form.getValidatedFiles(getContainer())) { Map archive = new HashMap<>(); - ExperimentPipelineJob job = new ExperimentPipelineJob(getViewBackgroundInfo(), f.toPath(), "Experiment Import", false, form.getPipeRoot(getContainer())); + ExperimentPipelineJob job = new ExperimentPipelineJob(getViewBackgroundInfo(), f, "Experiment Import", false, form.getPipeRoot(getContainer())); // TODO: Configure module resources with the appropriate log location per container if (form.getModule() != null) { FileLike logFile = form.getPipeRoot(getContainer()).getLogDirectoryFileLike(true).resolveChild("module-resource-xar.log"); - job.setLogFile(logFile.toNioPathForWrite()); + job.setLogFile(logFile); } PipelineService.get().queueJob(job); diff --git a/experiment/src/org/labkey/experiment/controllers/exp/ImportXarForm.java b/experiment/src/org/labkey/experiment/controllers/exp/ImportXarForm.java index f0ca5ddd765..306971e4a7c 100644 --- a/experiment/src/org/labkey/experiment/controllers/exp/ImportXarForm.java +++ b/experiment/src/org/labkey/experiment/controllers/exp/ImportXarForm.java @@ -23,6 +23,8 @@ import org.labkey.api.resource.Resource; import org.labkey.api.util.NetworkDrive; import org.labkey.api.view.NotFoundException; +import org.labkey.vfs.FileLike; 
+import org.labkey.vfs.FileSystemLike; import java.io.File; import java.util.ArrayList; @@ -51,7 +53,7 @@ public void setModule(String module) * default pipeline xar processing. */ @Override - public List getValidatedFiles(Container c, boolean allowNonExistentFiles) + public List getValidatedFiles(Container c, boolean allowNonExistentFiles) { if (_module == null) return super.getValidatedFiles(c, allowNonExistentFiles); @@ -70,7 +72,7 @@ public List getValidatedFiles(Container c, boolean allowNonExistentFiles) throw new NotFoundException("Could not find path " + getPath()); } - List files = new ArrayList<>(); + List files = new ArrayList<>(); for (String fileName : getFile()) { Resource rf = m.getModuleResource(getPath() + "/" + fileName); @@ -85,7 +87,7 @@ public List getValidatedFiles(Container c, boolean allowNonExistentFiles) { throw new NotFoundException("Could not find file '" + f + "'"); } - files.add(f); + files.add(FileSystemLike.wrapFile(f)); } return files; diff --git a/experiment/src/org/labkey/experiment/pipeline/ExperimentPipelineJob.java b/experiment/src/org/labkey/experiment/pipeline/ExperimentPipelineJob.java index 9df4b071384..1571ec5d2f2 100644 --- a/experiment/src/org/labkey/experiment/pipeline/ExperimentPipelineJob.java +++ b/experiment/src/org/labkey/experiment/pipeline/ExperimentPipelineJob.java @@ -31,10 +31,9 @@ import org.labkey.api.util.PageFlowUtil; import org.labkey.api.view.ActionURL; import org.labkey.api.view.ViewBackgroundInfo; +import org.labkey.vfs.FileLike; -import java.io.File; import java.io.IOException; -import java.nio.file.Path; import java.sql.BatchUpdateException; import java.util.List; @@ -48,14 +47,14 @@ public class ExperimentPipelineJob extends PipelineJob private static final Object _experimentLock = new Object(); - private final Path _xarFile; + private final FileLike _xarFile; private final String _description; private final boolean _deleteExistingRuns; private transient XarSource _xarSource; @JsonCreator - 
protected ExperimentPipelineJob(@JsonProperty("_xarFile") Path xarFile, + protected ExperimentPipelineJob(@JsonProperty("_xarFile") FileLike xarFile, @JsonProperty("_description") String description, @JsonProperty("_deleteExistingRuns") boolean deleteExistingRuns) { @@ -65,26 +64,20 @@ protected ExperimentPipelineJob(@JsonProperty("_xarFile") Path xarFile, _deleteExistingRuns = deleteExistingRuns; } - @Deprecated //Prefer the Path version - public ExperimentPipelineJob(ViewBackgroundInfo info, File file, String description, boolean deleteExistingRuns, PipeRoot root) throws IOException - { - this(info, file.toPath(), description, deleteExistingRuns, root); - } - - public ExperimentPipelineJob(ViewBackgroundInfo info, Path file, String description, boolean deleteExistingRuns, PipeRoot root) throws IOException + public ExperimentPipelineJob(ViewBackgroundInfo info, FileLike file, String description, boolean deleteExistingRuns, PipeRoot root) throws IOException { super(ExperimentPipelineProvider.NAME, info, root); _xarFile = file; - _description = description + " - " + file.getFileName().toString(); + _description = description + " - " + file.getName(); _deleteExistingRuns = deleteExistingRuns; XarSource xarSource = getXarSource(); header("XAR Import from " + xarSource.toString()); } - protected XarSource createXarSource(Path file) + protected XarSource createXarSource(FileLike file) { - String name = file.getFileName().toString().toLowerCase(); + String name = file.getName().toLowerCase(); if (name.endsWith(".xar") || name.endsWith(".zip")) { return new CompressedXarSource(file, this); diff --git a/experiment/src/org/labkey/experiment/pipeline/ExperimentPipelineProvider.java b/experiment/src/org/labkey/experiment/pipeline/ExperimentPipelineProvider.java index 4d797234e81..307413bbb58 100644 --- a/experiment/src/org/labkey/experiment/pipeline/ExperimentPipelineProvider.java +++ b/experiment/src/org/labkey/experiment/pipeline/ExperimentPipelineProvider.java @@ -49,7 
+49,7 @@ public static Path getMoveDirectory(PipeRoot pr) private static Path getExperimentDirectory(PipeRoot pr, String name) { - Path systemDir = pr.ensureSystemDirectoryPath(); + Path systemDir = pr.ensureSystemDirectory().toNioPathForRead(); return systemDir.resolve(DIR_NAME_EXPERIMENT).resolve(name); } diff --git a/experiment/src/org/labkey/experiment/pipeline/MoveRunsTask.java b/experiment/src/org/labkey/experiment/pipeline/MoveRunsTask.java index acc6a3ec7ee..12d9ec8b57f 100644 --- a/experiment/src/org/labkey/experiment/pipeline/MoveRunsTask.java +++ b/experiment/src/org/labkey/experiment/pipeline/MoveRunsTask.java @@ -39,6 +39,8 @@ import org.labkey.experiment.XarReader; import org.labkey.experiment.api.ExpRunImpl; import org.labkey.experiment.api.ExperimentServiceImpl; +import org.labkey.vfs.FileLike; +import org.labkey.vfs.FileSystemLike; import java.io.ByteArrayOutputStream; import java.io.File; @@ -252,9 +254,9 @@ public String canonicalizeDataFileURL(String dataFileURL) } @Override - public Path getLogFilePath() + public FileLike getLogFilePath() { - return _logFile.toPath(); + return FileSystemLike.wrapFile(_logFile); } public String toString() diff --git a/experiment/src/org/labkey/experiment/pipeline/SampleReloadTask.java b/experiment/src/org/labkey/experiment/pipeline/SampleReloadTask.java index 5237faa9b48..ff7594d64c9 100644 --- a/experiment/src/org/labkey/experiment/pipeline/SampleReloadTask.java +++ b/experiment/src/org/labkey/experiment/pipeline/SampleReloadTask.java @@ -65,7 +65,7 @@ public RecordedActionSet run() { PipelineJob job = getJob(); FileAnalysisJobSupport support = job.getJobSupport(FileAnalysisJobSupport.class); - job.setLogFile(FileUtil.appendName(support.getDataDirectory(), FileUtil.makeFileNameWithTimestamp("triggered_sample_reload", "log"))); + job.setLogFile(support.getDataDirectoryFileLike().resolveChild(FileUtil.makeFileNameWithTimestamp("triggered_sample_reload", "log"))); Map params = support.getParameters(); 
job.setStatus("RELOADING", "Job started at: " + DateUtil.nowISO()); diff --git a/experiment/src/org/labkey/experiment/pipeline/XarGeneratorSource.java b/experiment/src/org/labkey/experiment/pipeline/XarGeneratorSource.java index dc7b4a4be83..be9b9cf220e 100644 --- a/experiment/src/org/labkey/experiment/pipeline/XarGeneratorSource.java +++ b/experiment/src/org/labkey/experiment/pipeline/XarGeneratorSource.java @@ -18,6 +18,7 @@ import org.fhcrc.cpas.exp.xml.ExperimentArchiveDocument; import org.labkey.api.exp.AbstractFileXarSource; import org.labkey.api.pipeline.PipelineJob; +import org.labkey.vfs.FileLike; import java.nio.file.Path; @@ -27,14 +28,14 @@ */ public class XarGeneratorSource extends AbstractFileXarSource { - public XarGeneratorSource(PipelineJob job, Path xarFile) + public XarGeneratorSource(PipelineJob job, FileLike xarFile) { super(job); _xmlFile = xarFile; } @Override - public Path getLogFilePath() + public FileLike getLogFilePath() { throw new UnsupportedOperationException(); } diff --git a/experiment/src/org/labkey/experiment/pipeline/XarGeneratorTask.java b/experiment/src/org/labkey/experiment/pipeline/XarGeneratorTask.java index f86c683d814..f0e60638a03 100644 --- a/experiment/src/org/labkey/experiment/pipeline/XarGeneratorTask.java +++ b/experiment/src/org/labkey/experiment/pipeline/XarGeneratorTask.java @@ -39,14 +39,13 @@ import org.labkey.experiment.DataURLRelativizer; import org.labkey.api.exp.xar.LSIDRelativizer; import org.labkey.experiment.XarExporter; +import org.labkey.vfs.FileLike; import java.io.BufferedOutputStream; import java.io.IOException; import java.io.OutputStream; import java.nio.file.Files; -import java.nio.file.Path; import java.nio.file.StandardCopyOption; -import java.nio.file.StandardOpenOption; import java.util.Collections; import java.util.HashSet; import java.util.List; @@ -105,10 +104,10 @@ public List getProtocolActionNames() return Collections.emptyList(); } - protected Path getXarFile(PipelineJob job) + protected 
FileLike getXarFile(PipelineJob job) { FileAnalysisJobSupport jobSupport = job.getJobSupport(FileAnalysisJobSupport.class); - return getOutputType().newFile(jobSupport.getAnalysisDirectoryPath(), jobSupport.getBaseName()); + return getOutputType().newFile(jobSupport.getAnalysisDirectoryFileLike(), jobSupport.getBaseName()); } @Override @@ -159,7 +158,7 @@ public RecordedActionSet run() throws PipelineJobException Set importedRuns = new HashSet<>(); if (_factory.isLoadFiles()) { - Path permanentXAR = _factory.getXarFile(getJob()); + FileLike permanentXAR = _factory.getXarFile(getJob()); if (NetworkDrive.exists(permanentXAR)) { // Be sure that it's been imported (and not already deleted from the database) @@ -176,7 +175,7 @@ public RecordedActionSet run() throws PipelineJobException // Load the data files for this run importedRuns.addAll(ExperimentService.get().importXar(new FileXarSource(getLoadingXarFile(), getJob()), getJob(), false)); - Files.move(getLoadingXarFile(), permanentXAR); + Files.move(getLoadingXarFile().toNioPathForWrite(), permanentXAR.toNioPathForWrite()); } } else @@ -225,19 +224,19 @@ public RecordedActionSet run() throws PipelineJobException @Override public void writeToDisk(ExpRun run) throws PipelineJobException { - Path f = getLoadingXarFile(); - Path tempFile = f.getParent().resolve(f.getFileName().toString() + ".temp"); + FileLike f = getLoadingXarFile(); + FileLike tempFile = f.getParent().resolveChild(f.getName() + ".temp"); try { XarExporter exporter = new XarExporter(LSIDRelativizer.FOLDER_RELATIVE, DataURLRelativizer.RUN_RELATIVE_LOCATION.createURLRewriter(), getJob().getUser(), getJob().getContainer()); exporter.addExperimentRun(run); - try (OutputStream fOut = new BufferedOutputStream(Files.newOutputStream(tempFile, StandardOpenOption.CREATE, StandardOpenOption.WRITE))) + try (OutputStream fOut = new BufferedOutputStream(tempFile.openOutputStream())) { exporter.dumpXML(fOut); fOut.close(); - Files.move(tempFile, f, 
StandardCopyOption.ATOMIC_MOVE); + Files.move(tempFile.toNioPathForWrite(), f.toNioPathForWrite(), StandardCopyOption.ATOMIC_MOVE); } } catch (ExperimentException | IOException e) @@ -246,9 +245,9 @@ public void writeToDisk(ExpRun run) throws PipelineJobException } } - private Path getLoadingXarFile() + private FileLike getLoadingXarFile() { - Path xarPath = _factory.getXarFile(getJob()); - return xarPath.resolve(xarPath + ".loading"); + FileLike xarPath = _factory.getXarFile(getJob()); + return xarPath.getParent().resolveChild(xarPath.getName() + ".loading"); } } diff --git a/experiment/src/org/labkey/experiment/samples/AbstractExpFolderImporter.java b/experiment/src/org/labkey/experiment/samples/AbstractExpFolderImporter.java index 750b80d1628..5147ae170f4 100644 --- a/experiment/src/org/labkey/experiment/samples/AbstractExpFolderImporter.java +++ b/experiment/src/org/labkey/experiment/samples/AbstractExpFolderImporter.java @@ -46,10 +46,11 @@ import org.labkey.experiment.api.SampleTypeServiceImpl; import org.labkey.experiment.xar.FolderXarImporterFactory; import org.labkey.experiment.xar.XarImportContext; +import org.labkey.vfs.FileLike; +import org.labkey.vfs.FileSystemLike; import java.io.IOException; import java.io.InputStream; -import java.nio.file.Files; import java.nio.file.Path; import java.sql.SQLException; import java.util.Collections; @@ -90,9 +91,9 @@ public void process(@Nullable PipelineJob job, FolderImportContext ctx, VirtualF if (xarDir != null) { // #44384 Generate a relative Path object for the folder's VirtualFile - Path xarDirPath = Path.of(xarDir.getLocation()); - Path typesXarFile = null; - Path runsXarFile = null; + FileLike xarDirPath = FileSystemLike.wrapFile(Path.of(xarDir.getLocation()).toAbsolutePath()); + FileLike typesXarFile = null; + FileLike runsXarFile = null; Logger log = ctx.getLogger(); if (null != job) @@ -104,14 +105,14 @@ public void process(@Nullable PipelineJob job, FolderImportContext ctx, VirtualF if 
(isXarTypesFile(file)) { if (typesXarFile == null) - typesXarFile = xarDirPath.resolve(file); + typesXarFile = xarDirPath.resolveChild(file); else log.error("More than one types XAR file found in the sample type directory: ", file); } else if (file.equalsIgnoreCase(XAR_RUNS_NAME) || file.equalsIgnoreCase(XAR_RUNS_XML_NAME)) { if (runsXarFile == null) - runsXarFile = xarDirPath.resolve(file); + runsXarFile = xarDirPath.resolveChild(file); else log.error("More than one runs XAR file found in the sample type directory: ", file); } @@ -126,8 +127,8 @@ else if (file.equalsIgnoreCase(XAR_RUNS_NAME) || file.equalsIgnoreCase(XAR_RUNS_ { if (typesXarFile != null) { - Path logFile = null; - if (Files.exists(typesXarFile)) + FileLike logFile = null; + if (typesXarFile.exists()) logFile = CompressedInputStreamXarSource.getLogFileFor(typesXarFile); XarReader typesReader = getXarReader(job, ctx, root, typesXarFile); XarContext xarContext = typesReader.getXarSource().getXarContext(); @@ -142,10 +143,10 @@ else if (file.equalsIgnoreCase(XAR_RUNS_NAME) || file.equalsIgnoreCase(XAR_RUNS_ if (runsXarFile != null) { XarSource runsXarSource; - if (runsXarFile.getFileName().toString().toLowerCase().endsWith(".xar.xml")) + if (runsXarFile.getName().toLowerCase().endsWith(".xar.xml")) runsXarSource = new FileXarSource(runsXarFile, job, ctx.getContainer(), ctx.getXarJobIdContext()); else - runsXarSource = new CompressedInputStreamXarSource(xarDir.getInputStream(runsXarFile.getFileName().toString()), runsXarFile, logFile, job, ctx.getUser(), ctx.getContainer(), ctx.getXarJobIdContext()); + runsXarSource = new CompressedInputStreamXarSource(xarDir.getInputStream(runsXarFile.getName()), runsXarFile, logFile, job, ctx.getUser(), ctx.getContainer(), ctx.getXarJobIdContext()); try { runsXarSource.init(); @@ -155,7 +156,7 @@ else if (file.equalsIgnoreCase(XAR_RUNS_NAME) || file.equalsIgnoreCase(XAR_RUNS_ log.error("Failed to initialize runs XAR source", e); throw(e); } - log.info("Importing the 
runs XAR file: " + runsXarFile.getFileName().toString()); + log.info("Importing the runs XAR file: " + runsXarFile.getName()); XarReader runsReader = new FolderXarImporterFactory.FolderExportXarReader(runsXarSource, job); runsReader.setStrictValidateExistingSampleType(xarCtx.isStrictValidateExistingSampleType()); runsReader.parseAndLoad(false, ctx.getAuditBehaviorType()); @@ -188,14 +189,14 @@ else if (file.equalsIgnoreCase(XAR_RUNS_NAME) || file.equalsIgnoreCase(XAR_RUNS_ } } - protected XarReader getXarReader(@Nullable PipelineJob job, FolderImportContext ctx, VirtualFile root, Path typesXarFile) throws IOException, ExperimentException + protected XarReader getXarReader(@Nullable PipelineJob job, FolderImportContext ctx, VirtualFile root, FileLike typesXarFile) throws IOException, ExperimentException { VirtualFile xarDir = getXarDir(root); Logger log = ctx.getLogger(); - Path logFile = null; + FileLike logFile = null; // we don't need the log file in cases where the xarFile is a virtual file and not in the file system - if (Files.exists(typesXarFile)) + if (typesXarFile.exists()) logFile = CompressedInputStreamXarSource.getLogFileFor(typesXarFile); if (job == null) @@ -206,10 +207,10 @@ protected XarReader getXarReader(@Nullable PipelineJob job, FolderImportContext XarSource typesXarSource; - if (typesXarFile.getFileName().toString().toLowerCase().endsWith(".xar.xml")) + if (typesXarFile.getName().toLowerCase().endsWith(".xar.xml")) typesXarSource = new FileXarSource(typesXarFile, job, ctx.getContainer(), ctx.getXarJobIdContext()); else - typesXarSource = new CompressedInputStreamXarSource(xarDir.getInputStream(typesXarFile.getFileName().toString()), typesXarFile, logFile, job, ctx.getUser(), ctx.getContainer(), ctx.getXarJobIdContext()); + typesXarSource = new CompressedInputStreamXarSource(xarDir.getInputStream(typesXarFile.getName()), typesXarFile, logFile, job, ctx.getUser(), ctx.getContainer(), ctx.getXarJobIdContext()); try { typesXarSource.init(); diff 
--git a/experiment/src/org/labkey/experiment/samples/SampleStatusFolderImporter.java b/experiment/src/org/labkey/experiment/samples/SampleStatusFolderImporter.java index 7ade7bb4675..7d3e441d744 100644 --- a/experiment/src/org/labkey/experiment/samples/SampleStatusFolderImporter.java +++ b/experiment/src/org/labkey/experiment/samples/SampleStatusFolderImporter.java @@ -15,6 +15,8 @@ import org.labkey.api.util.FileUtil; import org.labkey.api.writer.VirtualFile; import org.labkey.experiment.XarReader; +import org.labkey.vfs.FileLike; +import org.labkey.vfs.FileSystemLike; import java.nio.file.Path; import java.util.HashMap; @@ -52,8 +54,8 @@ public void process(@Nullable PipelineJob job, FolderImportContext ctx, VirtualF if (xarDir != null) { // #44384 Generate a relative Path object for the folder's VirtualFile - Path xarDirPath = Path.of(xarDir.getLocation()); - Path typesXarFile = null; + FileLike xarDirPath = FileSystemLike.wrapFile(Path.of(xarDir.getLocation()).toAbsolutePath()); + FileLike typesXarFile = null; Map sampleStatusDataFiles = new HashMap<>(); Logger log = ctx.getLogger(); @@ -66,7 +68,7 @@ public void process(@Nullable PipelineJob job, FolderImportContext ctx, VirtualF if (file.equalsIgnoreCase(XAR_TYPES_NAME) || file.equalsIgnoreCase(XAR_TYPES_XML_NAME)) { if (typesXarFile == null) - typesXarFile = xarDirPath.resolve(file); + typesXarFile = xarDirPath.resolveChild(file); else log.error("More than one types XAR file found in the sample type directory: ", file); } diff --git a/experiment/src/org/labkey/experiment/xar/CompressedXarSource.java b/experiment/src/org/labkey/experiment/xar/CompressedXarSource.java index 9463c79c15d..2c8ed5d4738 100644 --- a/experiment/src/org/labkey/experiment/xar/CompressedXarSource.java +++ b/experiment/src/org/labkey/experiment/xar/CompressedXarSource.java @@ -24,6 +24,7 @@ import org.labkey.api.pipeline.PipelineJob; import org.labkey.api.util.FileUtil; import org.labkey.api.writer.ZipUtil; +import 
org.labkey.vfs.FileLike; import java.io.IOException; import java.nio.file.Files; @@ -38,9 +39,9 @@ */ public class CompressedXarSource extends AbstractFileXarSource { - private final Path _xarFile; + private final FileLike _xarFile; - public CompressedXarSource(Path xarFile, PipelineJob job) + public CompressedXarSource(FileLike xarFile, PipelineJob job) { super(job); _xarFile = xarFile; @@ -53,13 +54,13 @@ public CompressedXarSource(Path xarFile, PipelineJob job) * This may not be the same as the Container returned by job.getContainer(). * @param substitutions Additional context substitutions */ - public CompressedXarSource(Path xarFile, PipelineJob job, Container targetContainer, @Nullable Map substitutions) + public CompressedXarSource(FileLike xarFile, PipelineJob job, Container targetContainer, @Nullable Map substitutions) { super(job.getDescription(), targetContainer, job.getUser(), job, substitutions); _xarFile = xarFile; } - public CompressedXarSource(Path xarFile, PipelineJob job, Container targetContainer) + public CompressedXarSource(FileLike xarFile, PipelineJob job, Container targetContainer) { this(xarFile, job, targetContainer, null); } @@ -67,19 +68,19 @@ public CompressedXarSource(Path xarFile, PipelineJob job, Container targetContai @Override public void init() throws ExperimentException, IOException { - Path outputDir = _xarFile.resolve(_xarFile + ".exploded"); + FileLike outputDir = _xarFile.getParent().resolveChild(_xarFile.getName() + ".exploded"); FileUtil.deleteDir(outputDir); - if (Files.exists(outputDir)) + if (outputDir.exists()) { throw new ExperimentException("Failed to clean up old directory " + outputDir); } FileUtil.createDirectories(outputDir); - if (!Files.isDirectory(outputDir)) + if (!outputDir.isDirectory()) { throw new ExperimentException("Failed to create directory " + outputDir); } - List xarContents; + List xarContents; try { xarContents = ZipUtil.unzipToDirectory(_xarFile, outputDir); @@ -89,7 +90,7 @@ public void
init() throws ExperimentException, IOException throw new ExperimentException("Failed to extract XAR file: " + _xarFile, e); } - List xarFiles = xarContents.stream().filter(f -> f.getFileName().toString().toLowerCase().endsWith(".xar.xml")).collect(Collectors.toList()); + List xarFiles = xarContents.stream().filter(f -> f.getName().toLowerCase().endsWith(".xar.xml")).toList(); if (xarFiles.isEmpty()) { @@ -106,7 +107,7 @@ else if (xarFiles.size() > 1) } @Override - public Path getLogFilePath() + public FileLike getLogFilePath() { try { diff --git a/experiment/src/org/labkey/experiment/xar/FolderXarImporterFactory.java b/experiment/src/org/labkey/experiment/xar/FolderXarImporterFactory.java index b430a42a436..18326f029fd 100644 --- a/experiment/src/org/labkey/experiment/xar/FolderXarImporterFactory.java +++ b/experiment/src/org/labkey/experiment/xar/FolderXarImporterFactory.java @@ -32,8 +32,8 @@ import org.labkey.api.writer.VirtualFile; import org.labkey.experiment.XarReader; import org.labkey.experiment.pipeline.ExperimentPipelineJob; - -import java.nio.file.Path; +import org.labkey.vfs.FileLike; +import org.labkey.vfs.FileSystemLike; /** * User: vsharma @@ -100,7 +100,7 @@ public void process(PipelineJob job, FolderImportContext ctx, VirtualFile root) throw e; } - Path xarFile = xarSourceWrapper.getXarFile(); + FileLike xarFile = xarSourceWrapper.getXarFile(); if (xarFile == null) { ctx.getLogger().error("Could not find a xar file in the xar directory."); @@ -120,7 +120,7 @@ public void process(PipelineJob job, FolderImportContext ctx, VirtualFile root) job = new ExperimentPipelineJob(bgInfo, xarFile, "Xar import", false, pipeRoot) { @Override - protected XarSource createXarSource(Path file) + protected XarSource createXarSource(FileLike file) { // Assume this is a .xar or a .zip file return xarSourceWrapper.getXarSource(this); @@ -165,7 +165,7 @@ private static class FolderExportXarSourceWrapper private final VirtualFile _xarDir; private final FolderImportContext 
_importContext; - private Path _xarFile; + private FileLike _xarFile; private XarSource _xarSource; public FolderExportXarSourceWrapper(VirtualFile xarDir, FolderImportContext ctx) @@ -185,13 +185,13 @@ public void init() { if (file.toLowerCase().endsWith(".xar") || file.toLowerCase().endsWith(".xar.xml")) { - _xarFile = FileUtil.getPath(_importContext.getContainer(), FileUtil.createUri(_xarDir.getLocation())).resolve(file); + _xarFile = FileSystemLike.wrapFile(FileUtil.getPath(_importContext.getContainer(), FileUtil.createUri(_xarDir.getLocation())).resolve(file)); break; } } } - public Path getXarFile() + public FileLike getXarFile() { return _xarFile; } @@ -200,7 +200,7 @@ public XarSource getXarSource(PipelineJob job) { if (_xarSource == null) { - if (getXarFile().getFileName().toString().toLowerCase().endsWith(".xar.xml")) + if (getXarFile().getName().toLowerCase().endsWith(".xar.xml")) { _xarSource = new FileXarSource( getXarFile(), diff --git a/filecontent/src/org/labkey/filecontent/FileContentServiceImpl.java b/filecontent/src/org/labkey/filecontent/FileContentServiceImpl.java index da9fe34706a..2836818c539 100644 --- a/filecontent/src/org/labkey/filecontent/FileContentServiceImpl.java +++ b/filecontent/src/org/labkey/filecontent/FileContentServiceImpl.java @@ -93,6 +93,7 @@ import org.labkey.api.view.template.Warnings; import org.labkey.api.webdav.WebdavResource; import org.labkey.api.webdav.WebdavService; +import org.labkey.vfs.FileLike; import java.beans.PropertyChangeEvent; import java.io.BufferedWriter; @@ -1374,6 +1375,13 @@ public String getAbsolutePathFromDataFileUrl(String dataFileUrl, Container conta return FileUtil.getAbsolutePath(container, FileUtil.createUri(dataFileUrl)); } + @Nullable + @Override + public URI getWebDavUrl(@NotNull FileLike path, @NotNull Container container, @NotNull PathType type) + { + return getWebDavUrl(path.toNioPathForRead(), container, type); + } + @Nullable @Override public URI getWebDavUrl(@NotNull java.nio.file.Path 
path, @NotNull Container container, @NotNull PathType type) diff --git a/pipeline/src/org/labkey/pipeline/PipelineController.java b/pipeline/src/org/labkey/pipeline/PipelineController.java index c2e296f1c4b..f47a38279c9 100644 --- a/pipeline/src/org/labkey/pipeline/PipelineController.java +++ b/pipeline/src/org/labkey/pipeline/PipelineController.java @@ -115,6 +115,7 @@ import org.labkey.pipeline.api.PipelineServiceImpl; import org.labkey.pipeline.api.PipelineStatusManager; import org.labkey.pipeline.status.StatusController; +import org.labkey.vfs.FileLike; import org.springframework.beans.MutablePropertyValues; import org.springframework.validation.BindException; import org.springframework.validation.Errors; @@ -1172,10 +1173,10 @@ public class ImportFolderFromPipelineAction extends SimpleRedirectAction _importContainers = new ArrayList<>(); private String _navTrail = "Import Folder"; - private java.nio.file.Path _archiveFile; + private FileLike _archiveFile; @Override public void validateCommand(StartFolderImportForm form, Errors errors) @@ -1221,7 +1222,8 @@ else if (form.getFilePath() == null) } else { - _archiveFile = PipelineManager.validateFolderImportFileNioPath(form.getFilePath(), currentPipelineRoot, errors); + // We no longer support absolute paths - should be relative to the pipeline root + _archiveFile = currentPipelineRoot.resolvePathToFileLike(form.getFilePath()); if (OptionalFeatureService.get().isFeatureEnabled(PipelineModule.ADVANCED_IMPORT_FLAG)) { @@ -1298,14 +1300,14 @@ public boolean handlePost(StartFolderImportForm form, BindException errors) thro { User user = getUser(); boolean success = true; - Map containerArchiveXmlMap = new HashMap<>(); + Map containerArchiveXmlMap = new HashMap<>(); - if (Files.exists(_archiveFile)) + if (_archiveFile.exists()) { // iterate over the selected containers, or just the current container in the default case, and unzip the archive if necessary for (Container container : _importContainers) { - 
java.nio.file.Path archiveXml = PipelineManager.getArchiveXmlFile(container, _archiveFile, "folder.xml", errors); + FileLike archiveXml = PipelineManager.getArchiveXmlFile(container, _archiveFile, "folder.xml", errors); if (errors.hasErrors()) return false; @@ -1342,12 +1344,12 @@ public boolean handlePost(StartFolderImportForm form, BindException errors) thro return success; } - private boolean createImportPipelineJob(Container container, User user, ImportOptions options, java.nio.file.Path archiveXml) + private boolean createImportPipelineJob(Container container, User user, ImportOptions options, FileLike archiveXml) { PipeRoot pipelineRoot = PipelineService.get().findPipelineRoot(container); ActionURL url = getViewContext().getActionURL(); - return PipelineService.get().runFolderImportJob(container, user, url, archiveXml, _archiveFile.getFileName().toString(), pipelineRoot, options); + return PipelineService.get().runFolderImportJob(container, user, url, archiveXml, _archiveFile.getName(), pipelineRoot, options); } @Override @@ -1721,11 +1723,11 @@ public ActionURL urlActions(Container container) } @Override - public ActionURL urlStartFolderImport(Container container, @NotNull java.nio.file.Path archiveFile, @Nullable ImportOptions options, boolean fromTemplateSourceFolder) + public ActionURL urlStartFolderImport(Container container, @NotNull FileLike archiveFile, @Nullable ImportOptions options, boolean fromTemplateSourceFolder) { ActionURL url = new ActionURL(StartFolderImportAction.class, container); - return addStartImportParameters(url, archiveFile, options, fromTemplateSourceFolder); + return addStartImportParameters(container, url, archiveFile, options, fromTemplateSourceFolder); } @Override @@ -1742,9 +1744,10 @@ public ActionURL urlCreatePipelineTrigger(Container container, String pipelineId return url; } - private ActionURL addStartImportParameters(ActionURL url, @NotNull java.nio.file.Path file, @Nullable ImportOptions options, boolean 
fromTemplateSourceFolder) + private ActionURL addStartImportParameters(Container container, ActionURL url, @NotNull FileLike file, @Nullable ImportOptions options, boolean fromTemplateSourceFolder) { - url.addParameter("filePath", file.toAbsolutePath().toString()); + PipeRoot pipelineRoot = PipelineService.get().findPipelineRoot(container); + url.addParameter("filePath", pipelineRoot.relativePath(file)); url.addParameter("validateQueries", options == null || !options.isSkipQueryValidation()); url.addParameter("createSharedDatasets", options == null || options.isCreateSharedDatasets()); if (options != null) diff --git a/pipeline/src/org/labkey/pipeline/PipelineModule.java b/pipeline/src/org/labkey/pipeline/PipelineModule.java index 6482353c6b2..405e7555c65 100644 --- a/pipeline/src/org/labkey/pipeline/PipelineModule.java +++ b/pipeline/src/org/labkey/pipeline/PipelineModule.java @@ -21,12 +21,15 @@ import org.labkey.api.admin.sitevalidation.SiteValidationService; import org.labkey.api.audit.AuditLogService; import org.labkey.api.collections.CaseInsensitiveHashSet; +import org.labkey.api.data.CompareType; import org.labkey.api.data.Container; import org.labkey.api.data.ContainerManager; import org.labkey.api.data.DbSchema; import org.labkey.api.data.RuntimeSQLException; import org.labkey.api.data.SQLFragment; +import org.labkey.api.data.SimpleFilter; import org.labkey.api.data.SqlSelector; +import org.labkey.api.data.TableSelector; import org.labkey.api.data.dialect.SqlDialect; import org.labkey.api.files.FileContentService; import org.labkey.api.files.TableUpdaterFileListener; @@ -267,6 +270,7 @@ protected void startupAfterSpringConfig(ModuleContext moduleContext) result.put("jmsType", PipelineService.get().getJmsType().toString()); result.put("pipelineRootCount", PipelineService.get().getAllPipelineRoots().size()); + result.put("supplementalDirectories", new TableSelector(PipelineSchema.getInstance().getTableInfoPipelineRoots(), new SimpleFilter("SupplementalPath", 
null, CompareType.NONBLANK), null).getRowCount()); return result; }); diff --git a/pipeline/src/org/labkey/pipeline/analysis/AnalysisController.java b/pipeline/src/org/labkey/pipeline/analysis/AnalysisController.java index a59f1ba0ce4..0a53f7d1062 100644 --- a/pipeline/src/org/labkey/pipeline/analysis/AnalysisController.java +++ b/pipeline/src/org/labkey/pipeline/analysis/AnalysisController.java @@ -69,6 +69,7 @@ import org.labkey.api.view.NotFoundException; import org.labkey.api.view.ViewForm; import org.labkey.api.writer.ContainerUser; +import org.labkey.vfs.FileLike; import org.springframework.validation.BindException; import org.springframework.validation.Errors; import org.springframework.web.servlet.ModelAndView; @@ -77,7 +78,6 @@ import java.io.IOException; import java.io.StringReader; import java.nio.charset.Charset; -import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -201,7 +201,7 @@ public ApiResponse execute(AnalyzeForm form, BindException errors) PipelineService.PathAnalysisProperties props = PipelineService.get().getFileAnalysisProperties(getContainer(), form.getTaskId(), form.getPath()); AbstractFileAnalysisProtocol protocol = props.getFactory().getProtocol(props.getPipeRoot(), props.getDirData(), form.getProtocolName(), false); //NOTE: if protocol if null, initFileStatus() will return a result of UNKNOWN - Path dirAnalysis = props.getFactory().getAnalysisDir(props.getDirData(), form.getProtocolName(), props.getPipeRoot()); + FileLike dirAnalysis = props.getFactory().getAnalysisDir(props.getDirData(), form.getProtocolName(), props.getPipeRoot()); form.initStatus(protocol, props.getDirData(), dirAnalysis); boolean isRetry = false; @@ -251,17 +251,17 @@ public ApiResponse execute(AnalyzeForm form, BindException errors) if (wbRoot == null || !wbRoot.isValid()) continue; - File wbDirData = null; + FileLike wbDirData = null; if (form.getPath() != null) { - wbDirData = 
wbRoot.resolvePath(form.getPath()); + wbDirData = wbRoot.resolvePathToFileLike(form.getPath()); if (!NetworkDrive.exists(wbDirData)) continue; } - for (String protocolName : props.getFactory().getProtocolNames(wbRoot, wbDirData.toPath(), false)) + for (String protocolName : props.getFactory().getProtocolNames(wbRoot, wbDirData, false)) { - protocols.put(getProtocolJson(protocolName, wbRoot, wbDirData.toPath(), props.getFactory())); + protocols.put(getProtocolJson(protocolName, wbRoot, wbDirData, props.getFactory())); } } } @@ -273,7 +273,7 @@ public ApiResponse execute(AnalyzeForm form, BindException errors) return new ApiSimpleResponse(result); } - protected JSONObject getProtocolJson(String protocolName, PipeRoot root, @Nullable Path dirData, AbstractFileAnalysisProtocolFactory factory) throws NotFoundException + protected JSONObject getProtocolJson(String protocolName, PipeRoot root, @Nullable FileLike dirData, AbstractFileAnalysisProtocolFactory factory) throws NotFoundException { JSONObject protocol = new JSONObject(); AbstractFileAnalysisProtocol pipelineProtocol = factory.getProtocol(root, dirData, protocolName, false); diff --git a/pipeline/src/org/labkey/pipeline/analysis/FileAnalysisJob.java b/pipeline/src/org/labkey/pipeline/analysis/FileAnalysisJob.java index 76239b6bfff..aba982a95fc 100644 --- a/pipeline/src/org/labkey/pipeline/analysis/FileAnalysisJob.java +++ b/pipeline/src/org/labkey/pipeline/analysis/FileAnalysisJob.java @@ -26,10 +26,10 @@ import org.labkey.api.util.FileType; import org.labkey.api.util.NetworkDrive; import org.labkey.api.view.ViewBackgroundInfo; +import org.labkey.vfs.FileLike; import java.io.File; import java.io.IOException; -import java.nio.file.Path; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -54,19 +54,18 @@ public FileAnalysisJob(FileAnalysisProtocol protocol, PipeRoot root, TaskId taskPipelineId, String protocolName, - Path fileParameters, - List filesInput, + FileLike fileParameters, + 
List filesInput, @Nullable Map variableMap, - boolean splittable, - boolean writeJobInfoFile) throws IOException + boolean splittable) throws IOException { - super(protocol, providerName, info, root, protocolName, fileParameters, filesInput, splittable, writeJobInfoFile); + super(protocol, providerName, info, root, protocolName, fileParameters, filesInput, splittable); _taskPipelineId = taskPipelineId; _variableMap = variableMap; } - public FileAnalysisJob(FileAnalysisJob job, File fileInput) + public FileAnalysisJob(FileAnalysisJob job, FileLike fileInput) { super(job, fileInput); @@ -101,7 +100,7 @@ public TaskId getTaskPipelineId() } @Override - public AbstractFileAnalysisJob createSingleFileJob(File file) + public AbstractFileAnalysisJob createSingleFileJob(FileLike file) { return new FileAnalysisJob(this, file); } diff --git a/pipeline/src/org/labkey/pipeline/analysis/FileAnalysisProtocol.java b/pipeline/src/org/labkey/pipeline/analysis/FileAnalysisProtocol.java index 9eae9d0db3f..f79a53aad95 100644 --- a/pipeline/src/org/labkey/pipeline/analysis/FileAnalysisProtocol.java +++ b/pipeline/src/org/labkey/pipeline/analysis/FileAnalysisProtocol.java @@ -23,9 +23,9 @@ import org.labkey.api.pipeline.file.AbstractFileAnalysisProtocol; import org.labkey.api.util.FileType; import org.labkey.api.view.ViewBackgroundInfo; +import org.labkey.vfs.FileLike; import java.io.IOException; -import java.nio.file.Path; import java.util.List; import java.util.Map; @@ -60,16 +60,15 @@ public void setFactory(FileAnalysisProtocolFactory factory) } @Override - public AbstractFileAnalysisJob createPipelineJob(ViewBackgroundInfo info, PipeRoot root, List filesInput, - Path fileParameters, @Nullable Map variableMap + public AbstractFileAnalysisJob createPipelineJob(ViewBackgroundInfo info, PipeRoot root, List filesInput, + FileLike fileParameters, @Nullable Map variableMap ) throws IOException { TaskId id = _factory.getPipeline().getId(); boolean splittable = 
_factory.getPipeline().isSplittable(); - boolean writeJobInfoFile = _factory.getPipeline().isWriteJobInfoFile(); return new FileAnalysisJob(this, FileAnalysisPipelineProvider.name, info, root, - id, getName(), fileParameters, filesInput, variableMap, splittable, writeJobInfoFile); + id, getName(), fileParameters, filesInput, variableMap, splittable); } } diff --git a/pipeline/src/org/labkey/pipeline/api/ParamParserImpl.java b/pipeline/src/org/labkey/pipeline/api/ParamParserImpl.java index b429281159e..64510b96d49 100644 --- a/pipeline/src/org/labkey/pipeline/api/ParamParserImpl.java +++ b/pipeline/src/org/labkey/pipeline/api/ParamParserImpl.java @@ -1,389 +1,393 @@ -/* - * Copyright (c) 2008-2017 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.labkey.pipeline.api; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.labkey.api.pipeline.ParamParser; -import org.labkey.api.util.StringUtilsLabKey; -import org.labkey.api.util.XmlBeansUtil; -import org.w3c.dom.Document; -import org.w3c.dom.Element; -import org.w3c.dom.Node; -import org.w3c.dom.NodeList; -import org.xml.sax.InputSource; -import org.xml.sax.SAXParseException; - -import javax.xml.parsers.DocumentBuilder; -import javax.xml.transform.OutputKeys; -import javax.xml.transform.Transformer; -import javax.xml.transform.TransformerException; -import javax.xml.transform.TransformerFactory; -import javax.xml.transform.dom.DOMSource; -import javax.xml.transform.stream.StreamResult; -import java.io.BufferedWriter; -import java.io.ByteArrayInputStream; -import java.io.File; -import java.io.FileWriter; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.io.StringWriter; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -/** - * InputParser is used to parse a set of name-value pair - * input parameters from a XML document. The document uses the BioML - * document format, with values expressed as <note> tags, as in - * X! Tandem input files. - * - * See the X! 
Tandem API at: - * - * http://www.thegpm.org/TANDEM/api/index.html - * - * @author brendanx - */ - -public class ParamParserImpl implements ParamParser -{ - private static final Logger _log = LogManager.getLogger(ParamParserImpl.class); - - private static final String TAG_BIOML = "bioml"; - private static final String TAG_NOTE = "note"; - private static final String ATTR_LABEL = "label"; - private static final String ATTR_TYPE = "type"; - private static final String VAL_INPUT = "input"; - - private Document _doc; - private Validator _validator; - private List _errors; - - static public class ErrorImpl implements Error - { - String _message; - int _line; - int _column; - - public ErrorImpl(String message) - { - this(message, 0, 0); - } - - public ErrorImpl(String message, int line, int column) - { - _message = message; - _line = line; - _column = column; - } - - public ErrorImpl(SAXParseException spe) - { - this(spe.getLocalizedMessage(), spe.getLineNumber(), spe.getColumnNumber()); - } - - @Override - public String getMessage() - { - return _message; - } - - @Override - public int getLine() - { - return _line; - } - - @Override - public int getColumn() - { - return _column; - } - } - - protected void addError(Error error) - { - if (_errors == null) - _errors = new ArrayList<>(); - _errors.add(error); - } - - @Override - public void parse(InputStream inputStream) - { - try - { - DocumentBuilder db = XmlBeansUtil.DOCUMENT_BUILDER_FACTORY.newDocumentBuilder(); - - InputSource source = new InputSource(new InputStreamReader(inputStream)); - _doc = db.parse(source); - _doc.setXmlStandalone(true); // Added to help with new Transformer-based getXML() - validateDocument(); - } - catch (SAXParseException e) - { - // Subtract 1 from the line number, since we added the DOCTYPE line - addError(new ErrorImpl(e.getMessage(), e.getLineNumber(), e.getColumnNumber())); - } - catch (Exception e) - { - addError(new ErrorImpl(e.toString())); - } - finally - { - try { 
inputStream.close(); } catch (IOException ignored) {} - } - } - - @Override - public void setValidator(Validator validator) - { - _validator = validator; - } - - @Override - public void addError(String paramName, String message) - { - // TODO: use the paramName - addError(new ErrorImpl(message)); - } - - @Override - public Error[] getErrors() - { - if (_errors == null || _errors.isEmpty()) - return null; - return _errors.toArray(new ErrorImpl[0]); - } - - @Override - public String getXML() - { - // If nothing parsed yet, return the empty parameter set. - if (_doc == null) - return getXMLFromMap(new HashMap<>()); - - try - { - TransformerFactory factory = TransformerFactory.newInstance(); - Transformer trans = factory.newTransformer(); - trans.setOutputProperty(OutputKeys.INDENT, "yes"); - trans.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "2"); - - //create string from xml tree - StringWriter sw = new StringWriter(); - StreamResult result = new StreamResult(sw); - DOMSource source = new DOMSource(_doc); - trans.transform(source, result); - - return sw.toString(); - } - catch (TransformerException e) - { - _log.error("Failure writing DOM document to string.", e); - } - - return null; - } - - - /** - * Override this function to further validate specific parameters. 
- */ - protected void validateDocument() - { - Element el = _doc.getDocumentElement(); - if (!TAG_BIOML.equals(el.getTagName())) - addError(new ErrorImpl("Root tag name should be 'bioml'")); - NodeList notes = el.getChildNodes(); - for (int i = 0; i < notes.getLength(); i++) - { - Node child = notes.item(i); - if (child.getNodeType() != Node.ELEMENT_NODE) - continue; - Element elNote = (Element) child; - if (!TAG_NOTE.equals(elNote.getNodeName())) - { - addError(new ErrorImpl("Tag '" + elNote.getNodeName() + "' not supported.")); - continue; - } - - String type = elNote.getAttribute(ATTR_TYPE); - if (type == null || type.isEmpty() || "description".equals(type)) - continue; - - if (!VAL_INPUT.equals(type)) - { - addError(new ErrorImpl("Note type '" + type + "' not supported.")); - continue; - } - } - - if (_validator != null) - _validator.validate(this); - } - - @Override - public String getInputParameter(String name) - { - Element el = _doc.getDocumentElement(); - NodeList notes = el.getElementsByTagName("note"); - for (int i = 0; i < notes.getLength(); i++) - { - Element elNote = (Element) notes.item(i); - if (isInputParameterElement(name, elNote)) - return elNote.getTextContent(); - } - return null; - } - - @Override - public void setInputParameter(String name, String value) - { - setInputParameter(name, value, null); - } - - @Override - public void setInputParameter(String name, String value, String before) - { - removeInputParameter(name); - - Element el = _doc.getDocumentElement(); - Element elParameter = _doc.createElement(TAG_NOTE); - elParameter.setAttribute(ATTR_TYPE, VAL_INPUT); - elParameter.setAttribute(ATTR_LABEL, name); - elParameter.setTextContent(value); - - Node beforeNode = null; - if (before != null) - { - NodeList notes = el.getElementsByTagName(TAG_NOTE); - for (int i = 0; i < notes.getLength(); i++) - { - Element elNote = (Element) notes.item(i); - if (isInputParameterElement(name, elNote)) - { - beforeNode = elNote; - break; - } - } - } - - 
if (beforeNode == null) - el.appendChild(elParameter); - else - el.insertBefore(elParameter, beforeNode); - } - - @Override - public void addInputParameters(Map parameters) - { - parameters.forEach((key, value) -> setInputParameter(key, value == null ? null : value.toString())); - } - - @Override - public String removeInputParameter(String name) - { - String value = null; - Element el = _doc.getDocumentElement(); - NodeList notes = el.getElementsByTagName(TAG_NOTE); - for (int i = 0; i < notes.getLength(); i++) - { - Element elNote = (Element) notes.item(i); - if (isInputParameterElement(name, elNote)) - { - value = elNote.getTextContent(); - el.removeChild(elNote); - break; - } - } - return value; - } - - @Override - public String[] getInputParameterNames() - { - ArrayList names = new ArrayList<>(); - Element el = _doc.getDocumentElement(); - NodeList notes = el.getElementsByTagName(TAG_NOTE); - for (int i = 0; i < notes.getLength(); i++) - { - Element elNote = (Element) notes.item(i); - if (VAL_INPUT.equals(elNote.getAttribute(ATTR_TYPE))) - { - names.add(elNote.getAttribute(ATTR_LABEL)); - } - } - return names.toArray(new String[0]); - } - - @Override - public Map getInputParameters() - { - Map parameters = new HashMap<>(); - if (_doc != null) - { - Element el = _doc.getDocumentElement(); - NodeList notes = el.getElementsByTagName(TAG_NOTE); - for (int i = 0; i < notes.getLength(); i++) - { - Element elNote = (Element) notes.item(i); - if (VAL_INPUT.equals(elNote.getAttribute(ATTR_TYPE))) - { - parameters.put(elNote.getAttribute(ATTR_LABEL), elNote.getTextContent()); - } - } - } - - return parameters; - } - - private boolean isInputParameterElement(String name, Element elNote) - { - String type = elNote.getAttribute(ATTR_TYPE); - return (VAL_INPUT.equals(type) && name.equals(elNote.getAttribute(ATTR_LABEL))); - } - - @Override - public String getXMLFromMap(Map params) - { - String xmlEmpty = "\n" + - "\n" + - ""; - parse(new 
ByteArrayInputStream(xmlEmpty.getBytes(StringUtilsLabKey.DEFAULT_CHARSET))); - String[] keys = params.keySet().toArray(new String[0]); - Arrays.sort(keys); - for (String key : keys) - setInputParameter(key, params.get(key)); - - return getXML(); - } - - @Override - public void writeFromMap(Map params, File fileDest) throws IOException - { - try (BufferedWriter inputWriter = new BufferedWriter(new FileWriter(fileDest))) - { - String xml = getXMLFromMap(params); - _log.debug("Writing " + params.size() + " parameters (" + fileDest + "):"); - _log.debug(xml); - inputWriter.write(xml); - } - } -} +/* + * Copyright (c) 2008-2017 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.labkey.pipeline.api; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.labkey.api.pipeline.ParamParser; +import org.labkey.api.util.StringUtilsLabKey; +import org.labkey.api.util.XmlBeansUtil; +import org.w3c.dom.Document; +import org.w3c.dom.Element; +import org.w3c.dom.Node; +import org.w3c.dom.NodeList; +import org.xml.sax.InputSource; +import org.xml.sax.SAXParseException; + +import javax.xml.parsers.DocumentBuilder; +import javax.xml.transform.OutputKeys; +import javax.xml.transform.Transformer; +import javax.xml.transform.TransformerException; +import javax.xml.transform.TransformerFactory; +import javax.xml.transform.dom.DOMSource; +import javax.xml.transform.stream.StreamResult; +import java.io.BufferedWriter; +import java.io.ByteArrayInputStream; +import java.io.File; +import java.io.FileWriter; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.StringWriter; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * InputParser is used to parse a set of name-value pair + * input parameters from a XML document. The document uses the BioML + * document format, with values expressed as <note> tags, as in + * X! Tandem input files. + * + * See the X! 
Tandem API at: + * + * http://www.thegpm.org/TANDEM/api/index.html + * + * @author brendanx + */ + +public class ParamParserImpl implements ParamParser +{ + private static final Logger _log = LogManager.getLogger(ParamParserImpl.class); + + private static final String TAG_BIOML = "bioml"; + private static final String TAG_NOTE = "note"; + private static final String ATTR_LABEL = "label"; + private static final String ATTR_TYPE = "type"; + private static final String VAL_INPUT = "input"; + + private Document _doc; + private Validator _validator; + private List _errors; + + static public class ErrorImpl implements Error + { + String _message; + int _line; + int _column; + + public ErrorImpl(String message) + { + this(message, 0, 0); + } + + public ErrorImpl(String message, int line, int column) + { + _message = message; + _line = line; + _column = column; + } + + public ErrorImpl(SAXParseException spe) + { + this(spe.getLocalizedMessage(), spe.getLineNumber(), spe.getColumnNumber()); + } + + @Override + public String getMessage() + { + return _message; + } + + @Override + public int getLine() + { + return _line; + } + + @Override + public int getColumn() + { + return _column; + } + } + + protected void addError(Error error) + { + if (_errors == null) + _errors = new ArrayList<>(); + _errors.add(error); + } + + @Override + public void parse(InputStream inputStream) + { + if (inputStream != null) + { + try (inputStream) + { + try + { + DocumentBuilder db = XmlBeansUtil.DOCUMENT_BUILDER_FACTORY.newDocumentBuilder(); + + InputSource source = new InputSource(new InputStreamReader(inputStream)); + _doc = db.parse(source); + _doc.setXmlStandalone(true); // Added to help with new Transformer-based getXML() + validateDocument(); + } + catch (SAXParseException e) + { + // Subtract 1 from the line number, since we added the DOCTYPE line + addError(new ErrorImpl(e.getMessage(), e.getLineNumber(), e.getColumnNumber())); + } + catch (Exception e) + { + addError(new 
ErrorImpl(e.toString())); + } + } + catch (IOException ignored) + { + } + } + } + + @Override + public void setValidator(Validator validator) + { + _validator = validator; + } + + @Override + public void addError(String paramName, String message) + { + // TODO: use the paramName + addError(new ErrorImpl(message)); + } + + @Override + public Error[] getErrors() + { + if (_errors == null || _errors.isEmpty()) + return null; + return _errors.toArray(new Error[0]); + } + + @Override + public String getXML() + { + // If nothing parsed yet, return the empty parameter set. + if (_doc == null) + return getXMLFromMap(new HashMap<>()); + + try + { + TransformerFactory factory = TransformerFactory.newInstance(); + Transformer trans = factory.newTransformer(); + trans.setOutputProperty(OutputKeys.INDENT, "yes"); + trans.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "2"); + + //create string from xml tree + StringWriter sw = new StringWriter(); + StreamResult result = new StreamResult(sw); + DOMSource source = new DOMSource(_doc); + trans.transform(source, result); + + return sw.toString(); + } + catch (TransformerException e) + { + _log.error("Failure writing DOM document to string.", e); + } + + return null; + } + + + /** + * Override this function to further validate specific parameters. 
+ */ + protected void validateDocument() + { + Element el = _doc.getDocumentElement(); + if (!TAG_BIOML.equals(el.getTagName())) + addError(new ErrorImpl("Root tag name should be 'bioml'")); + NodeList notes = el.getChildNodes(); + for (int i = 0; i < notes.getLength(); i++) + { + Node child = notes.item(i); + if (child.getNodeType() != Node.ELEMENT_NODE) + continue; + Element elNote = (Element) child; + if (!TAG_NOTE.equals(elNote.getNodeName())) + { + addError(new ErrorImpl("Tag '" + elNote.getNodeName() + "' not supported.")); + continue; + } + + String type = elNote.getAttribute(ATTR_TYPE); + if (type == null || type.isEmpty() || "description".equals(type)) + continue; + + if (!VAL_INPUT.equals(type)) + { + addError(new ErrorImpl("Note type '" + type + "' not supported.")); + } + } + + if (_validator != null) + _validator.validate(this); + } + + @Override + public String getInputParameter(String name) + { + Element el = _doc.getDocumentElement(); + NodeList notes = el.getElementsByTagName("note"); + for (int i = 0; i < notes.getLength(); i++) + { + Element elNote = (Element) notes.item(i); + if (isInputParameterElement(name, elNote)) + return elNote.getTextContent(); + } + return null; + } + + @Override + public void setInputParameter(String name, String value) + { + setInputParameter(name, value, null); + } + + @Override + public void setInputParameter(String name, String value, String before) + { + removeInputParameter(name); + + Element el = _doc.getDocumentElement(); + Element elParameter = _doc.createElement(TAG_NOTE); + elParameter.setAttribute(ATTR_TYPE, VAL_INPUT); + elParameter.setAttribute(ATTR_LABEL, name); + elParameter.setTextContent(value); + + Node beforeNode = null; + if (before != null) + { + NodeList notes = el.getElementsByTagName(TAG_NOTE); + for (int i = 0; i < notes.getLength(); i++) + { + Element elNote = (Element) notes.item(i); + if (isInputParameterElement(name, elNote)) + { + beforeNode = elNote; + break; + } + } + } + + if 
(beforeNode == null) + el.appendChild(elParameter); + else + el.insertBefore(elParameter, beforeNode); + } + + @Override + public void addInputParameters(Map parameters) + { + parameters.forEach((key, value) -> setInputParameter(key, value == null ? null : value.toString())); + } + + @Override + public String removeInputParameter(String name) + { + String value = null; + Element el = _doc.getDocumentElement(); + NodeList notes = el.getElementsByTagName(TAG_NOTE); + for (int i = 0; i < notes.getLength(); i++) + { + Element elNote = (Element) notes.item(i); + if (isInputParameterElement(name, elNote)) + { + value = elNote.getTextContent(); + el.removeChild(elNote); + break; + } + } + return value; + } + + @Override + public String[] getInputParameterNames() + { + ArrayList names = new ArrayList<>(); + Element el = _doc.getDocumentElement(); + NodeList notes = el.getElementsByTagName(TAG_NOTE); + for (int i = 0; i < notes.getLength(); i++) + { + Element elNote = (Element) notes.item(i); + if (VAL_INPUT.equals(elNote.getAttribute(ATTR_TYPE))) + { + names.add(elNote.getAttribute(ATTR_LABEL)); + } + } + return names.toArray(new String[0]); + } + + @Override + public Map getInputParameters() + { + Map parameters = new HashMap<>(); + if (_doc != null) + { + Element el = _doc.getDocumentElement(); + NodeList notes = el.getElementsByTagName(TAG_NOTE); + for (int i = 0; i < notes.getLength(); i++) + { + Element elNote = (Element) notes.item(i); + if (VAL_INPUT.equals(elNote.getAttribute(ATTR_TYPE))) + { + parameters.put(elNote.getAttribute(ATTR_LABEL), elNote.getTextContent()); + } + } + } + + return parameters; + } + + private boolean isInputParameterElement(String name, Element elNote) + { + String type = elNote.getAttribute(ATTR_TYPE); + return (VAL_INPUT.equals(type) && name.equals(elNote.getAttribute(ATTR_LABEL))); + } + + @Override + public String getXMLFromMap(Map params) + { + String xmlEmpty = "\n" + + "\n" + + ""; + parse(new 
ByteArrayInputStream(xmlEmpty.getBytes(StringUtilsLabKey.DEFAULT_CHARSET))); + String[] keys = params.keySet().toArray(new String[0]); + Arrays.sort(keys); + for (String key : keys) + setInputParameter(key, params.get(key)); + + return getXML(); + } + + @Override + public void writeFromMap(Map params, File fileDest) throws IOException + { + try (BufferedWriter inputWriter = new BufferedWriter(new FileWriter(fileDest))) + { + String xml = getXMLFromMap(params); + _log.debug("Writing " + params.size() + " parameters (" + fileDest + "):"); + _log.debug(xml); + inputWriter.write(xml); + } + } +} diff --git a/pipeline/src/org/labkey/pipeline/api/PipeRootImpl.java b/pipeline/src/org/labkey/pipeline/api/PipeRootImpl.java index 51af9631832..26f52efb8fd 100644 --- a/pipeline/src/org/labkey/pipeline/api/PipeRootImpl.java +++ b/pipeline/src/org/labkey/pipeline/api/PipeRootImpl.java @@ -55,6 +55,7 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.Objects; public class PipeRootImpl implements PipeRoot { @@ -146,44 +147,33 @@ public PipeRootImpl(PipelineRoot root) @Override @NotNull - public File ensureSystemDirectory() + public FileLike ensureSystemDirectory() { - Path path = ensureSystemDirectoryPath(); - if (FileUtil.hasCloudScheme(path)) - throw new RuntimeException("System Dir is not on file system."); - return path.toFile(); - } - - @Override - @NotNull - public Path ensureSystemDirectoryPath() - { - Path root = getRootNioPath(); - Path systemDir = root.resolve(SYSTEM_DIRECTORY_NAME); - if (!Files.exists(systemDir)) + FileLike root = getRootFileLike(); + FileLike systemDir = root.resolveChild(SYSTEM_DIRECTORY_NAME); + if (!systemDir.exists()) { try { FileUtil.createDirectories(systemDir); - - Path systemDirLegacy = root.resolve(SYSTEM_DIRECTORY_LEGACY); - if (Files.exists(systemDirLegacy)) + FileLike systemDirLegacy = root.resolveChild(SYSTEM_DIRECTORY_LEGACY); + if (systemDirLegacy.exists() && !isCloudRoot()) { // Legacy 
means it must be on file system - File legacyDir = systemDirLegacy.toFile(); - for (File f : legacyDir.listFiles()) - f.renameTo(systemDir.toFile()); + File sysDir = systemDirLegacy.toNioPathForRead().toFile(); + File legacyDir = systemDirLegacy.toNioPathForWrite().toFile(); + for (File f : Objects.requireNonNullElse(legacyDir.listFiles(),new File[0])) + f.renameTo(sysDir); } for (PipelineProvider provider : PipelineService.get().getPipelineProviders()) - provider.initSystemDirectory(root, systemDir); + provider.initSystemDirectory(root.toNioPathForWrite(), systemDir.toNioPathForWrite()); } catch (IOException e) { throw new RuntimeException(e); } } - return systemDir; } @@ -227,7 +217,11 @@ public Path getRootNioPath() @Override public @NotNull FileLike getRootFileLike() { - return new FileSystemLike.Builder(getRootPath()).readwrite().root(); + var ret = resolvePathToFileLike(""); + // this should not return null unless there a configuration problem. + if (null == ret) + throw new IllegalStateException("Could not resolve pipeline path."); + return ret; } @Override @@ -384,6 +378,15 @@ public File resolvePath(org.labkey.api.util.Path path) { var parsedPath = org.labkey.api.util.Path.parse(relativePath); + if (ROOT_BASE.cloud.equals(_defaultRoot)) + { + // Return the path to the default location + var combinedPath = StringUtils.isNotBlank(_uris.get(0).getPath()) ? 
+ org.labkey.api.util.Path.parse(_uris.get(0).getPath()).append(parsedPath) : + parsedPath; + return CloudStoreService.get().getFileLike(getContainer(), _cloudStoreName, combinedPath); + } + var pair = _resolveRoot(parsedPath); if (null == pair) return null; @@ -479,28 +482,22 @@ public Path resolveToNioPathFromUrl(String url) return null; } - /** - * Get a local directory that can be used for importing (Read/Write) - * - * Cloud: Uses temp directory - * Default: Uses file root - */ @Override @NotNull - public File getImportDirectory() + public FileLike getImportDirectory() { // If pipeline root is in File system, return that; otherwise return temp directory - File root = isCloudRoot() ? - FileUtil.getTempDirectory() : - getRootPath(); - return FileUtil.appendName(root, PipelineService.UNZIP_DIR); + FileLike root = isCloudRoot() ? + FileUtil.getTempDirectoryFileLike() : + getRootFileLike(); + return root.resolveChild(PipelineService.UNZIP_DIR); } @Override - public Path deleteImportDirectory(@Nullable Logger logger) throws DirectoryNotDeletedException + public FileLike deleteImportDirectory(@Nullable Logger logger) throws DirectoryNotDeletedException { - Path importDir = getImportDirectory().toPath(); - if (Files.exists(importDir) && !FileUtil.deleteDir(importDir, logger)) + FileLike importDir = getImportDirectory(); + if (importDir.exists() && !FileUtil.deleteDir(importDir, logger)) { throw new DirectoryNotDeletedException("Could not delete the directory \"" + PipelineService.UNZIP_DIR + "\""); } diff --git a/pipeline/src/org/labkey/pipeline/api/PipelineManager.java b/pipeline/src/org/labkey/pipeline/api/PipelineManager.java index c1b2f30b6e3..44f2b9682bb 100644 --- a/pipeline/src/org/labkey/pipeline/api/PipelineManager.java +++ b/pipeline/src/org/labkey/pipeline/api/PipelineManager.java @@ -82,10 +82,10 @@ import org.labkey.folder.xml.FolderDocument; import org.labkey.pipeline.query.TriggerConfigurationsTable; import org.labkey.pipeline.status.StatusController; 
+import org.labkey.vfs.FileLike; import org.springframework.validation.BindException; import org.springframework.validation.Errors; -import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; @@ -149,11 +149,11 @@ public static PipelineRoot findPipelineRoot(@NotNull Container container, String } - static public PipelineRoot[] getPipelineRoots(String type) + static public List getPipelineRoots(String type) { SimpleFilter filter = new SimpleFilter(FieldKey.fromParts("Type"), type); - return new TableSelector(pipeline.getTableInfoPipelineRoots(), filter, null).getArray(PipelineRoot.class); + return new TableSelector(pipeline.getTableInfoPipelineRoots(), filter, null).getArrayList(PipelineRoot.class); } static public void setPipelineRoot(User user, Container container, URI[] roots, String type, @@ -809,32 +809,27 @@ else if (!pipeRoot.isCloudRoot() && !pipeRoot.isUnderRoot(archiveFile)) // T } @Nullable - private static Path expandZipLocally(PipeRoot pipelineRoot, Path archiveFile, BindException errors) + private static FileLike expandZipLocally(PipeRoot pipelineRoot, FileLike archiveFile, BindException errors) { try { // check if the archive file already exists in the unzip dir of this pipeline root - Path importDir = pipelineRoot.getImportDirectory().toPath(); - if (!archiveFile.getParent().toAbsolutePath().toString().equalsIgnoreCase(importDir.toAbsolutePath().toString())) + FileLike importDir = pipelineRoot.getImportDirectory(); + if (!archiveFile.getParent().equals(importDir)) importDir = pipelineRoot.deleteImportDirectory(null); - boolean shouldUnzip = Files.notExists(importDir); + boolean shouldUnzip = !importDir.exists(); if (!shouldUnzip) { - try (Stream pathStream = Files.list(importDir)) - { - shouldUnzip = pathStream.noneMatch(s -> s.getFileName().toString().equalsIgnoreCase(archiveFile.getFileName().toString())); - } + Stream pathStream = importDir.getChildren().stream(); + shouldUnzip = 
pathStream.noneMatch(s -> s.getName().equalsIgnoreCase(archiveFile.getName())); } if (shouldUnzip) { // Only unzip once - try (InputStream is = Files.newInputStream(archiveFile)) - { - ZipUtil.unzipToDirectory(is, importDir); - } + ZipUtil.unzipToDirectory(archiveFile, importDir); } return importDir; @@ -856,13 +851,13 @@ private static Path expandZipLocally(PipeRoot pipelineRoot, Path archiveFile, Bi return null; } - private static Path getImportXmlFile(@NotNull PipeRoot pipelineRoot, @NotNull Path archiveFile, @NotNull String xmlFileName, BindException errors) throws InvalidFileException + private static FileLike getImportXmlFile(@NotNull PipeRoot pipelineRoot, @NotNull FileLike archiveFile, @NotNull String xmlFileName, BindException errors) throws InvalidFileException { - Path xmlFile = archiveFile; + FileLike xmlFile = archiveFile; - if (archiveFile.getFileName().toString().toLowerCase().endsWith(".zip")) + if (archiveFile.getName().toLowerCase().endsWith(".zip")) { - Path importDir = expandZipLocally(pipelineRoot, archiveFile, errors); + FileLike importDir = expandZipLocally(pipelineRoot, archiveFile, errors); if (importDir != null) { xmlFile = getXmlFilePathFromArchive(importDir, archiveFile, xmlFileName); @@ -874,42 +869,42 @@ private static Path getImportXmlFile(@NotNull PipeRoot pipelineRoot, @NotNull Pa return xmlFile; } - public static @NotNull Path getXmlFilePathFromArchive(@NotNull Path importDir, Path archiveFile, @NotNull String xmlFileName) throws InvalidFileException + public static @NotNull FileLike getXmlFilePathFromArchive(@NotNull FileLike importDir, FileLike archiveFile, @NotNull String xmlFileName) throws InvalidFileException { // when importing a folder archive for a study, the study.xml file may not be at the root - if ("study.xml".equalsIgnoreCase(xmlFileName) && archiveFile.getFileName().toString().toLowerCase().endsWith(".folder.zip")) + if ("study.xml".equalsIgnoreCase(xmlFileName) && 
archiveFile.getName().toLowerCase().endsWith(".folder.zip")) { - File folderXml = new File(importDir.toFile(), "folder.xml"); + FileLike folderXml = importDir.resolveChild("folder.xml"); FolderDocument folderDoc; - try + try (InputStream in = folderXml.openInputStream()) { - folderDoc = FolderDocument.Factory.parse(folderXml, XmlBeansUtil.getDefaultParseOptions()); + folderDoc = FolderDocument.Factory.parse(in, XmlBeansUtil.getDefaultParseOptions()); XmlBeansUtil.validateXmlDocument(folderDoc, xmlFileName); } catch (Exception e) { - throw new InvalidFileException(folderXml.getParentFile().toPath(), folderXml.toPath(), e); + throw new InvalidFileException(folderXml.toString(), e); } if (folderDoc.getFolder().isSetStudy()) { - importDir = importDir.resolve(folderDoc.getFolder().getStudy().getDir()); + importDir = importDir.resolveFile(org.labkey.api.util.Path.parse(folderDoc.getFolder().getStudy().getDir())); } } - return importDir.toAbsolutePath().resolve(xmlFileName); + return importDir.resolveChild(xmlFileName); } - public static Path getArchiveXmlFile(Container container, Path archiveFile, String xmlFileName, BindException errors) throws InvalidFileException + public static FileLike getArchiveXmlFile(Container container, FileLike archiveFile, String xmlFileName, BindException errors) throws InvalidFileException { PipeRoot pipelineRoot = PipelineService.get().findPipelineRoot(container); - Path xmlFile = getImportXmlFile(pipelineRoot, archiveFile, xmlFileName, errors); + FileLike xmlFile = getImportXmlFile(pipelineRoot, archiveFile, xmlFileName, errors); // if this is an import from a source template folder that has been previously implicitly exported // to the unzip dir (without ever creating a zip file) then just look there for the xmlFile. 
- if (pipelineRoot != null && Files.isDirectory(archiveFile)) + if (pipelineRoot != null && archiveFile.isDirectory()) { - xmlFile = java.nio.file.Path.of(archiveFile.toString(), xmlFileName); + xmlFile = archiveFile.resolveChild(xmlFileName); } return xmlFile; diff --git a/pipeline/src/org/labkey/pipeline/api/PipelineServiceImpl.java b/pipeline/src/org/labkey/pipeline/api/PipelineServiceImpl.java index 3f2e1db3837..41a86ec5bfb 100644 --- a/pipeline/src/org/labkey/pipeline/api/PipelineServiceImpl.java +++ b/pipeline/src/org/labkey/pipeline/api/PipelineServiceImpl.java @@ -89,6 +89,7 @@ import org.labkey.pipeline.mule.ResumableDescriptor; import org.labkey.pipeline.status.PipelineQueryView; import org.labkey.pipeline.trigger.PipelineTriggerManager; +import org.labkey.vfs.FileLike; import org.mule.MuleManager; import org.mule.umo.UMODescriptor; import org.mule.umo.UMOException; @@ -320,10 +321,8 @@ public boolean hasValidPipelineRoot(Container container) @Override public Map getAllPipelineRoots() { - PipelineRoot[] pipelines = PipelineManager.getPipelineRoots(PRIMARY_ROOT); - Map result = new HashMap<>(); - for (PipelineRoot pipeline : pipelines) + for (PipelineRoot pipeline : PipelineManager.getPipelineRoots(PRIMARY_ROOT)) { PipeRoot p = new PipeRootImpl(pipeline); if (p.getContainer() != null) @@ -456,7 +455,7 @@ public List getClusterStartupArguments() { List args = new ArrayList<>(); args.add(System.getProperty("java.home") + "/bin/java" + (SystemUtils.IS_OS_WINDOWS ? 
".exe" : "")); - File labkeyBootstrap = new File(new File(System.getProperty("catalina.home")), "labkeyBootstrap.jar"); + File labkeyBootstrap = FileUtil.appendName(new File(System.getProperty("catalina.home")), "labkeyBootstrap.jar"); if (!labkeyBootstrap.exists()) { @@ -754,7 +753,7 @@ public void setTriggeredTime(Container container, User user, int triggerConfigId } @Override - public boolean runFolderImportJob(Container c, User user, ActionURL url, Path folderXml, String originalFilename, PipeRoot pipelineRoot, ImportOptions options) + public boolean runFolderImportJob(Container c, User user, ActionURL url, FileLike folderXml, String originalFilename, PipeRoot pipelineRoot, ImportOptions options) { try { @@ -814,7 +813,7 @@ public boolean runGenerateFolderArchiveAndImportJob(Container c, User user, Acti public boolean runGenerateFolderArchiveAndImportJob(Container c, User user, ActionURL url, ImportOptions options) { PipeRoot pipelineRoot = PipelineService.get().findPipelineRoot(c); - Path folderXml = new File(pipelineRoot.getRootPath(), "folder.xml").toPath(); + FileLike folderXml = pipelineRoot.resolvePathToFileLike("folder.xml"); return runFolderImportJob(c, user, null, folderXml, "folder.xml", pipelineRoot, options); } @@ -848,10 +847,10 @@ public PathAnalysisProperties getFileAnalysisProperties(Container c, String task if (pr == null || !pr.isValid()) throw new NotFoundException(); - Path dirData = null; + FileLike dirData = null; if (path != null) { - dirData = pr.resolveToNioPath(path); + dirData = pr.resolvePathToFileLike(path); if (!NetworkDrive.exists(dirData)) throw new NotFoundException("Could not resolve path: " + path); } @@ -887,7 +886,7 @@ public String startFileAnalysis(AnalyzeForm form, @Nullable Map TaskPipeline taskPipeline = PipelineJobService.get().getTaskPipeline(form.getTaskId()); PathAnalysisProperties props = getFileAnalysisProperties(context.getContainer(), form.getTaskId(), form.getPath()); PipeRoot root = props.getPipeRoot(); - Path 
dirData = props.getDirData(); + FileLike dirData = props.getDirData(); AbstractFileAnalysisProtocolFactory factory = props.getFactory(); if (dirData == null) @@ -897,10 +896,10 @@ public String startFileAnalysis(AnalyzeForm form, @Nullable Map if (taskPipeline.isUseUniqueAnalysisDirectory()) { - dirData = FileUtil.appendName(dirData, form.getProtocolName() + "_" + FileUtil.getTimestamp()); - if (!Files.exists(FileUtil.createDirectories(dirData))) + dirData = dirData.resolveChild(form.getProtocolName() + "_" + FileUtil.getTimestamp()); + if (!FileUtil.createDirectory(dirData).exists()) { - throw new IOException("Failed to create unique analysis directory: " + FileUtil.getAbsoluteCaseSensitiveFile(dirData.toFile()).getAbsolutePath()); + throw new IOException("Failed to create unique analysis directory: " + FileUtil.getAbsoluteCaseSensitiveFile(dirData)); } } AbstractFileAnalysisProtocol protocol = factory.getProtocol(root, dirData, form.getProtocolName(), false); @@ -964,16 +963,16 @@ public String startFileAnalysis(AnalyzeForm form, @Nullable Map protocol.getFactory().ensureDefaultParameters(root); - Path fileParameters = protocol.getParametersFile(dirData, root); + FileLike fileParameters = protocol.getParametersFile(dirData, root); // Make sure configure.xml file exists for the job when it runs. - if (fileParameters != null && !Files.exists(fileParameters)) + if (fileParameters != null && !fileParameters.exists()) { protocol.setEmail(context.getUser().getEmail()); protocol.saveInstance(fileParameters, context.getContainer()); } boolean allowNonExistentFiles = form.isAllowNonExistentFiles() != null ? 
form.isAllowNonExistentFiles() : false; - List filesInputList = form.getValidatedPaths(context.getContainer(), allowNonExistentFiles); + List filesInputList = form.getValidatedFiles(context.getContainer(), allowNonExistentFiles); if (form.isActiveJobs()) { @@ -982,17 +981,17 @@ public String startFileAnalysis(AnalyzeForm form, @Nullable Map if (taskPipeline.isUseUniqueAnalysisDirectory()) { - for (Path inputFile : filesInputList) + for (FileLike inputFile : filesInputList) { try { - Files.move(inputFile, FileUtil.appendName(dirData, inputFile.getFileName().toString())); + Files.move(inputFile.toNioPathForWrite(), dirData.resolveChild(inputFile.getName()).toNioPathForWrite()); } catch (IOException e) { if (!allowNonExistentFiles) { - throw new IOException("Failed to move input file into unique directory: " + FileUtil.getAbsoluteCaseSensitivePath(context.getContainer(), inputFile).toAbsolutePath()); + throw new IOException("Failed to move input file into unique directory: " + FileUtil.getAbsoluteCaseSensitiveFile(inputFile)); } } } diff --git a/pipeline/src/org/labkey/pipeline/importer/FolderImportJob.java b/pipeline/src/org/labkey/pipeline/importer/FolderImportJob.java index 3e61809e97c..3be6e519445 100644 --- a/pipeline/src/org/labkey/pipeline/importer/FolderImportJob.java +++ b/pipeline/src/org/labkey/pipeline/importer/FolderImportJob.java @@ -39,6 +39,7 @@ import org.labkey.api.view.ViewBackgroundInfo; import org.labkey.api.writer.FileSystemFile; import org.labkey.api.writer.VirtualFile; +import org.labkey.vfs.FileLike; import java.nio.file.Path; @@ -69,10 +70,10 @@ protected FolderImportJob(@JsonProperty("_ctx") FolderImportContext ctx, @JsonPr _ctx.setLoggerGetter(new PipelineJobLoggerGetter(this)); } - public FolderImportJob(Container c, User user, ActionURL url, Path folderXml, String originalFilename, PipeRoot pipeRoot, ImportOptions options) + public FolderImportJob(Container c, User user, ActionURL url, FileLike folderXml, String originalFilename, PipeRoot 
pipeRoot, ImportOptions options) { super("FolderImport", new ViewBackgroundInfo(c, user, url), pipeRoot); - _root = new FileSystemFile(folderXml.getParent()); + _root = new FileSystemFile(folderXml.getParent().toNioPathForRead()); _originalFilename = originalFilename; _folderArchiveSourceName = options.getFolderArchiveSourceName(); // Optional FolderArchiveSource name. If non-null, will be invoked to generate the archive before import. setupLocalDirectoryAndJobLog(pipeRoot, "FolderImport", FolderImportProvider.generateLogFilename("folder_load")); @@ -112,7 +113,7 @@ public String getFolderArchiveSourceName() } @Override - public TaskPipeline getTaskPipeline() + public TaskPipeline getTaskPipeline() { return PipelineJobService.get().getTaskPipeline(new TaskId(FolderImportJob.class)); } diff --git a/pipeline/src/org/labkey/pipeline/importer/FolderImportTask.java b/pipeline/src/org/labkey/pipeline/importer/FolderImportTask.java index e31bf15e830..2d703a034ef 100644 --- a/pipeline/src/org/labkey/pipeline/importer/FolderImportTask.java +++ b/pipeline/src/org/labkey/pipeline/importer/FolderImportTask.java @@ -75,13 +75,13 @@ public RecordedActionSet run() throws PipelineJobException { FileAnalysisJobSupport support = job.getJobSupport(FileAnalysisJobSupport.class); ImportOptions options = new ImportOptions(job.getContainerId(), job.getUser().getUserId()); - options.setAnalysisDir(support.getDataDirectory().toPath()); + options.setAnalysisDir(support.getDataDirectoryFileLike()); - job = new FolderImportJob(job.getContainer(), job.getUser(), null, support.findInputPath(FOLDER_XML), FOLDER_XML, job.getPipeRoot(), options); + job = new FolderImportJob(job.getContainer(), job.getUser(), null, support.findInputFileLike(FOLDER_XML), FOLDER_XML, job.getPipeRoot(), options); job.setStatus(PipelineJob.TaskStatus.running.toString(), "Starting folder import job", true); importContext = ((FolderImportJob) job).getImportContext(); - vf = new FileSystemFile(support.getDataDirectory()); 
+ vf = new FileSystemFile(support.getDataDirectoryFileLike()); } /* Standard Pipeline triggered job */ else @@ -165,7 +165,7 @@ public Factory() } @Override - public PipelineJob.Task createTask(PipelineJob job) + public FolderImportTask createTask(PipelineJob job) { return new FolderImportTask(this, job); } diff --git a/specimen/src/org/labkey/specimen/actions/SpecimenController.java b/specimen/src/org/labkey/specimen/actions/SpecimenController.java index d3d180977fb..2d05110bb03 100644 --- a/specimen/src/org/labkey/specimen/actions/SpecimenController.java +++ b/specimen/src/org/labkey/specimen/actions/SpecimenController.java @@ -186,6 +186,7 @@ import org.labkey.specimen.view.SpecimenRequestNotificationEmailTemplate; import org.labkey.specimen.view.SpecimenSearchWebPart; import org.labkey.specimen.view.SpecimenWebPart; +import org.labkey.vfs.FileLike; import org.springframework.validation.BindException; import org.springframework.validation.Errors; import org.springframework.validation.ObjectError; @@ -919,7 +920,7 @@ public boolean isMerge() } } - public static void submitSpecimenBatch(Container c, User user, ActionURL url, File f, PipeRoot root, boolean merge) throws IOException + public static void submitSpecimenBatch(Container c, User user, ActionURL url, FileLike f, PipeRoot root, boolean merge) throws IOException { if (null == f || !f.exists() || !f.isFile()) throw new NotFoundException(); @@ -942,7 +943,7 @@ public boolean handlePost(PipelineForm form, BindException errors) throws Except Container c = getContainer(); PipeRoot root = PipelineService.get().findPipelineRoot(c); boolean first = true; - for (File f : form.getValidatedFiles(c)) + for (FileLike f : form.getValidatedFiles(c)) { // Only possibly overwrite when the first archive is loaded: boolean merge = !first || form.isMerge(); @@ -977,13 +978,13 @@ public boolean handlePost(PipelineForm form, BindException errors) throws Except { Container c = getContainer(); String path = form.getPath(); - File 
f = null; + FileLike f = null; PipeRoot root = PipelineService.get().findPipelineRoot(c); if (path != null) { if (root != null) - f = root.resolvePath(path); + f = root.resolvePathToFileLike(path); } submitSpecimenBatch(c, getUser(), getViewContext().getActionURL(), f, root, form.isMerge()); @@ -1083,18 +1084,18 @@ public class ImportSpecimenDataAction extends SimpleViewAction @Override public ModelAndView getView(PipelineForm form, BindException bindErrors) { - List dataFiles = form.getValidatedFiles(getContainer()); + List dataFiles = form.getValidatedFiles(getContainer()); List archives = new ArrayList<>(); List errors = new ArrayList<>(); _filePaths = form.getFile(); - for (File dataFile : dataFiles) + for (FileLike dataFile : dataFiles) { if (null == dataFile || !dataFile.exists() || !dataFile.isFile()) { throw new NotFoundException(); } - if (!dataFile.canRead()) + if (!dataFile.toNioPathForRead().toFile().canRead()) errors.add("Can't read data file: " + dataFile); SpecimenArchive archive = new SpecimenArchive(dataFile); diff --git a/specimen/src/org/labkey/specimen/pipeline/SpecimenArchive.java b/specimen/src/org/labkey/specimen/pipeline/SpecimenArchive.java index dc43d69d266..781b8d8681a 100644 --- a/specimen/src/org/labkey/specimen/pipeline/SpecimenArchive.java +++ b/specimen/src/org/labkey/specimen/pipeline/SpecimenArchive.java @@ -19,8 +19,8 @@ import org.labkey.api.data.Container; import org.labkey.api.study.SpecimenService; import org.labkey.api.study.SpecimenTransform; +import org.labkey.vfs.FileLike; -import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Date; @@ -36,14 +36,14 @@ */ public class SpecimenArchive { - private final File _definitionFile; + private final FileLike _definitionFile; - public SpecimenArchive(File definitionFile) + public SpecimenArchive(FileLike definitionFile) { _definitionFile = definitionFile; } - public File getDefinitionFile() + public FileLike getDefinitionFile() { return 
_definitionFile; } @@ -56,13 +56,13 @@ public List getEntryDescriptions(Container container) throws I { if (transform.getFileType().isType(_definitionFile)) { - entryList.add(new EntryDescription(_definitionFile.getName(), _definitionFile.length(), new Date(_definitionFile.lastModified()))); + entryList.add(new EntryDescription(_definitionFile.getName(), _definitionFile.getSize(), new Date(_definitionFile.getLastModified()))); return entryList; } } // standard non-transformed specimen archive - try (ZipFile zip = new ZipFile(_definitionFile)) + try (ZipFile zip = new ZipFile(_definitionFile.toNioPathForRead().toFile())) { Enumeration entries = zip.entries(); while (entries.hasMoreElements()) diff --git a/specimen/src/org/labkey/specimen/pipeline/SpecimenBatch.java b/specimen/src/org/labkey/specimen/pipeline/SpecimenBatch.java index e072c092d94..b9c6a2acbcd 100644 --- a/specimen/src/org/labkey/specimen/pipeline/SpecimenBatch.java +++ b/specimen/src/org/labkey/specimen/pipeline/SpecimenBatch.java @@ -29,6 +29,7 @@ import org.labkey.api.util.PageFlowUtil; import org.labkey.api.view.ActionURL; import org.labkey.api.view.ViewBackgroundInfo; +import org.labkey.vfs.FileLike; import java.io.File; import java.io.Serializable; @@ -48,7 +49,7 @@ public class SpecimenBatch extends StudyBatch implements Serializable, SpecimenJ // For serialization protected SpecimenBatch() {} - public SpecimenBatch(ViewBackgroundInfo info, File definitionFile, PipeRoot root, boolean merge) + public SpecimenBatch(ViewBackgroundInfo info, FileLike definitionFile, PipeRoot root, boolean merge) { super(info, definitionFile, root); _isMerge = merge; @@ -78,7 +79,7 @@ public ActionURL getStatusHref() @Override public Path getSpecimenArchivePath() { - return _definitionFile.toPath(); + return _definitionFile.toNioPathForRead(); } @Override diff --git a/specimen/src/org/labkey/specimen/pipeline/SpecimenReloadJob.java b/specimen/src/org/labkey/specimen/pipeline/SpecimenReloadJob.java index 
c635f80c900..4ddbbb6e6ac 100644 --- a/specimen/src/org/labkey/specimen/pipeline/SpecimenReloadJob.java +++ b/specimen/src/org/labkey/specimen/pipeline/SpecimenReloadJob.java @@ -22,6 +22,7 @@ import org.labkey.api.study.SpecimenTransform; import org.labkey.api.util.FileUtil; import org.labkey.api.view.ViewBackgroundInfo; +import org.labkey.vfs.FileLike; import java.io.File; import java.io.Serializable; @@ -41,13 +42,13 @@ public SpecimenReloadJob(ViewBackgroundInfo info, PipeRoot root, String transfor { super(info, null, root, false); - File logFile = new File(root.getRootPath(), FileUtil.makeFileNameWithTimestamp("specimen_reload", "log")); + FileLike logFile = root.resolvePathToFileLike(FileUtil.makeFileNameWithTimestamp("specimen_reload", "log")); setLogFile(logFile); _transformName = transformName; } @Override - public void setSpecimenArchive(File archiveFile) + public void setSpecimenArchive(FileLike archiveFile) { _definitionFile = archiveFile; } diff --git a/specimen/src/org/labkey/specimen/pipeline/SpecimenReloadJobSupport.java b/specimen/src/org/labkey/specimen/pipeline/SpecimenReloadJobSupport.java index 42f6a61a49c..2ecc7cd0306 100644 --- a/specimen/src/org/labkey/specimen/pipeline/SpecimenReloadJobSupport.java +++ b/specimen/src/org/labkey/specimen/pipeline/SpecimenReloadJobSupport.java @@ -16,6 +16,7 @@ package org.labkey.specimen.pipeline; import org.labkey.api.study.SpecimenTransform; +import org.labkey.vfs.FileLike; import java.io.File; @@ -24,7 +25,7 @@ */ public interface SpecimenReloadJobSupport extends SpecimenJobSupport { - void setSpecimenArchive(File archiveFile); + void setSpecimenArchive(FileLike archiveFile); String getSpecimenTransform(); diff --git a/specimen/src/org/labkey/specimen/pipeline/SpecimenReloadTask.java b/specimen/src/org/labkey/specimen/pipeline/SpecimenReloadTask.java index 9cd2bf6de14..6a88f0e86a9 100644 --- a/specimen/src/org/labkey/specimen/pipeline/SpecimenReloadTask.java +++ 
b/specimen/src/org/labkey/specimen/pipeline/SpecimenReloadTask.java @@ -58,8 +58,7 @@ public RecordedActionSet run() throws PipelineJobException PipeRoot root = PipelineService.get().findPipelineRoot(job.getContainer()); if (root != null) { - FileLike archiveFileLike = root.getRootFileLike().resolveChild(FileUtil.makeFileNameWithTimestamp("specimen_reload", transform.getFileType().getDefaultSuffix())); - File archive = FileSystemLike.toFile(archiveFileLike); + FileLike archive = root.getRootFileLike().resolveChild(FileUtil.makeFileNameWithTimestamp("specimen_reload", transform.getFileType().getDefaultSuffix())); transform.importFromExternalSource(job, support.getExternalImportConfig(), archive); support.setSpecimenArchive(archive); diff --git a/study/api-src/org/labkey/api/study/pipeline/StudyBatch.java b/study/api-src/org/labkey/api/study/pipeline/StudyBatch.java index 6e2564b018c..fe8382aee8c 100644 --- a/study/api-src/org/labkey/api/study/pipeline/StudyBatch.java +++ b/study/api-src/org/labkey/api/study/pipeline/StudyBatch.java @@ -24,6 +24,7 @@ import org.labkey.api.util.PageFlowUtil; import org.labkey.api.view.ActionURL; import org.labkey.api.view.ViewBackgroundInfo; +import org.labkey.vfs.FileLike; import java.io.File; import java.io.IOException; @@ -36,12 +37,12 @@ */ public abstract class StudyBatch extends PipelineJob implements Serializable { - protected File _definitionFile; + protected FileLike _definitionFile; // For serialization protected StudyBatch() {} - public StudyBatch(ViewBackgroundInfo info, File definitionFile, PipeRoot root) + public StudyBatch(ViewBackgroundInfo info, FileLike definitionFile, PipeRoot root) { super("Study", info, root); _definitionFile = definitionFile; @@ -74,9 +75,4 @@ public void submit() throws IOException throw new IOException(e); } } - - public File getDefinitionFile() - { - return _definitionFile; - } } diff --git a/study/src/org/labkey/study/controllers/StudyController.java 
b/study/src/org/labkey/study/controllers/StudyController.java index 4cd599b94ee..9b07e897a7a 100644 --- a/study/src/org/labkey/study/controllers/StudyController.java +++ b/study/src/org/labkey/study/controllers/StudyController.java @@ -287,6 +287,7 @@ import org.labkey.study.visitmanager.VisitManager; import org.labkey.study.visitmanager.VisitManager.VisitStatistic; import org.labkey.study.xml.DatasetsDocument; +import org.labkey.vfs.FileLike; import org.springframework.validation.BindException; import org.springframework.validation.Errors; import org.springframework.web.servlet.ModelAndView; @@ -3953,7 +3954,7 @@ public void validateCommand(ResetPipelinePathForm form, Errors errors) @Override public boolean handlePost(ResetPipelinePathForm form, BindException errors) throws Exception { - for (File f : form.getValidatedFiles(getContainer())) + for (FileLike f : form.getValidatedFiles(getContainer())) { if (f.isFile() && f.getName().endsWith(".lock")) { @@ -4120,7 +4121,7 @@ public ModelAndView getView(PipelinePathForm form, BindException errors) throws { Container c = getContainer(); - File definitionFile = form.getValidatedSingleFile(c); + File definitionFile = form.getValidatedSingleFile(c).toNioPathForRead().toFile(); path = form.getPath(); if (!path.endsWith("/")) { diff --git a/study/src/org/labkey/study/controllers/publish/PublishController.java b/study/src/org/labkey/study/controllers/publish/PublishController.java index 16a5e29c0e9..c675c1e3aaa 100644 --- a/study/src/org/labkey/study/controllers/publish/PublishController.java +++ b/study/src/org/labkey/study/controllers/publish/PublishController.java @@ -314,7 +314,7 @@ public AutoLinkPipelineJob(ViewBackgroundInfo info, @NotNull PipeRoot pipeRoot, _runIds = form.getRunId(); _autoLinkCategory = form.getAutoLinkCategory(); - setLogFile(FileUtil.appendName(pipeRoot.getRootPath(), FileUtil.makeFileNameWithTimestamp("auto_link_to_study", "log")).toPath()); + 
setLogFile(pipeRoot.resolvePathToFileLike(FileUtil.makeFileNameWithTimestamp("auto_link_to_study", "log"))); } @Override diff --git a/study/src/org/labkey/study/importer/CreateChildStudyPipelineJob.java b/study/src/org/labkey/study/importer/CreateChildStudyPipelineJob.java index c6e959829d7..5c98fa3eee2 100644 --- a/study/src/org/labkey/study/importer/CreateChildStudyPipelineJob.java +++ b/study/src/org/labkey/study/importer/CreateChildStudyPipelineJob.java @@ -327,7 +327,7 @@ public boolean run(ViewContext context) } finally { - if (!success && _destFolderCreated) + if (!success && _destFolderCreated && getDstContainer() != null) ContainerManager.delete(getDstContainer(), getUser()); } diff --git a/study/src/org/labkey/study/importer/StudyImporterFactory.java b/study/src/org/labkey/study/importer/StudyImporterFactory.java index 8519052b6d5..2692e0d40ad 100644 --- a/study/src/org/labkey/study/importer/StudyImporterFactory.java +++ b/study/src/org/labkey/study/importer/StudyImporterFactory.java @@ -155,7 +155,7 @@ public void process(@Nullable PipelineJob job, FolderImportContext ctx, VirtualF if (useLocalImportDir) { //TODO this should be done from the import context getSpecimenArchive specimenFile = job.getPipeRoot().getRootNioPath().relativize(specimenFile); - specimenFile = job.getPipeRoot().getImportDirectory().toPath().resolve(specimenFile); + specimenFile = job.getPipeRoot().getImportDirectory().toNioPathForRead().resolve(specimenFile); } SpecimenMigrationService.get().importSpecimenArchive(specimenFile, job, studyImportContext, false, false); diff --git a/study/src/org/labkey/study/model/StudyManager.java b/study/src/org/labkey/study/model/StudyManager.java index 207daa48924..409da6b5536 100644 --- a/study/src/org/labkey/study/model/StudyManager.java +++ b/study/src/org/labkey/study/model/StudyManager.java @@ -2795,9 +2795,9 @@ private void deleteStudyDesignData(Container c, User user, List study { for (TableInfo tinfo : studyDesignTables) { - if (tinfo 
instanceof FilteredTable) + if (tinfo instanceof FilteredTable ft) { - Table.delete(((FilteredTable)tinfo).getRealTable(), new SimpleFilter(FieldKey.fromParts("Container"), c)); + Table.delete(ft.getRealTable(), new SimpleFilter(FieldKey.fromParts("Container"), c)); } } } diff --git a/study/src/org/labkey/study/visitmanager/PurgeParticipantsJob.java b/study/src/org/labkey/study/visitmanager/PurgeParticipantsJob.java index 118dc10328a..59a7d774e41 100644 --- a/study/src/org/labkey/study/visitmanager/PurgeParticipantsJob.java +++ b/study/src/org/labkey/study/visitmanager/PurgeParticipantsJob.java @@ -29,7 +29,7 @@ public PurgeParticipantsJob() PurgeParticipantsJob(ViewBackgroundInfo info, PipeRoot pipeRoot) { super("StudyParticipantPurge", info, pipeRoot); - setLogFile(pipeRoot.getLogDirectoryFileLike(true).resolveChild(FileUtil.makeFileNameWithTimestamp("purge_participants", "log")).toNioPathForWrite()); + setLogFile(pipeRoot.getLogDirectoryFileLike(true).resolveChild(FileUtil.makeFileNameWithTimestamp("purge_participants", "log"))); } @Override