diff --git a/vcell-core/src/main/java/cbit/vcell/export/server/ASCIIExporter.java b/vcell-core/src/main/java/cbit/vcell/export/server/ASCIIExporter.java
index c6a5da7fef..62809b36b8 100644
--- a/vcell-core/src/main/java/cbit/vcell/export/server/ASCIIExporter.java
+++ b/vcell-core/src/main/java/cbit/vcell/export/server/ASCIIExporter.java
@@ -1269,7 +1269,7 @@ public Collection makeASCIIData(OutputContext outputContext, JobRe
         return switch(asciiSpecs.getDataType()){
             case PDE_VARIABLE_DATA -> exportPDEData(
                     outputContext,
-                    jobRequest.getJobID(),
+                    jobRequest.getExportJobID(),
                     user,
                     dataServerImpl,
                     exportSpecs.getVCDataIdentifier(),
@@ -1282,7 +1282,7 @@ public Collection makeASCIIData(OutputContext outputContext, JobRe
             );
             case ODE_VARIABLE_DATA -> exportODEData(
                     outputContext,
-                    jobRequest.getJobID(),
+                    jobRequest.getExportJobID(),
                     user,
                     dataServerImpl,
                     exportSpecs.getVCDataIdentifier(),
@@ -1293,7 +1293,7 @@ public Collection makeASCIIData(OutputContext outputContext, JobRe
             );
             case PDE_PARTICLE_DATA -> exportParticleData(
                     outputContext,
-                    jobRequest.getJobID(),
+                    jobRequest.getExportJobID(),
                     user,
                     dataServerImpl,
                     exportSpecs,
diff --git a/vcell-core/src/main/java/cbit/vcell/export/server/ExportServiceImpl.java b/vcell-core/src/main/java/cbit/vcell/export/server/ExportServiceImpl.java
index b62904ea54..ee626c9f84 100644
--- a/vcell-core/src/main/java/cbit/vcell/export/server/ExportServiceImpl.java
+++ b/vcell-core/src/main/java/cbit/vcell/export/server/ExportServiceImpl.java
@@ -16,10 +16,7 @@
 import java.io.IOException;
 import java.io.ObjectOutputStream;
 import java.net.MalformedURLException;
-import java.net.URI;
 import java.net.URL;
-import java.net.URLEncoder;
-import java.nio.charset.StandardCharsets;
 import java.util.Collection;
 import java.util.Hashtable;
 import java.util.zip.DataFormatException;
@@ -208,7 +205,7 @@ public ExportEvent makeRemoteFile(OutputContext outputContext,User user, DataSer
         throw new DataAccessException("ERROR: user is null");
     }
     JobRequest newExportJob = JobRequest.createExportJobRequest(user);
-    jobRequestIDs.put(new Long(newExportJob.getJobID()), user);
+    jobRequestIDs.put(new Long(newExportJob.getExportJobID()), user);
     if (lg.isTraceEnabled()) lg.trace("ExportServiceImpl.makeRemoteFile(): " + newExportJob + ", " + exportSpecs);
     String fileFormat = null;
     switch (exportSpecs.getFormat()) {
@@ -236,7 +233,7 @@ public ExportEvent makeRemoteFile(OutputContext outputContext,User user, DataSer
 //            fileFormat = "IMAGEJ";
 //            break;
     }
-    fireExportStarted(newExportJob.getJobID(), exportSpecs.getVCDataIdentifier(), fileFormat);
+    fireExportStarted(newExportJob.getExportJobID(), exportSpecs.getVCDataIdentifier(), fileFormat);

     try {

@@ -281,11 +278,11 @@ public ExportEvent makeRemoteFile(OutputContext outputContext,User user, DataSer
                     exportOutputs[0].writeDataToOutputStream(baos, fileDataContainerManager);//Get location of temp HDF5 file
                     File tempHDF5File = new File(baos.toString());
 //                    File downloadableHDF5File = new File(exportBaseDir +exportOutputs[0].getSimID() + exportOutputs[0].getDataID() + ".hdf5");
-                    File downloadableHDF5File = new File(exportBaseDir + newExportJob.getJobID() + ".hdf5");
+                    File downloadableHDF5File = new File(exportBaseDir + newExportJob.getExportJobID() + ".hdf5");
                     Files.copy(tempHDF5File, downloadableHDF5File);
                     tempHDF5File.delete();
                     URL url = new URL(exportBaseURL + downloadableHDF5File.getName());
-                    return fireExportCompleted(newExportJob.getJobID(), exportSpecs.getVCDataIdentifier(), fileFormat, url.toString(),exportSpecs);
+                    return fireExportCompleted(newExportJob.getExportJobID(), exportSpecs.getVCDataIdentifier(), fileFormat, url.toString(),exportSpecs);
                 }
                 return makeRemoteFile(fileFormat, exportBaseDir, exportBaseURL, exportOutputs, exportSpecs, newExportJob,fileDataContainerManager);
             case QUICKTIME:
@@ -331,7 +328,7 @@ public ExportEvent makeRemoteFile(OutputContext outputContext,User user, DataSer
     } catch (Throwable exc) {
         lg.error(exc.getMessage(), exc);
-        fireExportFailed(newExportJob.getJobID(), exportSpecs.getVCDataIdentifier(), fileFormat, exc.getMessage());
+        fireExportFailed(newExportJob.getExportJobID(), exportSpecs.getVCDataIdentifier(), fileFormat, exc.getMessage());
         throw new DataAccessException(exc.getMessage());
     }
 }
@@ -345,7 +342,7 @@ private ExportEvent makeRemoteFile(String fileFormat, String exportBaseDir, Stri
     boolean exportValid = true;

     // check outputs and package into zip file
-    File zipFile = new File(exportBaseDir + newExportJob.getJobID() + ".zip");
+    File zipFile = new File(exportBaseDir + newExportJob.getExportJobID() + ".zip");
     BufferedOutputStream bout = new BufferedOutputStream(new FileOutputStream(zipFile));
     ZipOutputStream zipOut = new ZipOutputStream(bout);
     try {
@@ -376,7 +373,7 @@ private ExportEvent makeRemoteFile(String fileFormat, String exportBaseDir, Stri
         completedExportRequests.put(exportSpecs, newExportJob);
         if (lg.isTraceEnabled()) lg.trace("ExportServiceImpl.makeRemoteFile(): Successfully exported to file: " + zipFile.getName());
         URL url = new URL(exportBaseURL + zipFile.getName());
-        return fireExportCompleted(newExportJob.getJobID(), exportSpecs.getVCDataIdentifier(), fileFormat, url.toString(),exportSpecs);
+        return fireExportCompleted(newExportJob.getExportJobID(), exportSpecs.getVCDataIdentifier(), fileFormat, url.toString(),exportSpecs);
     }
     else {
         throw new DataFormatException("Export Server could not produce valid data !");
@@ -390,10 +387,10 @@
  */
 private ExportEvent makeRemoteFile(String fileFormat, String exportBaseDir, String exportBaseURL, ExportOutput[] exportOutputs, ExportSpecs exportSpecs, JobRequest newExportJob,FileDataContainerManager fileDataContainerManager) throws DataFormatException, IOException, MalformedURLException {
     boolean exportValid = true;
-    fireExportAssembling(newExportJob.getJobID(), exportSpecs.getVCDataIdentifier(), fileFormat);
+    fireExportAssembling(newExportJob.getExportJobID(), exportSpecs.getVCDataIdentifier(), fileFormat);

     // check outputs and package into zip file
-    File zipFile = new File(exportBaseDir + newExportJob.getJobID() + ".zip");
+    File zipFile = new File(exportBaseDir + newExportJob.getExportJobID() + ".zip");
     FileOutputStream fileOut = new FileOutputStream(zipFile);
     BufferedOutputStream bos = new BufferedOutputStream(fileOut);
     ZipOutputStream zipOut = new ZipOutputStream(bos);
@@ -419,7 +416,7 @@ private ExportEvent makeRemoteFile(String fileFormat, String exportBaseDir, Stri
         completedExportRequests.put(exportSpecs, newExportJob);
         if (lg.isTraceEnabled()) lg.trace("ExportServiceImpl.makeRemoteFile(): Successfully exported to file: " + zipFile.getName());
         URL url = new URL(exportBaseURL + zipFile.getName());
-        return fireExportCompleted(newExportJob.getJobID(), exportSpecs.getVCDataIdentifier(), fileFormat, url.toString(),exportSpecs);
+        return fireExportCompleted(newExportJob.getExportJobID(), exportSpecs.getVCDataIdentifier(), fileFormat, url.toString(),exportSpecs);
     }
     else {
         throw new DataFormatException("Export Server could not produce valid data !");
@@ -437,7 +434,7 @@ private ExportEvent makeRemoteFile_Unzipped(String fileFormat, String exportBase
     {
         //do the first file of exportOutputs separately (for VFRAP, there is only one export output)
         String extStr = "." + fileFormat;
-        File file = new File(exportBaseDir + newExportJob.getJobID() + extStr);
+        File file = new File(exportBaseDir + newExportJob.getExportJobID() + extStr);
         FileOutputStream fileOut = new FileOutputStream(file);
         BufferedOutputStream out= new BufferedOutputStream(fileOut);
         exportOutputs[0].writeDataToOutputStream(out,fileDataContainerManager);
@@ -449,7 +446,7 @@ private ExportEvent makeRemoteFile_Unzipped(String fileFormat, String exportBase
     {
         if (exportOutputs[i].isValid())
         {
-            File moreFile = new File(exportBaseDir + newExportJob.getJobID()+"_"+ i + extStr);
+            File moreFile = new File(exportBaseDir + newExportJob.getExportJobID()+"_"+ i + extStr);
             FileOutputStream moreFileOut = new FileOutputStream(moreFile);
             ObjectOutputStream moreOut= new ObjectOutputStream(moreFileOut);
             exportOutputs[i].writeDataToOutputStream(moreOut,fileDataContainerManager);
@@ -470,7 +467,7 @@ private ExportEvent makeRemoteFile_Unzipped(String fileFormat, String exportBase
         completedExportRequests.put(exportSpecs, newExportJob);
         if (lg.isTraceEnabled()) lg.trace("ExportServiceImpl.makeRemoteFile(): Successfully exported to file: " + fileNames);
         URL url = new URL(exportBaseURL + fileNames);
-        return fireExportCompleted(newExportJob.getJobID(), exportSpecs.getVCDataIdentifier(), fileFormat, url.toString(),exportSpecs);
+        return fireExportCompleted(newExportJob.getExportJobID(), exportSpecs.getVCDataIdentifier(), fileFormat, url.toString(),exportSpecs);
     }
     else {
         throw new DataFormatException("Export Server could not produce valid data !");
@@ -484,9 +481,9 @@ private ExportEvent makeRemoteN5File(String fileFormat, String fileName, ExportO
         String url = PropertyLoader.getRequiredProperty(PropertyLoader.s3ExportBaseURLProperty);
         String uri = url + ":" + PropertyLoader.getRequiredProperty(PropertyLoader.s3ProxyExternalPort) + "/" + n5Exporter.getN5FilePathSuffix();
         N5Specs n5Specs = (N5Specs) exportSpecs.getFormatSpecificSpecs();
-        uri += "?dataSetName=" + URLEncoder.encode(n5Specs.dataSetName, StandardCharsets.UTF_8);
+        uri += "?dataSetName=" + newExportJob.getExportJobID();
         if (lg.isTraceEnabled()) lg.trace("ExportServiceImpl.makeRemoteFile(): Successfully exported to file: " + fileName);
-        return fireExportCompleted(newExportJob.getJobID(), exportSpecs.getVCDataIdentifier(), fileFormat, uri, exportSpecs);
+        return fireExportCompleted(newExportJob.getExportJobID(), exportSpecs.getVCDataIdentifier(), fileFormat, uri, exportSpecs);
     } else {
         throw new DataFormatException("Export Server could not produce valid data !");
diff --git a/vcell-core/src/main/java/cbit/vcell/export/server/IMGExporter.java b/vcell-core/src/main/java/cbit/vcell/export/server/IMGExporter.java
index 8d8eebbe03..e30858827d 100644
--- a/vcell-core/src/main/java/cbit/vcell/export/server/IMGExporter.java
+++ b/vcell-core/src/main/java/cbit/vcell/export/server/IMGExporter.java
@@ -143,7 +143,7 @@ public ExportOutput[] makeMediaData(
     }
     ExportOutput[] mediaArr = null;
     if(particleMode == FormatSpecificSpecs.PARTICLE_NONE){
-        mediaArr = makeMedia(exportServiceImpl,outputContext,jobRequest.getJobID(),user,dataServerImpl,exportSpecs,clientTaskStatusSupport,null/*particleInfo*/,fileDataContainerManager);
+        mediaArr = makeMedia(exportServiceImpl,outputContext,jobRequest.getExportJobID(),user,dataServerImpl,exportSpecs,clientTaskStatusSupport,null/*particleInfo*/,fileDataContainerManager);
     }else if(particleMode == FormatSpecificSpecs.PARTICLE_SELECT){
 //        ParticleInfo particleInfo = checkParticles_unused(exportSpecs,user,dataServerImpl,jobRequest.getJobID());
         try{
diff --git a/vcell-core/src/main/java/cbit/vcell/export/server/JobRequest.java b/vcell-core/src/main/java/cbit/vcell/export/server/JobRequest.java
index cf9594de82..dfcca6d5e5 100644
--- a/vcell-core/src/main/java/cbit/vcell/export/server/JobRequest.java
+++ b/vcell-core/src/main/java/cbit/vcell/export/server/JobRequest.java
@@ -53,7 +53,7 @@ public static JobRequest createExportJobRequest(User user) {
  * Creation date: (4/3/2001 4:21:34 PM)
  * @return long
  */
-public long getJobID() {
+public long getExportJobID() {
     return jobID;
 }
 /**
diff --git a/vcell-core/src/main/java/cbit/vcell/export/server/N5Exporter.java b/vcell-core/src/main/java/cbit/vcell/export/server/N5Exporter.java
index a3c7650bd1..125aae60c7 100644
--- a/vcell-core/src/main/java/cbit/vcell/export/server/N5Exporter.java
+++ b/vcell-core/src/main/java/cbit/vcell/export/server/N5Exporter.java
@@ -21,7 +21,6 @@
 import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
-import org.janelia.saalfeldlab.n5.DataType;
 import org.janelia.saalfeldlab.n5.DatasetAttributes;
 import org.janelia.saalfeldlab.n5.DoubleArrayDataBlock;
 import org.janelia.saalfeldlab.n5.N5FSWriter;
@@ -35,7 +34,6 @@
 import java.security.MessageDigest;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.HashMap;

 public class N5Exporter implements ExportConstants {

@@ -105,15 +103,14 @@ private ExportOutput exportToN5(OutputContext outputContext, long jobID, N5Specs
         // rewrite so that it still results in a tmp file does not raise File already exists error
         N5FSWriter n5FSWriter = new N5FSWriter(getN5FileAbsolutePath(), new GsonBuilder());
         DatasetAttributes datasetAttributes = new DatasetAttributes(dimensions, blockSize, org.janelia.saalfeldlab.n5.DataType.FLOAT64, n5Specs.getCompression());
-        String dataSetName = n5Specs.dataSetName;
-        n5FSWriter.createDataset(dataSetName, datasetAttributes);
-        N5Specs.writeImageJMetaData(dimensions, blockSize, n5Specs.getCompression(),n5FSWriter, dataSetName, numVariables, blockSize[3], allTimes.length, exportSpecs.getHumanReadableExportData().subVolume);
+        n5FSWriter.createDataset(String.valueOf(jobID), datasetAttributes);
+        N5Specs.writeImageJMetaData(jobID, dimensions, blockSize, n5Specs.getCompression(), n5FSWriter, n5Specs.dataSetName, numVariables, blockSize[3], allTimes.length, exportSpecs.getHumanReadableExportData().subVolume);

         //Create mask
         for(int timeIndex = timeSpecs.getBeginTimeIndex(); timeIndex <= timeSpecs.getEndTimeIndex(); timeIndex++){
             int normalizedTimeIndex = timeIndex - timeSpecs.getBeginTimeIndex();
             DoubleArrayDataBlock doubleArrayDataBlock = new DoubleArrayDataBlock(blockSize, new long[]{0, 0, 0, 0, normalizedTimeIndex}, mask);
-            n5FSWriter.writeBlock(dataSetName, datasetAttributes, doubleArrayDataBlock);
+            n5FSWriter.writeBlock(String.valueOf(jobID), datasetAttributes, doubleArrayDataBlock);
         }

         for (int variableIndex=1; variableIndex < numVariables; variableIndex++){
@@ -122,7 +119,7 @@ private ExportOutput exportToN5(OutputContext outputContext, long jobID, N5Specs
                 int normalizedTimeIndex = timeIndex - timeSpecs.getBeginTimeIndex();
                 double[] data = this.dataServer.getSimDataBlock(outputContext, user, this.vcDataID, variableNames[variableIndex - 1], allTimes[timeIndex]).getData();
                 DoubleArrayDataBlock doubleArrayDataBlock = new DoubleArrayDataBlock(blockSize, new long[]{0, 0, variableIndex, 0, (normalizedTimeIndex)}, data);
-                n5FSWriter.writeBlock(dataSetName, datasetAttributes, doubleArrayDataBlock);
+                n5FSWriter.writeBlock(String.valueOf(jobID), datasetAttributes, doubleArrayDataBlock);
                 if(timeIndex % 3 == 0){
                     double progress = (double) (variableIndex + normalizedTimeIndex) / (numVariables + (numTimes * numVariables));
                     exportServiceImpl.fireExportProgress(jobID, vcDataID, N5Specs.n5Suffix.toUpperCase(), progress);
@@ -166,17 +163,11 @@ public String getN5FilePathSuffix(){
     public String getN5FileNameHash(){
         return actualHash(vcDataID.getDataKey().toString(), String.valueOf(vcDataID.getJobIndex()));
     }
-
-    public static String getN5FileNameHash(String simID, String jobID){
-        return actualHash(simID, jobID);
-    }
-
     private static String actualHash(String simID, String jobID) {
         MessageDigest sha256 = DigestUtils.getMd5Digest();
         sha256.update(simID.getBytes(StandardCharsets.UTF_8));
-//        sha256.update(jobID.getBytes(StandardCharsets.UTF_8));
-
-        return Hex.encodeHexString(sha256.digest());
+        String hashString = Hex.encodeHexString(sha256.digest());
+        return hashString.substring(17);
     }

@@ -190,7 +181,7 @@ public ExportOutput makeN5Data(OutputContext outputContext, JobRequest jobReques
         if (formatSpecs instanceof N5Specs n5Specs){
             return exportToN5(
                     outputContext,
-                    jobRequest.getJobID(),
+                    jobRequest.getExportJobID(),
                     n5Specs,
                     exportSpecs,
                     fileDataContainerManager
diff --git a/vcell-core/src/main/java/cbit/vcell/export/server/N5Specs.java b/vcell-core/src/main/java/cbit/vcell/export/server/N5Specs.java
index 2dfd17fa80..9a12889713 100644
--- a/vcell-core/src/main/java/cbit/vcell/export/server/N5Specs.java
+++ b/vcell-core/src/main/java/cbit/vcell/export/server/N5Specs.java
@@ -93,12 +93,12 @@ public String toString() {
         return "N5Specs: [compression: " + format + ", chunking: " + dataType + ", switchRowsColumns: " + "]";
     }

-    public static void writeImageJMetaData(long[] dimensions, int[] blockSize, Compression compression, N5FSWriter n5FSWriter, String datasetName, int numChannels, int zSlices,
+    public static void writeImageJMetaData(long jobID,long[] dimensions, int[] blockSize, Compression compression, N5FSWriter n5FSWriter, String datasetName, int numChannels, int zSlices,
                                            int timeLength, HashMap maskMapping) throws MathException, DataAccessException {
         try {
             HashMap compresssionMap = new HashMap<>(){{put("type", compression.getType().toLowerCase());}};
             ImageJMetaData imageJMetaData = ImageJMetaData.generateDefaultRecord(dimensions, blockSize, compresssionMap, datasetName, numChannels, zSlices, timeLength, maskMapping);
-            Path path = Path.of(n5FSWriter.getURI().getPath(), datasetName, "attributes.json");
+            Path path = Path.of(n5FSWriter.getURI().getPath(), String.valueOf(jobID), "attributes.json");
             Gson gson = n5FSWriter.getGson();
             String jsonRepresentation = gson.toJson(imageJMetaData, ImageJMetaData.class);
             FileWriter fileWriter = new FileWriter(path.toFile());
diff --git a/vcell-core/src/main/java/cbit/vcell/export/server/RasterExporter.java b/vcell-core/src/main/java/cbit/vcell/export/server/RasterExporter.java
index f3eb002e45..0c2b7008c4 100644
--- a/vcell-core/src/main/java/cbit/vcell/export/server/RasterExporter.java
+++ b/vcell-core/src/main/java/cbit/vcell/export/server/RasterExporter.java
@@ -478,7 +478,7 @@ public NrrdInfo[] makeRasterData(OutputContext outputContext,JobRequest jobReque
         throws RemoteException, DataAccessException, IOException {
     return exportPDEData(
             outputContext,
-            jobRequest.getJobID(),
+            jobRequest.getExportJobID(),
             user,
             dataServerImpl,
             exportSpecs.getVCDataIdentifier(),
@@ -862,7 +862,7 @@ public ExportOutput[] makeUCDData(OutputContext outputContext,JobRequest jobRequ
 //    for (int i = 0; i < variableSpecs.getVariableNames().length; i++){
     for (int j = timeSpecs.getBeginTimeIndex(); j <= timeSpecs.getEndTimeIndex(); j++){
         exportServiceImpl.fireExportProgress(
-            jobRequest.getJobID(), vcdID, "UCD",
+            jobRequest.getExportJobID(), vcdID, "UCD",
             (double)(j-timeSpecs.getBeginTimeIndex())/(double)(timeSpecs.getEndTimeIndex()-timeSpecs.getBeginTimeIndex()+1));

 //        String fileID = simID + "_Full_" + formatTime(timeSpecs.getAllTimes()[j]) + "time_" + variableSpecs.getVariableNames().length + "vars";
@@ -967,7 +967,7 @@ public ExportOutput[] makeVTKImageData(OutputContext outputContext,JobRequest jo
     Vector exportOutV = new Vector();
     for (int j = timeSpecs.getBeginTimeIndex(); j <= timeSpecs.getEndTimeIndex(); j++){
         exportServiceImpl.fireExportProgress(
-            jobRequest.getJobID(), vcdID, "VTKIMG",
+            jobRequest.getExportJobID(), vcdID, "VTKIMG",
             (double)(j-timeSpecs.getBeginTimeIndex())/(double)(timeSpecs.getEndTimeIndex()-timeSpecs.getBeginTimeIndex()+1));

         StringBuffer sb = new StringBuffer();
@@ -1051,7 +1051,7 @@ public ExportOutput[] makeVTKUnstructuredData_Chombo(OutputContext outputContext
     ChomboVtkFileWriter chomboVTKFileWriter = new ChomboVtkFileWriter();
     File[] vtkFiles = chomboVTKFileWriter.writeVtuExportFiles(chomboFiles, tmpDir, new ChomboVtkFileWriter.ProgressListener() {
         public void progress(double percentDone) {
-            exportServiceImpl.fireExportProgress(jobRequest.getJobID(), vcdID, "VTKUNSTR", percentDone);
+            exportServiceImpl.fireExportProgress(jobRequest.getExportJobID(), vcdID, "VTKUNSTR", percentDone);
         }
     });

@@ -1080,7 +1080,7 @@ public ExportOutput[] makeVTKUnstructuredData_VCell(OutputContext outputContext,
     CartesianMeshVtkFileWriter cartesianMeshVtkFileWriter = new CartesianMeshVtkFileWriter();
     File[] vtkFiles = cartesianMeshVtkFileWriter.writeVtuExportFiles(vcellFiles, tmpDir, new CartesianMeshVtkFileWriter.ProgressListener() {
         public void progress(double percentDone) {
-            exportServiceImpl.fireExportProgress(jobRequest.getJobID(), vcdID, "VTKUNSTR", percentDone);
+            exportServiceImpl.fireExportProgress(jobRequest.getExportJobID(), vcdID, "VTKUNSTR", percentDone);
         }
     });

@@ -1113,7 +1113,7 @@ public ExportOutput[] makeVTKUnstructuredData(OutputContext outputContext,JobReq
     //for (int i = 0; i < variableSpecs.getVariableNames().length; i++){
     for (int j = timeSpecs.getBeginTimeIndex(); j <= timeSpecs.getEndTimeIndex(); j++){
         exportServiceImpl.fireExportProgress(
-            jobRequest.getJobID(), vcdID, "VTKUNSTR",
+            jobRequest.getExportJobID(), vcdID, "VTKUNSTR",
             (double)(j-timeSpecs.getBeginTimeIndex())/(double)(timeSpecs.getEndTimeIndex()-timeSpecs.getBeginTimeIndex()+1));

         //String fileID = simID + "_Full_" + formatTime(timeSpecs.getAllTimes()[j]) + "time_" + variableSpecs.getVariableNames().length + "vars";
diff --git a/vcell-core/src/test/java/cbit/vcell/export/N5ExporterTest.java b/vcell-core/src/test/java/cbit/vcell/export/N5ExporterTest.java
index 66bf441d4d..3590c3730a 100644
--- a/vcell-core/src/test/java/cbit/vcell/export/N5ExporterTest.java
+++ b/vcell-core/src/test/java/cbit/vcell/export/N5ExporterTest.java
@@ -25,15 +25,12 @@
 import com.google.gson.internal.LinkedTreeMap;
 import org.janelia.saalfeldlab.n5.DataBlock;
 import org.janelia.saalfeldlab.n5.*;
-import org.junit.Before;
-import org.junit.BeforeClass;
 import org.junit.jupiter.api.*;
 import org.vcell.util.DataAccessException;
 import org.vcell.util.document.KeyValue;
 import org.vcell.util.document.User;

 import java.io.File;
-import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InputStream;
 import java.util.*;
@@ -52,7 +49,6 @@ public class N5ExporterTest {
     private User testUser;
     private CartesianMesh modelMesh;
     private double[] times;
-    private static final int simulationJobId = 0;
     private static final String fourDModelID = "597714292";
     private static final String fiveDModelID = "1136922340";
     private final ArrayList testModels = new ArrayList<>(Arrays.asList(
@@ -173,9 +169,10 @@ public void restore() throws IOException {
     }

-    public void makeN5Model(N5Specs.CompressionLevel compressionLevel, int startTimeIndex, int endTimeIndex, String modelID) throws Exception {
+    public void makeN5FileWithSpecificSimulationResults(N5Specs.CompressionLevel compressionLevel, int startTimeIndex, int endTimeIndex, String modelID) throws Exception {
         OutputContext outputContext = new OutputContext(new AnnotatedFunction[0]);
+        VariableSpecs variableSpecs = new VariableSpecs(variables.stream().map(di -> di.getName()).toList(), Integer.parseInt(modelID));
         GeometrySpecs geometrySpecs = new GeometrySpecs(new SpatialSelection[0], 0, 0, 0);
         N5Specs n5Specs = new N5Specs(ExportConstants.DataType.PDE_VARIABLE_DATA, ExportFormat.N5, compressionLevel, modelID);
@@ -187,7 +184,7 @@
         exportSpecs.setExportMetaData(new HumanReadableExportData("", "", "", new ArrayList<>(), "", "", false, dummyMaskInfo));
         FileDataContainerManager fileDataContainerManager = new FileDataContainerManager();

-        ExportOutput exportOutput = n5Exporter.makeN5Data(outputContext, 0, exportSpecs, fileDataContainerManager);
+        ExportOutput exportOutput = n5Exporter.makeN5Data(outputContext, Integer.parseInt(modelID), exportSpecs, fileDataContainerManager);

         if(n5Reader != null){
             n5Reader.close();
@@ -195,10 +192,10 @@ public void makeN5Model(N5Specs.CompressionLevel compressionLevel, int startTime
         }
         this.n5Reader = new N5FSReader(n5Exporter.getN5FileAbsolutePath());
     }

-    public void initalizeModel(String simKeyID) throws IOException, DataAccessException, MathException {
+    public void setExportTestState(String simKeyID) throws IOException, DataAccessException, MathException {
         VCSimulationIdentifier vcSimulationIdentifier = simKeyID.equals(fourDModelID) ? new VCSimulationIdentifier(new KeyValue(fourDModelID), testUser) : new VCSimulationIdentifier(new KeyValue(fiveDModelID), testUser);
-        vcDataID = new VCSimulationDataIdentifier(vcSimulationIdentifier, simulationJobId);
+        vcDataID = new VCSimulationDataIdentifier(vcSimulationIdentifier, 0);
         n5Exporter.initalizeDataControllers(testUser, dataServer, vcDataID);

         dataIdentifiers = new ArrayList<>(Arrays.asList(dataServer.getDataIdentifiers(new OutputContext(new AnnotatedFunction[0]), testUser, vcDataID)));
@@ -229,8 +226,8 @@ else if (simKeyID.equals(fiveDModelID)){
     public void testMetaData() throws Exception {
         for(String model: testModels){
-            this.initalizeModel(model);
-            this.makeN5Model(N5Specs.CompressionLevel.RAW, 0, times.length - 1, model);
+            this.setExportTestState(model);
+            this.makeN5FileWithSpecificSimulationResults(N5Specs.CompressionLevel.RAW, 0, times.length - 1, model);
             //X, Y, T, Z, Channels
             long[] controlDimensions = {modelMesh.getSizeX(), modelMesh.getSizeY(), variables.size() + 1, modelMesh.getSizeZ(), times.length};
             // tests the metadata, and the metadata may be accurate but the actual raw array of data may be wrong
@@ -253,14 +250,14 @@ public void testMetaData() throws Exception {
     @Test
     public void testRandomTimeSlices() throws Exception {
         for (String model: testModels){
-            initalizeModel(model);
+            setExportTestState(model);
             for (int k=0; k<8; k++){
                 //try 8 randomly chosen time slice combinations
                 Random random = new Random();
                 int startTimeIndex = random.nextInt(0, times.length);
                 int endTimeIndex = random.nextInt(startTimeIndex, times.length);
                 OutputContext outputContext = new OutputContext(new AnnotatedFunction[0]);
-                makeN5Model(N5Specs.CompressionLevel.RAW, startTimeIndex, endTimeIndex, model);
+                makeN5FileWithSpecificSimulationResults(N5Specs.CompressionLevel.RAW, startTimeIndex, endTimeIndex, model);

                 DatasetAttributes datasetAttributes = n5Reader.getDatasetAttributes(model);
                 long attributesTimeSize = startTimeIndex + (datasetAttributes.getDimensions()[4] - 1); //minus 1 since we are already starting at startTimeIndex
@@ -292,10 +289,10 @@ public void testRawDataEquivelance() throws Exception {
         //each block is entire XYZ, broken in time and channels
         for(String model: testModels){
-            this.initalizeModel(model);
+            this.setExportTestState(model);
             OutputContext outputContext = new OutputContext(new AnnotatedFunction[0]);
             int endTimeIndex = times.length - 1;
-            makeN5Model(N5Specs.CompressionLevel.RAW, 0, endTimeIndex, model);
+            makeN5FileWithSpecificSimulationResults(N5Specs.CompressionLevel.RAW, 0, endTimeIndex, model);

             for(int i = 0; i < variables.size(); i++){
                 for(int timeSlice = 0; timeSlice < times.length; timeSlice++){
@@ -326,9 +323,9 @@ public void testDataCompressionEquivelance() throws Exception {
         for (N5Specs.CompressionLevel compression: compressions){
             for(String model: testModels){
-                initalizeModel(model);
+                setExportTestState(model);
                 int endTimeIndex = times.length - 1;
-                makeN5Model(compression, 0 , endTimeIndex, model);
+                makeN5FileWithSpecificSimulationResults(compression, 0 , endTimeIndex, model);
                 OutputContext outputContext = new OutputContext(new AnnotatedFunction[0]);
                 DatasetAttributes datasetAttributes = n5Reader.getDatasetAttributes(model);
                 for(int j = 0; j< 8; j++){
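Illustrative reader-side sketch (not part of the patch above): after this change the N5 dataset inside an exported container is created under String.valueOf(jobID), where jobID is the value returned by JobRequest.getExportJobID(), instead of under N5Specs.dataSetName, and the download URI advertises that name via "?dataSetName=". The snippet below shows how a consumer might locate such a dataset; the container path and job ID literal are assumptions, and the reader calls mirror those already used in N5ExporterTest.

// Hypothetical, illustrative Java snippet -- not part of the patch.
import org.janelia.saalfeldlab.n5.DatasetAttributes;
import org.janelia.saalfeldlab.n5.N5FSReader;

public class ReadExportedN5Dataset {
    public static void main(String[] args) throws Exception {
        long exportJobID = 1234567890L;                 // assumed: the value a JobRequest from createExportJobRequest(user) would return via getExportJobID()
        String n5ContainerPath = "/path/to/export.n5";  // assumed: filesystem location of the exported N5 container
        N5FSReader reader = new N5FSReader(n5ContainerPath);
        // The dataset is addressed by the export job ID, matching n5FSWriter.createDataset(String.valueOf(jobID), ...) in N5Exporter.
        DatasetAttributes attributes = reader.getDatasetAttributes(String.valueOf(exportJobID));
        System.out.println("dimensions: " + java.util.Arrays.toString(attributes.getDimensions()));
        reader.close();
    }
}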