Skip to content

Commit

Permalink
Merge pull request #1201 from virtualcell/domain-mask-IJ
Browse files Browse the repository at this point in the history
Domain mask ij
  • Loading branch information
AvocadoMoon committed Mar 27, 2024
2 parents 048fad0 + 417c647 commit c03e91e
Show file tree
Hide file tree
Showing 6 changed files with 76 additions and 43 deletions.
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
package org.vcell.api.common.events;

import java.util.ArrayList;
import java.util.HashMap;

public class ExportHumanReadableDataSpec {
public final String bioModelName;
Expand All @@ -10,17 +11,19 @@ public class ExportHumanReadableDataSpec {
public String serverSavedFileName;
public String applicationType;
public boolean nonSpatial;
public HashMap<Integer, String> subVolume;

public ExportHumanReadableDataSpec(String bioModelName, String applicationName, String simulationName,
ArrayList<String> differentParameterValues,
String serverSavedFileName, String applicationType, boolean nonSpatial){
String serverSavedFileName, String applicationType, boolean nonSpatial, HashMap<Integer, String> subVolume){
this.bioModelName = bioModelName;
this.applicationName = applicationName;
this.simulationName = simulationName;
this.differentParameterValues = differentParameterValues;
this.serverSavedFileName = serverSavedFileName;
this.applicationType = applicationType;
this.nonSpatial = nonSpatial;
this.subVolume = subVolume;
}


Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,7 @@
import javax.swing.border.LineBorder;

import cbit.vcell.export.server.*;
import cbit.vcell.geometry.SubVolume;
import cbit.vcell.solver.*;
import org.vcell.util.gui.GeneralGuiUtils;
import org.vcell.util.UserCancelException;
Expand Down Expand Up @@ -702,8 +703,12 @@ private ExportSpecs getExportSpecs() {
String serverSavedFileName = getExportSettings1().getFormatSpecificSpecs() instanceof N5Specs ? ((N5Specs) getExportSettings1().getFormatSpecificSpecs()).dataSetName : "";

boolean nonSpatial = sc.getGeometry().getDimension() == 0;
HashMap<Integer, String> subVolumes = new HashMap<>();
for(SubVolume subVolume: sc.getGeometry().getGeometrySpec().getSubVolumes()){
subVolumes.put(subVolume.getHandle(), subVolume.getName());
}
HumanReadableExportData humanReadableExportData = new HumanReadableExportData(getSimulation().getName(), sc.getName(), sc.getBioModel().getName(),
differentParameterValues, serverSavedFileName, sc.getApplicationType().name(), nonSpatial);
differentParameterValues, serverSavedFileName, sc.getApplicationType().name(), nonSpatial, subVolumes);
GeometrySpecs geometrySpecs = new GeometrySpecs(selections, getNormalAxis(), getSlice(), geoMode);
ExportSpecs exportSpecs = new ExportSpecs(
vcDataIdentifier,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@

import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;

public class HumanReadableExportData implements Serializable {
public final String simulationName;
Expand All @@ -15,23 +16,25 @@ public class HumanReadableExportData implements Serializable {
// File name that is saved by the user or server. In N5 case it'll be the dataset name. This way individual datasets can be automatically opened
public String serverSavedFileName;
public boolean nonSpatial;
public HashMap<Integer, String> subVolume;
public HumanReadableExportData(String simulationName, String applicationName, String biomodelName,
ArrayList<String> differentParameterValues,
String serverSavedFileName, String applicationType, boolean nonSpatial){
String serverSavedFileName, String applicationType, boolean nonSpatial, HashMap<Integer, String> subVolume){
this.simulationName = simulationName;
this.applicationName = applicationName;
this.biomodelName = biomodelName;
this.differentParameterValues = differentParameterValues;
this.serverSavedFileName = serverSavedFileName;
this.applicationType = applicationType;
this.nonSpatial = nonSpatial;
this.subVolume = subVolume;
}

public ExportHumanReadableDataSpec toJsonRep() {
return new ExportHumanReadableDataSpec(biomodelName, applicationName, simulationName, differentParameterValues, serverSavedFileName, applicationType, nonSpatial);
return new ExportHumanReadableDataSpec(biomodelName, applicationName, simulationName, differentParameterValues, serverSavedFileName, applicationType, nonSpatial, subVolume);
}
public static HumanReadableExportData fromJsonRep(ExportHumanReadableDataSpec rep) {
return new HumanReadableExportData(rep.simulationName, rep.applicationName, rep.bioModelName, rep.differentParameterValues,
rep.serverSavedFileName, rep.applicationType, rep.nonSpatial);
rep.serverSavedFileName, rep.applicationType, rep.nonSpatial, rep.subVolume);
}
}
23 changes: 15 additions & 8 deletions vcell-core/src/main/java/cbit/vcell/export/server/N5Exporter.java
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.janelia.saalfeldlab.n5.DataType;
import org.janelia.saalfeldlab.n5.DatasetAttributes;
import org.janelia.saalfeldlab.n5.DoubleArrayDataBlock;
import org.janelia.saalfeldlab.n5.N5FSWriter;
Expand Down Expand Up @@ -70,14 +71,17 @@ private ExportOutput exportToN5(OutputContext outputContext, long jobID, N5Specs
// output context expects a list of annotated functions, vcData seems to already have a set of annotated functions


int numVariables = variableNames.length;
int numVariables = variableNames.length + 1; //the extra variable length is for the mask generated for every N5 Export
CartesianMesh mesh = dataServer.getMesh(user, vcDataID);
int numTimes = timeSpecs.getEndTimeIndex() - timeSpecs.getBeginTimeIndex(); //end index is an actual index within the array and not representative of length
long[] dimensions = {mesh.getSizeX(), mesh.getSizeY(), numVariables, mesh.getSizeZ(), numTimes + 1};
// 51X, 51Y, 1Z, 1C, 2T
int[] blockSize = {mesh.getSizeX(), mesh.getSizeY(), 1, mesh.getSizeZ(), 1};


double[] mask = new double[mesh.getSizeX()*mesh.getSizeY()*mesh.getSizeZ()];
for (int i =0; i < mesh.getSizeX() * mesh.getSizeY() * mesh.getSizeZ(); i++){
mask[i] = (double) mesh.getSubVolumeFromVolumeIndex(i);
}

for (String variableName: variableNames){
DataIdentifier specie = getSpecificDI(variableName, outputContext);
Expand All @@ -101,19 +105,22 @@ private ExportOutput exportToN5(OutputContext outputContext, long jobID, N5Specs
// rewrite so that it still results in a tmp file does not raise File already exists error
N5FSWriter n5FSWriter = new N5FSWriter(getN5FileAbsolutePath(), new GsonBuilder());
DatasetAttributes datasetAttributes = new DatasetAttributes(dimensions, blockSize, org.janelia.saalfeldlab.n5.DataType.FLOAT64, n5Specs.getCompression());
HashMap<String, Object> additionalMetaData = new HashMap<>();

String dataSetName = n5Specs.dataSetName;

n5FSWriter.createDataset(dataSetName, datasetAttributes);
N5Specs.imageJMetaData(n5FSWriter, dataSetName, numVariables, blockSize[3], allTimes.length, additionalMetaData);
N5Specs.writeImageJMetaData(dimensions, blockSize, n5Specs.getCompression(),n5FSWriter, dataSetName, numVariables, blockSize[3], allTimes.length, exportSpecs.getHumanReadableExportData().subVolume);

//Create mask
for(int timeIndex = timeSpecs.getBeginTimeIndex(); timeIndex <= timeSpecs.getEndTimeIndex(); timeIndex++){
int normalizedTimeIndex = timeIndex - timeSpecs.getBeginTimeIndex();
DoubleArrayDataBlock doubleArrayDataBlock = new DoubleArrayDataBlock(blockSize, new long[]{0, 0, 0, 0, normalizedTimeIndex}, mask);
n5FSWriter.writeBlock(dataSetName, datasetAttributes, doubleArrayDataBlock);
}

for (int variableIndex=0; variableIndex < numVariables; variableIndex++){
for (int variableIndex=1; variableIndex < numVariables; variableIndex++){
for (int timeIndex=timeSpecs.getBeginTimeIndex(); timeIndex <= timeSpecs.getEndTimeIndex(); timeIndex++){

int normalizedTimeIndex = timeIndex - timeSpecs.getBeginTimeIndex();
double[] data = this.dataServer.getSimDataBlock(outputContext, user, this.vcDataID, variableNames[variableIndex], allTimes[timeIndex]).getData();
double[] data = this.dataServer.getSimDataBlock(outputContext, user, this.vcDataID, variableNames[variableIndex - 1], allTimes[timeIndex]).getData();
DoubleArrayDataBlock doubleArrayDataBlock = new DoubleArrayDataBlock(blockSize, new long[]{0, 0, variableIndex, 0, (normalizedTimeIndex)}, data);
n5FSWriter.writeBlock(dataSetName, datasetAttributes, doubleArrayDataBlock);
if(timeIndex % 3 == 0){
Expand Down
54 changes: 32 additions & 22 deletions vcell-core/src/main/java/cbit/vcell/export/server/N5Specs.java
Original file line number Diff line number Diff line change
Expand Up @@ -11,11 +11,15 @@
package cbit.vcell.export.server;

import cbit.vcell.math.MathException;
import cbit.vcell.simdata.VCData;
import com.google.gson.Gson;
import org.janelia.saalfeldlab.n5.*;
import org.vcell.util.DataAccessException;

import java.io.FileWriter;
import java.io.IOException;
import java.io.Serializable;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.HashMap;

/**
Expand All @@ -30,6 +34,7 @@ public class N5Specs extends FormatSpecificSpecs implements Serializable {
public final String dataSetName;

public static String n5Suffix = "n5";
public static String maskingMetaDataName = "maskMapping";

public static enum CompressionLevel{
RAW,
Expand Down Expand Up @@ -88,31 +93,36 @@ public String toString() {
return "N5Specs: [compression: " + format + ", chunking: " + dataType + ", switchRowsColumns: " + "]";
}


public static void imageJMetaData(N5FSWriter n5FSWriter, String datasetPath, int numChannels, int zSlices, int timeLength, HashMap<String, Object> additionalMetData) throws MathException, DataAccessException {
HashMap<String, Object> metaData = new HashMap<>();
metaData.put("name", "TestName");
metaData.put("fps", 0.0);
metaData.put("frameInterval", 0.0);
metaData.put("pixelWidth", 1.0);
metaData.put("pixelHeight", 1.0);
metaData.put("pixelDepth", 1.0);
metaData.put("xOrigin", 0.0);
metaData.put("yOrigin", 0.0);
metaData.put("zOrigin", 0.0);
metaData.put("numChannels", numChannels); //
metaData.put("numSlices", zSlices);
metaData.put("numFrames", timeLength);
metaData.put("type", 2); //https://imagej.nih.gov/ij/developer/api/ij/ij/ImagePlus.html#getType() Grayscale with float types
metaData.put("unit", "uM"); //https://imagej.nih.gov/ij/developer/api/ij/ij/measure/Calibration.html#getUnit()
metaData.put("properties", additionalMetData);

public static void writeImageJMetaData(long[] dimensions, int[] blockSize, Compression compression, N5FSWriter n5FSWriter, String datasetName, int numChannels, int zSlices,
int timeLength, HashMap<Integer, String> maskMapping) throws MathException, DataAccessException {
try {
n5FSWriter.setAttributes(datasetPath, metaData);
} catch (N5Exception e) {
HashMap<String, String> compresssionMap = new HashMap<>(){{put("type", compression.getType().toLowerCase());}};
ImageJMetaData imageJMetaData = ImageJMetaData.generateDefaultRecord(dimensions, blockSize, compresssionMap, datasetName, numChannels, zSlices, timeLength, maskMapping);
Path path = Path.of(n5FSWriter.getURI().getPath(), datasetName, "attributes.json");
Gson gson = n5FSWriter.getGson();
String jsonRepresentation = gson.toJson(imageJMetaData, ImageJMetaData.class);
FileWriter fileWriter = new FileWriter(path.toFile());
fileWriter.write(jsonRepresentation);
fileWriter.close();
} catch (N5Exception | IOException e) {
throw new RuntimeException(e);
}

}

/**
 * Gson-serializable snapshot of the attribute set written to an N5 dataset's
 * {@code attributes.json} so the export can be opened in ImageJ.
 * Field names are serialized verbatim as the JSON keys, so they must match what
 * the ImageJ/N5 readers expect — do not rename them casually.
 *
 * NOTE(review): field semantics below follow the linked ImageJ API docs; the
 * exact consumer-side contract should be confirmed against the n5-imagej reader.
 */
record ImageJMetaData(long[] dimensions ,int[] blockSize, HashMap<String, String> compression, String dataType, String name, double fps, double frameInterval, double pixelWidth,
                      double pixelHeight, double pixelDepth, double xOrigin, double yOrigin, double zOrigin, int numChannels, int numSlices, int numFrames,
                      int type, String unit, HashMap<Integer, String> maskMapping){

    // https://github.com/saalfeldlab/n5
    //https://imagej.nih.gov/ij/developer/api/ij/ij/ImagePlus.html#getType() Grayscale with float types
    //https://imagej.nih.gov/ij/developer/api/ij/ij/measure/Calibration.html#getUnit()

    /**
     * Builds a metadata record with VCell's default calibration values:
     * no frame rate/interval (0.0), unit pixel sizes (1.0), origin at (0,0,0),
     * data type fixed to lowercase {@code float64} (matches the FLOAT64 blocks
     * the exporter writes), ImageJ image type 2 (grayscale float, per the
     * getType() link above), and unit "uM".
     *
     * @param dimensions  full dataset dimensions (X, Y, C, Z, T order per the exporter)
     * @param blockSize   N5 chunk size along each dimension
     * @param compression compression descriptor map (e.g. {"type": "raw"})
     * @param dataSetName dataset name; stored in the record's {@code name} field
     * @param numChannels channel count, including the mask channel added by the export
     * @param numSlices   number of Z slices
     * @param numFrames   number of time points
     * @param maskMapping subvolume handle -> subvolume name, for interpreting the mask channel
     */
    public static ImageJMetaData generateDefaultRecord(long[] dimensions ,int[] blockSize, HashMap<String, String> compression, String dataSetName, int numChannels,
                                                       int numSlices, int numFrames, HashMap<Integer, String> maskMapping){
        return new ImageJMetaData(dimensions, blockSize, compression, DataType.FLOAT64.name().toLowerCase() ,dataSetName, 0.0, 0.0,
                1.0, 1.0, 1.0, 0.0, 0.0, 0.0, numChannels, numSlices, numFrames, 2, "uM", maskMapping);
    }
}


Expand Down
21 changes: 13 additions & 8 deletions vcell-core/src/test/java/cbit/vcell/export/N5ExporterTest.java
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
import cbit.vcell.solver.VCSimulationDataIdentifier;
import cbit.vcell.solver.VCSimulationIdentifier;
import cbit.vcell.solvers.CartesianMesh;
import com.google.gson.internal.LinkedTreeMap;
import org.janelia.saalfeldlab.n5.DataBlock;
import org.janelia.saalfeldlab.n5.*;
import org.junit.Before;
Expand All @@ -35,10 +36,7 @@
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Objects;
import java.util.Random;
import java.util.*;

import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertSame;
Expand Down Expand Up @@ -185,6 +183,8 @@ public void makeN5Model(N5Specs.CompressionLevel compressionLevel, int startTime
double[] allTimes = dataServer.getDataSetTimes(testUser,n5Exporter.getVcDataID());
TimeSpecs timeSpecs = new TimeSpecs(startTimeIndex, endTimeIndex, allTimes, variableSpecs.getModeID());
ExportSpecs exportSpecs = new ExportSpecs(n5Exporter.getVcDataID(), ExportFormat.N5, variableSpecs, timeSpecs, geometrySpecs, n5Specs, "", "");
HashMap<Integer, String> dummyMaskInfo = new HashMap<>(){{put(0, "Dummy"); put(1, "Test");}};
exportSpecs.setExportMetaData(new HumanReadableExportData("", "", "", new ArrayList<>(), "", "", false, dummyMaskInfo));
FileDataContainerManager fileDataContainerManager = new FileDataContainerManager();

ExportOutput exportOutput = n5Exporter.makeN5Data(outputContext, 0, exportSpecs, fileDataContainerManager);
Expand Down Expand Up @@ -232,13 +232,18 @@ public void testMetaData() throws Exception {
this.initalizeModel(model);
this.makeN5Model(N5Specs.CompressionLevel.RAW, 0, times.length - 1, model);
//X, Y, T, Z, Channels
long[] controlDimensions = {modelMesh.getSizeX(), modelMesh.getSizeY(), variables.size(), modelMesh.getSizeZ(), times.length};
long[] controlDimensions = {modelMesh.getSizeX(), modelMesh.getSizeY(), variables.size() + 1, modelMesh.getSizeZ(), times.length};
// tests the metadata, and the metadata may be accurate but the actual raw array of data may be wrong
DatasetAttributes datasetAttributes = n5Reader.getDatasetAttributes(model);
long[] exportDimensions = datasetAttributes.getDimensions();
assertArrayEquals(controlDimensions, exportDimensions, "Testing dimension results for model " + model);
((N5FSReader) n5Reader).getAttributes(model);

LinkedTreeMap<String, String> dummyMaskInfo = (LinkedTreeMap<String, java.lang.String>)((N5FSReader) n5Reader).getAttribute(model, N5Specs.maskingMetaDataName, LinkedTreeMap.class);

assertSame(DataType.FLOAT64, datasetAttributes.getDataType(),"Data Type of model " + model);
assert("Dummy".equals(dummyMaskInfo.get("0")));
assert("Test".equals(dummyMaskInfo.get("1")));

int[] expectedBlockSize = {modelMesh.getSizeX(), modelMesh.getSizeY(), 1, modelMesh.getSizeZ(), 1};
assertArrayEquals(expectedBlockSize, datasetAttributes.getBlockSize(),"Block Size of model " + model);
Expand All @@ -261,7 +266,7 @@ public void testRandomTimeSlices() throws Exception {

for (int i = 0; i < variables.size(); i++){
for(int timeSlice = startTimeIndex; timeSlice <= attributesTimeSize; timeSlice++){
DataBlock<?> dataBlock = n5Reader.readBlock(model, datasetAttributes, new long[]{0, 0, i, 0, timeSlice - startTimeIndex});
DataBlock<?> dataBlock = n5Reader.readBlock(model, datasetAttributes, new long[]{0, 0, i + 1, 0, timeSlice - startTimeIndex});

double[] exportedRawData = (double[]) dataBlock.getData();
assertArrayEquals(
Expand Down Expand Up @@ -295,7 +300,7 @@ public void testRawDataEquivelance() throws Exception {
for(int i = 0; i < variables.size(); i++){
for(int timeSlice = 0; timeSlice < times.length; timeSlice++){
DatasetAttributes datasetAttributes = n5Reader.getDatasetAttributes(model);
DataBlock<?> dataBlock = n5Reader.readBlock(model, datasetAttributes, new long[]{0, 0, i, 0, timeSlice});
DataBlock<?> dataBlock = n5Reader.readBlock(model, datasetAttributes, new long[]{0, 0, i + 1, 0, timeSlice});

double[] exportedRawData = (double[]) dataBlock.getData();
assertArrayEquals(
Expand Down Expand Up @@ -329,7 +334,7 @@ public void testDataCompressionEquivelance() throws Exception {
for(int j = 0; j< 8; j++){
int timeSlice = random.nextInt(endTimeIndex);
int chosenVariable = random.nextInt(variables.size());
DataBlock<?> dataBlock = n5Reader.readBlock(model, datasetAttributes, new long[]{0, 0, chosenVariable, 0, timeSlice});
DataBlock<?> dataBlock = n5Reader.readBlock(model, datasetAttributes, new long[]{0, 0, chosenVariable + 1, 0, timeSlice});

double[] exportedData = (double[]) dataBlock.getData();
Assertions.assertArrayEquals(
Expand Down

0 comments on commit c03e91e

Please sign in to comment.