diff --git a/docker/build/Dockerfile-api-dev b/docker/build/Dockerfile-api-dev
index 04fe73a18e..64dfc3a867 100644
--- a/docker/build/Dockerfile-api-dev
+++ b/docker/build/Dockerfile-api-dev
@@ -65,7 +65,6 @@ ENV softwareVersion=SOFTWARE-VERSION-NOT-SET \
smtp_port="smtp-port-not-set" \
smtp_emailaddress="smtp-emailaddress-not-set" \
simdataCacheSize="simdataCacheSize-not-set" \
- webDataPort="webDataPort-not-set" \
ssl_ignoreHostMismatch=true \
ssl_ignoreCertProblems=false \
serverPrefixV0="server-path-prefix-v0-not-set" \
@@ -115,7 +114,6 @@ ENTRYPOINT java \
-Dvcellapi.keystore.pswdfile="${keystorepswdfile}" \
-Dvcell.smtp.hostName="${smtp_hostname}" \
-Dvcell.smtp.port="${smtp_port}" \
- -Dvcelldata.web.server.port=${webDataPort} \
-Dvcell.smtp.emailAddress="${smtp_emailaddress}" \
-Dvcell.ssl.ignoreHostMismatch="${ssl_ignoreHostMismatch}" \
-Dvcell.ssl.ignoreCertProblems="${ssl_ignoreCertProblems}" \
diff --git a/docker/build/Dockerfile-data-dev b/docker/build/Dockerfile-data-dev
index 30d48b83ee..8d634ca4d1 100644
--- a/docker/build/Dockerfile-data-dev
+++ b/docker/build/Dockerfile-data-dev
@@ -58,7 +58,6 @@ ENV softwareVersion=SOFTWARE-VERSION-NOT-SET \
export_baseurl="export-baseurl-not-set" \
simdatadir_external=/path/to/external/simdata/ \
simdataCacheSize="simdataCacheSize-not-set" \
- webDataPort="webDataPort-not-set" \
servertype="servertype-not-set" \
s3ProxyPortExternal="80" \
s3export_baseURL="s3-export-baseurl-not-set"
@@ -110,6 +109,5 @@ ENTRYPOINT java \
-Dvcell.mongodb.host.internal=${mongodb_host_internal} \
-Dvcell.mongodb.port.internal=${mongodb_port_internal} \
-Dvcell.mongodb.database=${mongodb_database} \
- -Dvcelldata.web.server.port=${webDataPort} \
-cp "./lib/*" cbit.vcell.message.server.data.SimDataServerMain \
"${servertype}"
diff --git a/docker/build/Dockerfile-web-dev b/docker/build/Dockerfile-web-dev
deleted file mode 100644
index a35261b062..0000000000
--- a/docker/build/Dockerfile-web-dev
+++ /dev/null
@@ -1,85 +0,0 @@
-FROM eclipse-temurin:17 as jre-build
-
-# Create a custom Java runtime
-RUN $JAVA_HOME/bin/jlink \
- --add-modules ALL-MODULE-PATH \
- --strip-debug \
- --no-man-pages \
- --no-header-files \
- --compress=2 \
- --output /javaruntime
-
-# Define base image and copy in jlink created minimal Java 17 environment
-FROM debian:buster-slim
-ENV JAVA_HOME=/opt/java/openjdk
-ENV PATH "${JAVA_HOME}/bin:${PATH}"
-COPY --from=jre-build /javaruntime $JAVA_HOME
-
-RUN apt-get -y update && apt-get install -y bash nano wget apt-utils libfreetype6 fontconfig fonts-dejavu
-
-RUN DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends tzdata
-RUN unlink /etc/localtime || true
-RUN ln -s /usr/share/zoneinfo/America/New_York /etc/localtime
-
-RUN DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends tzdata
-RUN unlink /etc/localtime || true
-RUN ln -s /usr/share/zoneinfo/America/New_York /etc/localtime
-
-WORKDIR /usr/local/app
-
-COPY ./vcell-web/target/vcell-web-0.0.1-SNAPSHOT.jar \
- ./vcell-web/target/maven-jars/*.jar \
- ./lib/
-
-COPY ./pythonVtk ./pythonVtk
-COPY ./nativelibs/linux64 ./nativelibs/linux64
-COPY ./docker/build/vcell-web.log4j.xml .
-
-ENV softwareVersion=SOFTWARE-VERSION-NOT-SET \
- serverid=SITE \
- dburl="db-url-not-set" \
- dbdriver="db-driver-not-set" \
- dbuser="db-user-not-set" \
- export_baseurl="export-baseurl-not-set" \
- simdatadir_external=/path/to/external/simdata/ \
- simdataCacheSize="simdataCacheSize-not-set" \
- webDataPort="webDataPort-not-set"
-
-ENV dbpswdfile=/run/secrets/dbpswd \
- keystore=/run/secrets/keystorefile \
- keystorepswdfile=/run/secrets/keystorepswd
-
-
-VOLUME /simdata
-VOLUME /simdata_secondary
-VOLUME /exportdir
-
-EXPOSE 8000
-
-ENTRYPOINT java \
- -Xdebug -agentlib:jdwp=transport=dt_socket,address=*:8000,server=y,suspend=n \
- -XX:MaxRAMPercentage=100 \
-# -XX:+PrintFlagsFinal -XshowSettings:vm \
- -Djava.awt.headless=true \
- -Dvcell.softwareVersion="${softwareVersion}" \
- -Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \
- -Dlog4j.configurationFile=/usr/local/app/vcell-web.log4j.xml \
- -Dvcell.server.id="${serverid}" \
- -Dvcell.server.dbConnectURL="${dburl}" \
- -Dvcell.server.dbDriverName="${dbdriver}" \
- -Dvcell.server.dbUserid="${dbuser}" \
- -Dvcell.db.pswdfile="${dbpswdfile}" \
- -Dvcell.python.executable=/usr/bin/python \
- -Dvcell.primarySimdatadir.internal=/simdata \
- -Dvcell.secondarySimdatadir.internal=/simdata_secondary \
- -Dvcell.primarySimdatadir.external="${simdatadir_external}" \
- -Dvcell.simdataCacheSize="${simdataCacheSize}" \
- -Dvcell.export.baseDir.internal=/exportdir \
- -Dvcell.export.baseURL="${export_baseurl}" \
- -Dvcell.installDir=/usr/local/app \
- -Dvcellapi.keystore.file="${keystore}" \
- -Dvcellapi.keystore.pswdfile="${keystorepswdfile}" \
- -Dvcelldata.web.server.port=${webDataPort} \
- -cp "./lib/*" org.vcell.web.MainService
-
-
\ No newline at end of file
diff --git a/docker/build/vcell-web.log4j.xml b/docker/build/vcell-web.log4j.xml
deleted file mode 100644
index b9babe8b16..0000000000
--- a/docker/build/vcell-web.log4j.xml
+++ /dev/null
@@ -1,32 +0,0 @@
diff --git a/docker/kustomize/config/devjim/api.env b/docker/kustomize/config/devjim/api.env
index 0d6a807a35..83397fb64e 100644
--- a/docker/kustomize/config/devjim/api.env
+++ b/docker/kustomize/config/devjim/api.env
@@ -1,7 +1,5 @@
simdataCacheSize=10000000
-webDataPort=55556
-
smtp_emailaddress=VCell_Support@uchc.edu
smtp_hostname=vdsmtp.cam.uchc.edu
smtp_port=25
diff --git a/docker/kustomize/config/devjim/data.env b/docker/kustomize/config/devjim/data.env
index 6c878223f3..f8fc4815a4 100644
--- a/docker/kustomize/config/devjim/data.env
+++ b/docker/kustomize/config/devjim/data.env
@@ -1,7 +1,5 @@
simdataCacheSize=10000000
-webDataPort=55556
-
s3ProxyPortExternal=8089
s3export_baseURL=https://localhost
diff --git a/docker/kustomize/config/stage/api.env b/docker/kustomize/config/stage/api.env
index 185b46b07e..5ecab23579 100644
--- a/docker/kustomize/config/stage/api.env
+++ b/docker/kustomize/config/stage/api.env
@@ -1,6 +1,5 @@
simdataCacheSize=10000000
-webDataPort=55556
serverPrefixV0=/api/v0
smtp_emailaddress=VCell_Support@uchc.edu
diff --git a/docker/kustomize/config/stage/data.env b/docker/kustomize/config/stage/data.env
index 6c878223f3..f8fc4815a4 100644
--- a/docker/kustomize/config/stage/data.env
+++ b/docker/kustomize/config/stage/data.env
@@ -1,7 +1,5 @@
simdataCacheSize=10000000
-webDataPort=55556
-
s3ProxyPortExternal=8089
s3export_baseURL=https://localhost
diff --git a/docker/swarm/docker-compose-small.yml b/docker/swarm/docker-compose-small.yml
index 715c1337d1..2575a0f090 100644
--- a/docker/swarm/docker-compose-small.yml
+++ b/docker/swarm/docker-compose-small.yml
@@ -22,8 +22,6 @@ services:
- mongodb_port_internal=27017
- mongodb_database=test
- - webDataPort=${VCELL_WEB_DATA_PORT}
-
- smtp_hostname=${VCELL_SMTP_HOSTNAME}
- smtp_port=${VCELL_SMTP_PORT}
- smtp_emailaddress=${VCELL_SMTP_EMAILADDRESS}
@@ -118,8 +116,6 @@ services:
- s3ProxyPortExternal=${VCELL_S3PROXY_PORT_EXTERNAL}
- s3export_baseURL=${VCELL_S3_EXPORT_BASEURL}
- - webDataPort=${VCELL_WEB_DATA_PORT}
-
- servertype=CombinedData
ports:
- "127.0.0.1:${VCELL_DEBUG_PORT_VCELL_DATA}:8000" # java remote debugging
diff --git a/docker/swarm/docker-compose.yml b/docker/swarm/docker-compose.yml
index 1d24c21c37..de7b3b6d54 100644
--- a/docker/swarm/docker-compose.yml
+++ b/docker/swarm/docker-compose.yml
@@ -24,8 +24,6 @@ services:
- mongodb_port_internal=27017
- mongodb_database=test
- - webDataPort=${VCELL_WEB_DATA_PORT}
-
- smtp_hostname=${VCELL_SMTP_HOSTNAME}
- smtp_port=${VCELL_SMTP_PORT}
- smtp_emailaddress=${VCELL_SMTP_EMAILADDRESS}
@@ -284,8 +282,6 @@ services:
- s3ProxyPortExternal=${VCELL_S3PROXY_PORT_EXTERNAL}
- s3export_baseURL=${VCELL_S3_EXPORT_BASEURL}
- - webDataPort=${VCELL_WEB_DATA_PORT}
-
- servertype=CombinedData
ports:
- "${VCELL_DEBUG_PORT_VCELL_DATA}:8000" # java remote debugging
diff --git a/docker/swarm/localconfig_mockslurm-not-used.sh b/docker/swarm/localconfig_mockslurm-not-used.sh
index c314b4c409..0c3c20d8bf 100755
--- a/docker/swarm/localconfig_mockslurm-not-used.sh
+++ b/docker/swarm/localconfig_mockslurm-not-used.sh
@@ -120,7 +120,6 @@ VCELL_EXPORTDIR_HOST=/Volumes/vcell/export/
VCELL_MAX_JOBS_PER_SCAN=100
VCELL_MAX_ODE_JOBS_PER_USER=100
VCELL_MAX_PDE_JOBS_PER_USER=40
-VCELL_WEB_DATA_PORT=55556
VCELL_SSH_CMD_TIMEOUT=10000
VCELL_SSH_CMD_RESTORE_TIMEOUT=5
@@ -193,6 +192,5 @@ VCELL_TAG=$VCELL_TAG
VCELL_UPDATE_SITE=http://vcell.org/webstart/${_site_camel}
VCELL_VERSION_NUMBER=$VCELL_VERSION_NUMBER
VCELL_VERSION=${_site_camel}_Version_${VCELL_VERSION_NUMBER}_build_${VCELL_BUILD_NUMBER}
-VCELL_WEB_DATA_PORT=$VCELL_WEB_DATA_PORT
EOF
diff --git a/docker/swarm/localconfig_realslurm_oracle.sh b/docker/swarm/localconfig_realslurm_oracle.sh
index c72a06d02b..d54f2bc0ab 100755
--- a/docker/swarm/localconfig_realslurm_oracle.sh
+++ b/docker/swarm/localconfig_realslurm_oracle.sh
@@ -145,7 +145,6 @@ VCELL_EXPORTDIR_HOST=/Volumes/vcell/export/
VCELL_MAX_JOBS_PER_SCAN=100
VCELL_MAX_ODE_JOBS_PER_USER=100
VCELL_MAX_PDE_JOBS_PER_USER=40
-VCELL_WEB_DATA_PORT=55556
VCELL_SSH_CMD_TIMEOUT=10000
VCELL_SSH_CMD_RESTORE_TIMEOUT=5
@@ -235,6 +234,5 @@ VCELL_TAG=$VCELL_TAG
VCELL_UPDATE_SITE=http://vcell.org/webstart/${_site_camel}
VCELL_VERSION_NUMBER=$VCELL_VERSION_NUMBER
VCELL_VERSION=${_site_camel}_Version_${VCELL_VERSION_NUMBER}_build_${VCELL_BUILD_NUMBER}
-VCELL_WEB_DATA_PORT=$VCELL_WEB_DATA_PORT
EOF
diff --git a/docker/swarm/localconfig_realslurm_oracle_zeke.sh b/docker/swarm/localconfig_realslurm_oracle_zeke.sh
index 2637816146..f546f4a555 100755
--- a/docker/swarm/localconfig_realslurm_oracle_zeke.sh
+++ b/docker/swarm/localconfig_realslurm_oracle_zeke.sh
@@ -148,7 +148,6 @@ VCELL_EXPORTDIR_HOST=/media/zeke/DiskDrive/Home/Work/CCAM/TempStorage/export
VCELL_MAX_JOBS_PER_SCAN=100
VCELL_MAX_ODE_JOBS_PER_USER=100
VCELL_MAX_PDE_JOBS_PER_USER=40
-VCELL_WEB_DATA_PORT=55556
VCELL_SSH_CMD_TIMEOUT=10000
VCELL_SSH_CMD_RESTORE_TIMEOUT=5
@@ -257,6 +256,6 @@ VCELL_UPDATE_SITE=http://vcell.org/webstart/${_site_camel}
VCELL_VERSION_NUMBER=$VCELL_VERSION_NUMBER
VCELL_VERSION=${_site_camel}_Version_${VCELL_VERSION_NUMBER}_build_${VCELL_BUILD_NUMBER}
-VCELL_WEB_DATA_PORT=$VCELL_WEB_DATA_PORT
+
EOF
diff --git a/docker/swarm/localconfig_realslurm_postgres.sh b/docker/swarm/localconfig_realslurm_postgres.sh
index 40e5802799..18b5fa1f3d 100755
--- a/docker/swarm/localconfig_realslurm_postgres.sh
+++ b/docker/swarm/localconfig_realslurm_postgres.sh
@@ -145,7 +145,6 @@ VCELL_EXPORTDIR_HOST=/Volumes/vcell/export/
VCELL_MAX_JOBS_PER_SCAN=100
VCELL_MAX_ODE_JOBS_PER_USER=100
VCELL_MAX_PDE_JOBS_PER_USER=40
-VCELL_WEB_DATA_PORT=55556
VCELL_SSH_CMD_TIMEOUT=10000
VCELL_SSH_CMD_RESTORE_TIMEOUT=5
@@ -235,6 +234,5 @@ VCELL_TAG=$VCELL_TAG
VCELL_UPDATE_SITE=http://vcell.org/webstart/${_site_camel}
VCELL_VERSION_NUMBER=$VCELL_VERSION_NUMBER
VCELL_VERSION=${_site_camel}_Version_${VCELL_VERSION_NUMBER}_build_${VCELL_BUILD_NUMBER}
-VCELL_WEB_DATA_PORT=$VCELL_WEB_DATA_PORT
EOF
diff --git a/docker/swarm/serverconfig-uch.sh b/docker/swarm/serverconfig-uch.sh
index 5b852ee94a..b5a405c9d9 100755
--- a/docker/swarm/serverconfig-uch.sh
+++ b/docker/swarm/serverconfig-uch.sh
@@ -167,7 +167,6 @@ VCELL_EXPORTDIR_HOST=/opt/vcelldata/export/
VCELL_MAX_JOBS_PER_SCAN=100
VCELL_MAX_ODE_JOBS_PER_USER=100
VCELL_MAX_PDE_JOBS_PER_USER=40
-VCELL_WEB_DATA_PORT=55555
VCELL_SSH_CMD_TIMEOUT=10000
VCELL_SSH_CMD_RESTORE_TIMEOUT=5
@@ -258,6 +257,5 @@ VCELL_TAG=$VCELL_TAG
VCELL_UPDATE_SITE=http://vcell.org/webstart/${_site_camel}
VCELL_VERSION_NUMBER=$VCELL_VERSION_NUMBER
VCELL_VERSION=${_site_camel}_Version_${VCELL_VERSION_NUMBER}_build_${VCELL_BUILD_NUMBER}
-VCELL_WEB_DATA_PORT=$VCELL_WEB_DATA_PORT
EOF
diff --git a/pom.xml b/pom.xml
index 385869fb5a..8fd9f5b665 100644
--- a/pom.xml
+++ b/pom.xml
@@ -64,7 +64,6 @@
vcell-util
vcell-vmicro
vcell-cli
- vcell-web
vcell-rest
vcell-restclient
diff --git a/vcell-cli/src/main/java/org/vcell/cli/run/ExecuteImpl.java b/vcell-cli/src/main/java/org/vcell/cli/run/ExecuteImpl.java
index 56fec53c25..f1556bee3e 100644
--- a/vcell-cli/src/main/java/org/vcell/cli/run/ExecuteImpl.java
+++ b/vcell-cli/src/main/java/org/vcell/cli/run/ExecuteImpl.java
@@ -2,7 +2,6 @@
import cbit.vcell.parser.ExpressionException;
import cbit.vcell.solver.ode.ODESolverResultSet;
-import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.vcell.cli.CLIRecordable;
@@ -22,6 +21,8 @@
import java.util.List;
import java.util.Map;
+import static org.vcell.cli.run.hdf5.BiosimulationsHdf5Writer.BiosimulationsHdfWriterException;
+
public class ExecuteImpl {
private final static Logger logger = LogManager.getLogger(ExecuteImpl.class);
@@ -65,7 +66,7 @@ public static void batchMode(File dirOfArchivesToProcess, File outputDir, CLIRec
if (inputFileName.endsWith("omex"))
runSingleExecOmex(inputFile, outputDir, cliLogger,
bKeepTempFiles, bExactMatchOnly, bSmallMeshOverride);
- } catch (ExecutionException | RuntimeException | HDF5Exception e){
+ } catch (ExecutionException e){
logger.error("Error caught executing batch mode", e);
Tracer.failure(e, "Error caught executing batch mode");
failedFiles.add(inputFileName);
@@ -105,7 +106,7 @@ public static void batchMode(File dirOfArchivesToProcess, File outputDir, CLIRec
private static void runSingleExecOmex(File inputFile, File outputDir, CLIRecordable cliLogger, boolean bKeepTempFiles,
boolean bExactMatchOnly, boolean bSmallMeshOverride)
- throws IOException, ExecutionException, PythonStreamException, HDF5Exception, InterruptedException {
+ throws IOException, ExecutionException, PythonStreamException, InterruptedException, BiosimulationsHdfWriterException {
String bioModelBaseName = inputFile.getName().substring(0, inputFile.getName().indexOf(".")); // ".omex"??
Files.createDirectories(Paths.get(outputDir.getAbsolutePath() + File.separator + bioModelBaseName)); // make output subdir
final boolean bEncapsulateOutput = true;
@@ -230,7 +231,7 @@ public static void singleExecVcml(File vcmlFile, File outputDir, CLIRecordable c
private static void singleExecOmex(File inputFile, File rootOutputDir, CLIRecordable cliRecorder,
boolean bKeepTempFiles, boolean bExactMatchOnly, boolean bEncapsulateOutput, boolean bSmallMeshOverride, boolean bBioSimMode)
- throws ExecutionException, PythonStreamException, IOException, InterruptedException, HDF5Exception {
+ throws ExecutionException, PythonStreamException, IOException, InterruptedException, BiosimulationsHdfWriterException {
ExecutionJob requestedExecution = new ExecutionJob(inputFile, rootOutputDir, cliRecorder,
bKeepTempFiles, bExactMatchOnly, bEncapsulateOutput, bSmallMeshOverride);
diff --git a/vcell-cli/src/main/java/org/vcell/cli/run/ExecutionJob.java b/vcell-cli/src/main/java/org/vcell/cli/run/ExecutionJob.java
index e3f0d52e45..38bf0df18b 100644
--- a/vcell-cli/src/main/java/org/vcell/cli/run/ExecutionJob.java
+++ b/vcell-cli/src/main/java/org/vcell/cli/run/ExecutionJob.java
@@ -1,25 +1,22 @@
package org.vcell.cli.run;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.vcell.cli.CLIRecordable;
import org.vcell.cli.PythonStreamException;
import org.vcell.cli.exceptions.ExecutionException;
+import org.vcell.cli.run.hdf5.BiosimulationsHdf5Writer;
import org.vcell.cli.run.hdf5.HDF5ExecutionResults;
-import org.vcell.cli.run.hdf5.Hdf5DataContainer;
import org.vcell.util.FileUtils;
-import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
-
-import org.vcell.cli.run.hdf5.Hdf5Writer;
-
-import org.apache.logging.log4j.Logger;
-import org.apache.logging.log4j.LogManager;
-
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
+import static org.vcell.cli.run.hdf5.BiosimulationsHdf5Writer.BiosimulationsHdfWriterException;
+
/**
* Contains the code necessary to execute an Omex archive in VCell
*/
@@ -124,7 +121,7 @@ public void preprocessArchive() throws PythonStreamException, IOException {
* @throws IOException if there are system I/O issues
* @throws ExecutionException if an execution specfic error occurs
*/
- public void executeArchive(boolean isBioSimSedml) throws HDF5Exception, PythonStreamException, ExecutionException {
+ public void executeArchive(boolean isBioSimSedml) throws BiosimulationsHdfWriterException, PythonStreamException, ExecutionException {
try {
HDF5ExecutionResults masterHdf5File = new HDF5ExecutionResults(isBioSimSedml);
this.queueAllSedml();
@@ -145,7 +142,7 @@ public void executeArchive(boolean isBioSimSedml) throws HDF5Exception, PythonSt
if (hasSucceeded) logger.info("Processing of SedML succeeded.\n" + stats.toString());
else logger.error("Processing of SedML has failed.\n" + stats.toString());
}
- Hdf5Writer.writeHdf5(masterHdf5File, new File(this.outputDir));
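+            // all SedML outputs accumulated in masterHdf5File are written to a single
+            // BioSim-style "reports.h5" in this job's output directory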
+ BiosimulationsHdf5Writer.writeHdf5(masterHdf5File, new File(this.outputDir));
} catch(PythonStreamException e){
logger.error("Python-processing encountered fatal error. Execution is unable to properly continue.", e);
diff --git a/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/Hdf5File.java b/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/BiosimulationsHdf5File.java
similarity index 86%
rename from vcell-cli/src/main/java/org/vcell/cli/run/hdf5/Hdf5File.java
rename to vcell-cli/src/main/java/org/vcell/cli/run/hdf5/BiosimulationsHdf5File.java
index d6c84b4d67..dd7b6ba472 100644
--- a/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/Hdf5File.java
+++ b/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/BiosimulationsHdf5File.java
@@ -1,32 +1,29 @@
package org.vcell.cli.run.hdf5;
-//import ncsa.hdf.hdf5lib.*;
-
-import java.io.File;
-import java.io.IOException;
-
-import java.nio.ByteBuffer;
-import java.nio.ByteOrder;
-
-
-import java.util.List;
-import java.util.Map;
-import java.util.HashMap;
import ncsa.hdf.hdf5lib.H5;
import ncsa.hdf.hdf5lib.HDF5Constants;
import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
-
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.vcell.cli.run.hdf5.Hdf5DataPreparer.Hdf5PreparedData;
+import java.io.File;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.vcell.cli.run.hdf5.BiosimulationsHdf5Writer.BiosimulationsHdfWriterException;
+
/**
* Class to handle Hdf5 creation, data, and assist with I/O.
*/
-public class Hdf5File {
+public class BiosimulationsHdf5File {
// NB: Hdf5 group management is ***important***.
- private final static Logger logger = LogManager.getLogger(Hdf5File.class);
+ private final static Logger logger = LogManager.getLogger(BiosimulationsHdf5File.class);
private static boolean needToCreateFile = true;
final private int H5F_ACC_TRUNC = HDF5Constants.H5F_ACC_TRUNC;
@@ -47,7 +44,7 @@ public class Hdf5File {
private Map pathToIdMap;
private Map datasetToDataspaceMap;
- private Hdf5File(){
+ private BiosimulationsHdf5File(){
this.fileId = HDF5Constants.H5I_INVALID_HID;
this.isOpen = false;
@@ -55,26 +52,26 @@ private Hdf5File(){
this.idToPathMap = new HashMap();
this.pathToIdMap = new HashMap();
this.datasetToDataspaceMap = new HashMap<>();
- Hdf5File.needToCreateFile = false;
+ BiosimulationsHdf5File.needToCreateFile = false;
}
/**
- * Creates an Hdf5File named "reports.h5" in the provided directory, and will throw exceptions where c-style error codes would be returned.
+     * Creates a BiosimulationsHdf5File named "reports.h5" in the provided directory, and will throw exceptions where C-style error codes would be returned.
*
* @param parentDir the directory to put the Hdf5 file inside.
*/
- public Hdf5File(File parentDir) { //"/home/ldrescher/VCell/hdf5Rebuild/testingDir"
+ public BiosimulationsHdf5File(File parentDir) { //"/home/ldrescher/VCell/hdf5Rebuild/testingDir"
this(parentDir, true);
}
/**
- * The main constructor for Hdf5File. Note the special interpretation of allowExceptions.
+ * The main constructor for BiosimulationsHdf5File. Note the special interpretation of allowExceptions.
*
* @param parentDir the directory to put the Hdf5 file inside of.
* @param allowExceptions Whether to interperate C-style error codes as exceptions or let the user handle them.
* Hdf5 Library-produced exceptions will still be generated regardless.
*/
- public Hdf5File(File parentDir, boolean allowExceptions){
+ public BiosimulationsHdf5File(File parentDir, boolean allowExceptions){
this(parentDir, "reports.h5", allowExceptions);
}
@@ -90,14 +87,14 @@ public void printErrorStack() {
}
/**
- * Complete constructor of `Hdf5File`
+ * Complete constructor of `BiosimulationsHdf5File`
*
* @param parentDir the directory to put the Hdf5 file inside.
* @param filename name of the h5 file to write.
* @param allowExceptions Whether to interpret C-style error codes as exceptions or let the user handle them.
* Hdf5 Library-produced exceptions will still be generated regardless.
*/
- public Hdf5File(File parentDir, String filename, boolean allowExceptions){
+ public BiosimulationsHdf5File(File parentDir, String filename, boolean allowExceptions){
this();
this.javaFileTarget = new File(parentDir, filename);
this.allowExceptions = allowExceptions;
@@ -109,8 +106,8 @@ public Hdf5File(File parentDir, String filename, boolean allowExceptions){
* @throws HDF5LibraryException
* @throws IOException
*/
- public void open() throws HDF5Exception, IOException {
- this.open(Hdf5File.needToCreateFile);
+ public void open() throws BiosimulationsHdf5Writer.BiosimulationsHdfWriterException, IOException {
+ this.open(BiosimulationsHdf5File.needToCreateFile);
}
/**
@@ -121,24 +118,29 @@ public void open() throws HDF5Exception, IOException {
* @throws HDF5LibraryException
* @throws IOException
*/
- public int open(boolean overwrite) throws HDF5Exception, IOException {
- String path = this.javaFileTarget.getCanonicalPath();
- if (overwrite) this.fileId = H5.H5Fopen(path, H5F_ACC_RDWR, H5P_DEFAULT);
- else this.fileId = H5.H5Fcreate(path, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
- if (this.fileId < 0){
- String typeOfOperation = overwrite? "opened [H5Fopen]" : "created [H5Fopened]";
+ public int open(boolean overwrite) throws BiosimulationsHdfWriterException, IOException {
+ try {
+ String path = this.javaFileTarget.getCanonicalPath();
+ if (overwrite) this.fileId = H5.H5Fopen(path, H5F_ACC_RDWR, H5P_DEFAULT);
+ else this.fileId = H5.H5Fcreate(path, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ } catch (HDF5LibraryException e) {
+ throw new BiosimulationsHdf5Writer.BiosimulationsHdfWriterException("HDF5 Library Exception", e);
+ }
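+        // even without a library exception, H5Fopen/H5Fcreate signal failure through a
+        // negative file id, so that error-code path is still checked below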
+ if (this.fileId < 0) {
+            String typeOfOperation = overwrite ? "opened [H5Fopen]" : "created [H5Fcreate]";
String message = "HDF5 File could not be " + typeOfOperation + "; Something is preventing this.";
- HDF5Exception e = new HDF5Exception(message); // investigate if Hdf5Exception would be more appropriate
+ IOException e = new IOException(message); // investigate if Hdf5Exception would be more appropriate
logger.warn("Hdf5 error occured", e);
if (this.allowExceptions) throw e;
}
+
this.isOpen = true;
return this.fileId;
}
/**
* Add a group to the Hdf5 file based on a given path. If the group exists, the group_id will be returned.
- *
+ *
* @param groupPath the unix-style path *relative from the Hdf5 root (known as "/")* to place the group at
* while hdf5 does allow with relative pathing from other groups, VCell does not support that at this time.
* @return the group ID
@@ -378,24 +380,28 @@ public int closeDataset(int datasetId) throws HDF5Exception {
return H5.H5Dclose(datasetId);
}
- public int close() throws HDF5Exception {
+ public int close() throws BiosimulationsHdfWriterException {
if (!this.isOpen) return 0;
//this.fileId = HDF5Constants.H5I_INVALID_HID;
this.isOpen = false;
- // Don't forget to close datasets (and their dataspaces)
- for (int datasetId : this.datasetToDataspaceMap.keySet()){
- this.closeDataset(datasetId);
- }
+ try {
+ // Don't forget to close datasets (and their dataspaces)
+ for (int datasetId : this.datasetToDataspaceMap.keySet()){
+ this.closeDataset(datasetId);
+ }
- // Don't forget to close all groups
- for (int groupId : this.idToPathMap.keySet()){
- H5.H5Gclose(groupId);
- }
- this.idToPathMap.clear();
- this.pathToIdMap.clear();
+ // Don't forget to close all groups
+ for (int groupId : this.idToPathMap.keySet()){
+ H5.H5Gclose(groupId);
+ }
+ this.idToPathMap.clear();
+ this.pathToIdMap.clear();
- return this.fileId < 0 ? this.fileId : (this.fileId = H5.H5Fclose(this.fileId));
+ return this.fileId < 0 ? this.fileId : (this.fileId = H5.H5Fclose(this.fileId));
+ } catch (HDF5Exception e) {
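+            // wrap the library's HDF5Exception so close() only surfaces the CLI-level checked type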
+            throw new BiosimulationsHdfWriterException(e.getMessage(), e);
+ }
}
diff --git a/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/Hdf5Writer.java b/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/BiosimulationsHdf5Writer.java
similarity index 81%
rename from vcell-cli/src/main/java/org/vcell/cli/run/hdf5/Hdf5Writer.java
rename to vcell-cli/src/main/java/org/vcell/cli/run/hdf5/BiosimulationsHdf5Writer.java
index 42a66923ad..041ea32b94 100644
--- a/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/Hdf5Writer.java
+++ b/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/BiosimulationsHdf5Writer.java
@@ -1,48 +1,53 @@
package org.vcell.cli.run.hdf5;
import cbit.vcell.resource.NativeLib;
-import ncsa.hdf.hdf5lib.H5;
import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
-import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
-
import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.jlibsedml.Report;
import org.jlibsedml.SedML;
import org.vcell.cli.run.hdf5.Hdf5DataPreparer.Hdf5PreparedData;
-import java.util.*;
+
import java.io.File;
import java.io.IOException;
-
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
+import java.util.Arrays;
+import java.util.Set;
/**
* Static class for writing out Hdf5 formatted files
*/
-public class Hdf5Writer {
+public class BiosimulationsHdf5Writer {
+
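+    /**
+     * Checked wrapper for failures raised by the underlying HDF5 library, so CLI callers do not
+     * need to depend on ncsa.hdf.hdf5lib exception types directly.
+     */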
+ public static class BiosimulationsHdfWriterException extends Exception {
+ private static final long serialVersionUID = 1L;
+ public BiosimulationsHdfWriterException(String message, Exception e) {
+ super(message, e);
+ }
+ }
- private final static Logger logger = LogManager.getLogger(Hdf5Writer.class);
+ private final static Logger logger = LogManager.getLogger(BiosimulationsHdf5Writer.class);
- private Hdf5Writer(){} // Static class = no instances allowed
+ private BiosimulationsHdf5Writer(){} // Static class = no instances allowed
/**
* Writes an HDF5 formatted file given a hdf5FileWrapper and a destination to write the file to.
*
* @param hdf5ExecutionResults the wrapper of hdf5 relevant data
* @param outDirForCurrentSedml the directory to place the report file into, NOT the report file itself.
- * @throws HDF5Exception if there is an expection thrown from hdf5 while using the library.
+     * @throws BiosimulationsHdfWriterException if there is an exception thrown from HDF5 while using the library.
* @throws IOException if the computer encounteres an unexepcted system IO problem
*/
- public static void writeHdf5(HDF5ExecutionResults hdf5ExecutionResults, File outDirForCurrentSedml) throws HDF5Exception, IOException {
+ public static void writeHdf5(HDF5ExecutionResults hdf5ExecutionResults, File outDirForCurrentSedml) throws BiosimulationsHdfWriterException, IOException {
boolean didFail = false;
- Hdf5File masterHdf5;
+ BiosimulationsHdf5File masterHdf5;
// Boot Hdf5 Library
NativeLib.HDF5.load();
// Create and open the Hdf5 file
logger.info("Creating hdf5 file `reports.h5` in" + outDirForCurrentSedml.getAbsolutePath());
- masterHdf5 = new Hdf5File(outDirForCurrentSedml);
+ masterHdf5 = new BiosimulationsHdf5File(outDirForCurrentSedml);
masterHdf5.open();
try {
@@ -102,10 +107,11 @@ else if (data.dataSource instanceof Hdf5SedmlResultsSpatial)
}
}
}
- } catch (Exception e) { // Catch runtime exceptions
+        } catch (HDF5Exception e) { // Catch HDF5 library errors and translate them into the checked wrapper
didFail = true;
- logger.error("Error encountered while writing to BioSim-style HDF5.", e);
- throw e;
+ String message = "Error encountered while writing to BioSim-style HDF5.";
+ logger.error(message, e);
+ throw new BiosimulationsHdfWriterException(message, e);
} finally {
try {
final Level errorLevel = didFail ? Level.ERROR : Level.INFO;
@@ -115,10 +121,11 @@ else if (data.dataSource instanceof Hdf5SedmlResultsSpatial)
// Close up the file; lets deliver what we can write and flush out.
masterHdf5.close();
logger.log(errorLevel, message);
- } catch (HDF5LibraryException e){
+ } catch (BiosimulationsHdfWriterException e){
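+                // a failure while closing is only rethrown if the write phase itself succeeded;
+                // otherwise the original write error thrown above takes precedence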
masterHdf5.printErrorStack();
- logger.error("HDF5 Library Exception encountered while writing out to HDF5 file; Check std::err for stack");
- if (!didFail) throw e;
+ String message = "HDF5 Library Exception encountered while writing out to HDF5 file; Check std::err for stack";
+ logger.error(message);
+ if (!didFail) throw new BiosimulationsHdfWriterException(message, e);
} catch (Exception e) {
e.printStackTrace();
}
diff --git a/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/Hdf5DataPreparer.java b/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/Hdf5DataPreparer.java
index 4111bc64b0..7351d92b94 100644
--- a/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/Hdf5DataPreparer.java
+++ b/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/Hdf5DataPreparer.java
@@ -1,17 +1,18 @@
package org.vcell.cli.run.hdf5;
-import java.util.*;
-import java.util.stream.Collectors;
-
-import org.jlibsedml.*;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
+import org.jlibsedml.DataSet;
+import org.jlibsedml.Report;
+import org.jlibsedml.VariableSymbol;
+
+import java.util.*;
/**
* Static data preparation class for Hdf5 files
*/
public class Hdf5DataPreparer {
- private final static Logger logger = LogManager.getLogger(Hdf5File.class);
+ private final static Logger logger = LogManager.getLogger(Hdf5DataPreparer.class);
public static class Hdf5PreparedData{
public String sedmlId;
@@ -37,7 +38,7 @@ public static double[] getSpatialHdf5Attribute_Times(Report report, Hdf5SedmlRes
}
/**
- * Reads a `Hdf5DatasetWrapper` contents and generates `Hdf5PreparedData` with spatial data for writing out to Hdf5 format via Hdf5Writer
+     * Reads an `Hdf5DatasetWrapper`'s contents and generates `Hdf5PreparedData` with spatial data for writing out to HDF5 format via BiosimulationsHdf5Writer
*
* @param datasetWrapper the data relevant to an HDF5 output file
* @return the prepared spatial data
@@ -111,7 +112,7 @@ public static Hdf5PreparedData prepareSpatialData (Hdf5SedmlResults datasetWrapp
}
/**
- * Reads a `Hdf5DatasetWrapper` contents and generates `Hdf5PreparedData` with nonspatial data for writing out to Hdf5 format via Hdf5Writer
+     * Reads an `Hdf5DatasetWrapper`'s contents and generates `Hdf5PreparedData` with nonspatial data for writing out to HDF5 format via BiosimulationsHdf5Writer
*
* @param datasetWrapper the data relevant to an hdf5 output file
* @return the prepared nonspatial data
diff --git a/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/NonspatialResultsConverter.java b/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/NonspatialResultsConverter.java
index a5d1dc9c62..4969a57eae 100644
--- a/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/NonspatialResultsConverter.java
+++ b/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/NonspatialResultsConverter.java
@@ -1,33 +1,19 @@
package org.vcell.cli.run.hdf5;
-import cbit.vcell.solver.Simulation;
import cbit.vcell.parser.ExpressionException;
+import cbit.vcell.solver.Simulation;
import cbit.vcell.solver.TempSimulation;
-import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
-
-import org.jlibsedml.SedML;
-import org.jlibsedml.AbstractTask;
-import org.jlibsedml.Output;
-import org.jlibsedml.Report;
-import org.jlibsedml.Variable;
-import org.jlibsedml.DataGenerator;
-import org.jlibsedml.RepeatedTask;
-import org.jlibsedml.Task;
-import org.jlibsedml.SubTask;
-import org.jlibsedml.UniformTimeCourse;
-import org.jlibsedml.DataSet;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.jlibsedml.*;
import org.jlibsedml.execution.IXPathToVariableIDResolver;
import org.jlibsedml.modelsupport.SBMLSupport;
import org.vcell.cli.PythonStreamException;
import org.vcell.cli.run.PythonCalls;
import org.vcell.cli.run.Status;
-import org.vcell.sbml.vcell.SBMLNonspatialSimResults;
import org.vcell.cli.run.TaskJob;
-import org.vcell.util.DataAccessException;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
+import org.vcell.sbml.vcell.SBMLNonspatialSimResults;
-import java.io.*;
import java.nio.file.Paths;
import java.util.*;
public class NonspatialResultsConverter {
diff --git a/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/SpatialResultsConverter.java b/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/SpatialResultsConverter.java
index 518f9b8436..ab66b7b837 100644
--- a/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/SpatialResultsConverter.java
+++ b/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/SpatialResultsConverter.java
@@ -1,10 +1,9 @@
package org.vcell.cli.run.hdf5;
-import cbit.vcell.parser.ExpressionException;
import cbit.vcell.solver.TempSimulation;
-import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
-
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.jlibsedml.*;
import org.jlibsedml.execution.IXPathToVariableIDResolver;
import org.jlibsedml.modelsupport.SBMLSupport;
@@ -12,17 +11,14 @@
import org.vcell.cli.run.PythonCalls;
import org.vcell.cli.run.Status;
import org.vcell.cli.run.TaskJob;
-import org.vcell.util.DataAccessException;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-import java.io.*;
+import java.io.File;
import java.nio.file.Paths;
import java.util.*;
public class SpatialResultsConverter {
private final static Logger logger = LogManager.getLogger(SpatialResultsConverter.class);
- public static Map> convertSpatialResultsToSedmlFormat(SedML sedml, Map spatialResultsHash, Map taskToSimulationMap, String sedmlLocation, String outDir) throws DataAccessException, IOException, HDF5Exception, ExpressionException, PythonStreamException {
+ public static Map> convertSpatialResultsToSedmlFormat(SedML sedml, Map spatialResultsHash, Map taskToSimulationMap, String sedmlLocation, String outDir) throws PythonStreamException {
Map> results = new LinkedHashMap<>();
List allReports = SpatialResultsConverter.getReports(sedml.getOutputs());
diff --git a/vcell-cli/src/test/java/org/vcell/cli/run/hdf5/Hdf5WriterTest.java b/vcell-cli/src/test/java/org/vcell/cli/run/hdf5/BiosimulationsHdf5WriterTest.java
similarity index 93%
rename from vcell-cli/src/test/java/org/vcell/cli/run/hdf5/Hdf5WriterTest.java
rename to vcell-cli/src/test/java/org/vcell/cli/run/hdf5/BiosimulationsHdf5WriterTest.java
index e793295bac..942f76aa1a 100644
--- a/vcell-cli/src/test/java/org/vcell/cli/run/hdf5/Hdf5WriterTest.java
+++ b/vcell-cli/src/test/java/org/vcell/cli/run/hdf5/BiosimulationsHdf5WriterTest.java
@@ -3,20 +3,22 @@
import cbit.vcell.resource.NativeLib;
import cbit.vcell.resource.PropertyLoader;
import com.google.common.io.Files;
-import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
import org.jlibsedml.DataSet;
import org.jlibsedml.Report;
-import org.jlibsedml.SedML;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.vcell.util.VCellUtilityHub;
import java.io.File;
import java.io.IOException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import static org.vcell.cli.run.hdf5.BiosimulationsHdf5Writer.BiosimulationsHdfWriterException;
@Tag("Fast")
-public class Hdf5WriterTest {
+public class BiosimulationsHdf5WriterTest {
public static HDF5ExecutionResults createExampleData() {
@@ -95,12 +97,12 @@ public static HDF5ExecutionResults createExampleData() {
}
@Test
- public void test() throws HDF5Exception, IOException {
+ public void test() throws BiosimulationsHdfWriterException, IOException {
PropertyLoader.setProperty(PropertyLoader.installationRoot, new File("..").getAbsolutePath());
VCellUtilityHub.startup(VCellUtilityHub.MODE.CLI);
NativeLib.HDF5.load();
- HDF5ExecutionResults exampleHdf5FileWrapper = Hdf5WriterTest.createExampleData();
+ HDF5ExecutionResults exampleHdf5FileWrapper = BiosimulationsHdf5WriterTest.createExampleData();
File dir = Files.createTempDir();
- Hdf5Writer.writeHdf5(exampleHdf5FileWrapper, dir);
+ BiosimulationsHdf5Writer.writeHdf5(exampleHdf5FileWrapper, dir);
}
}
\ No newline at end of file
diff --git a/vcell-client/src/main/java/cbit/plot/gui/Plot2DDataPanel.java b/vcell-client/src/main/java/cbit/plot/gui/Plot2DDataPanel.java
index a9b00ec6a2..0c669b6f08 100644
--- a/vcell-client/src/main/java/cbit/plot/gui/Plot2DDataPanel.java
+++ b/vcell-client/src/main/java/cbit/plot/gui/Plot2DDataPanel.java
@@ -9,53 +9,31 @@
*/
package cbit.plot.gui;
-import java.awt.Component;
-import java.awt.event.ActionEvent;
-import java.awt.event.ActionListener;
-import java.awt.event.MouseEvent;
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.ListIterator;
-import java.util.Vector;
-
-import javax.swing.ButtonGroup;
-import javax.swing.JFileChooser;
-import javax.swing.JFrame;
-import javax.swing.JMenuItem;
-import javax.swing.JOptionPane;
-import javax.swing.JPanel;
-import javax.swing.JPopupMenu;
-import javax.swing.JRadioButton;
-import javax.swing.KeyStroke;
-import javax.swing.table.DefaultTableModel;
-import org.apache.commons.lang3.StringUtils;
+import cbit.plot.Plot2D;
+import cbit.vcell.client.UserMessage;
+import cbit.vcell.desktop.VCellTransferable;
+import cbit.vcell.math.ReservedVariable;
+import cbit.vcell.parser.Expression;
+import cbit.vcell.parser.SymbolTableEntry;
+import cbit.vcell.simdata.UiTableExporterToHDF5;
+import cbit.vcell.solver.Simulation;
+import com.google.common.io.Files;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.vcell.util.UtilCancelException;
import org.vcell.util.gui.DialogUtils;
import org.vcell.util.gui.NonEditableDefaultTableModel;
import org.vcell.util.gui.ScrollTable;
-import org.vcell.util.gui.SimpleUserMessage;
import org.vcell.util.gui.SpecialtyTableRenderer;
-import com.google.common.io.Files;
+import javax.swing.*;
+import java.awt.*;
+import java.awt.event.ActionEvent;
+import java.awt.event.ActionListener;
+import java.awt.event.MouseEvent;
+import java.io.File;
-import cbit.plot.Plot2D;
-import cbit.vcell.client.UserMessage;
-import cbit.vcell.desktop.VCellTransferable;
-import cbit.vcell.math.ReservedVariable;
-import cbit.vcell.parser.Expression;
-import cbit.vcell.parser.SimpleSymbolTable;
-import cbit.vcell.parser.SymbolTableEntry;
-import cbit.vcell.simdata.Hdf5Utils;
-import cbit.vcell.simdata.Hdf5Utils.HDF5WriteHelper;
-import cbit.vcell.solver.Simulation;
-import ncsa.hdf.hdf5lib.H5;
-import ncsa.hdf.hdf5lib.HDF5Constants;
-import javax.swing.JLabel;
-import java.awt.BorderLayout;
/**
* Insert the type's description here.
* Creation date: (4/19/2001 12:33:58 PM)
@@ -305,12 +283,7 @@ public void setSimulation(Simulation simulation) {
private synchronized void copyCells(CopyAction copyAction) {
copyCells0(copyAction,false);
}
-/**
- * Insert the method's description here.
- * Creation date: (4/20/2001 4:52:52 PM)
- * @param actionCommand java.lang.String
- * @return java.lang.String
- */
+
private synchronized void copyCells0(CopyAction copyAction,boolean isHDF5) {
try{
int r = 0;
@@ -375,438 +348,49 @@ else if (copyAction == CopyAction.copyrow) {
return;
}
}
- int hdf5FileID = -1;//Used if HDF5 format
- File hdf5TempFile = null;
-// Hdf5Utils.HDF5WriteHelper help0 = null;
- try {
- hdf5TempFile = File.createTempFile("plot2D", ".hdf");
- //System.out.println("/home/vcell/Downloads/hdf5/HDFView/bin/HDFView "+hdf5TempFile.getAbsolutePath());
- hdf5FileID = H5.H5Fcreate(hdf5TempFile.getAbsolutePath(), HDF5Constants.H5F_ACC_TRUNC,HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
- ArrayList> paramScanJobs = new ArrayList>();
- if(!bHistogram && !getScrollPaneTable().getColumnName(0).equals((xVarColumnName==null?ReservedVariable.TIME.getName():xVarColumnName))) {
- throw new Exception("Expecting first column in table to have name '"+xVarColumnName+"'");
- }
- //Add arraylist for the parameter scan job, add the index of the xval column
- for(int i=0;i tempAL = new ArrayList();
- paramScanJobs.add(tempAL);
- break;
- } else if(getScrollPaneTable().getColumnName(i).equals((xVarColumnName==null?ReservedVariable.TIME.getName():xVarColumnName))){
- if(i==0) {
- ArrayList tempAL = new ArrayList();
- tempAL.add(i);
- paramScanJobs.add(tempAL);
- }else {
- String str1 = getScrollPaneTable().getColumnName(i-1);
- int str1Index = str1.lastIndexOf("Set ");
- String str2 = getScrollPaneTable().getColumnName(i+1);
- int str2Index = str2.lastIndexOf("Set ");
- if(!str1.substring(str1Index).equals(str2.substring(str2Index))) {
- ArrayList tempAL = new ArrayList();
- tempAL.add(i);
- paramScanJobs.add(tempAL);
- }else {
- continue;
- }
- }
- }
- }
- //Add selected columns to the proper paramscan arraylist
- for(int j=0;j= paramScanJobs.get(k).get(0) && ((k+1) == paramScanJobs.size() || columns[j] < paramScanJobs.get(k+1).get(0))) {
- paramScanJobs.get(k).add(columns[j]);
-// System.out.println("HDF5frm"+getScrollPaneTable().getColumnName(columns[j]));
- }
- }
- }
+ int columnCount = getScrollPaneTable().getColumnCount();
+ int rowCount = getScrollPaneTable().getRowCount();
+ String[] columnNames = new String[columnCount];
+ for (int i=0; i listIterator = paramScanJobs.get(k).listIterator();
- if(paramScanJobs.get(k).size() > 1) {// keep x val is there more selections for this set
- listIterator.next();
- }
- while(listIterator.hasNext()) {
- final Integer columIndex = listIterator.next();
- boolean bFound = false;
- for(int j=0;j> listIterator = paramScanJobs.listIterator();
-// while(listIterator.hasNext()) {
-// final ArrayList next = listIterator.next();
-// if(next.size() == 0) {
-// listIterator.remove();
-// }
-//// selectedColCount+= next.size();
-// }
- //Write out the data to HDF5 file
- for(int k=0;k dataTypes = new ArrayList();
- ArrayList dataIDs = new ArrayList();
- ArrayList dataShapes = new ArrayList();
- ArrayList dataLabels = new ArrayList();
- ArrayList dataNames = new ArrayList();
- ArrayList paramNames = new ArrayList();
- ArrayList paramValues = new ArrayList();
- boolean bParamsDone = false;
- for(int cols=0;cols paramScanJobCols = null;
-// for(int i=0;i();
-// }else {
-// continue;
-// }
-// for(int j=0;j= lastXCol) {
-// paramScanJobCols.add(columns[j]);
-// }
-// }
-// if(paramScanJobCols.size() > 0) {
-// paramScanJobs.add(paramScanJobCols);
-// }
-// }
-//
-// for(int i=0;i xColumns = new ArrayList();
-// //Check if multiple columns with time (happens when viewing 'Time Plot with Multiple Parameter Value-sets')
-//// ArrayList nonTColumns = new ArrayList();
-// for(int i=0;i 0) {
-//// hdfValues = new double[rows.length*nonTColumns.size()];
-//// int cnt=0;
-//// for(int j=0;j values = new ArrayList();
-
- @Override
- public int getRowCount() {
- return values.size();
- }
-
- @Override
- public int getColumnCount() {
- return cols.length;
- }
-
-// private boolean isIndexColumn(int columnIndex)
-// {
-// String col = cols.get(columnIndex);
-// return col.equalsIgnoreCase("i") || col.equalsIgnoreCase("j")
-// || col.equalsIgnoreCase("k") || col.equalsIgnoreCase("index");
-// }
- @Override
- public Object getValueAt(int rowIndex, int columnIndex) {
- Number d = values.get(rowIndex)[columnIndex];
- return d;
- }
-
- @Override
- public String getColumnName(int column) {
- return cols[column];
- }
-
- public void setData(String[] cols, List values)
- {
- this.cols = cols;
- this.values = values;
- fireTableDataChanged();
- }
-
- public void refreshTable()
- {
- fireTableStructureChanged();
- fireTableDataChanged();
- }
-
- public void clear()
- {
- cols = new String[0];
- values.clear();
- refreshTable();
- }
- }
-
- private static class SolTableCellRenderer extends DefaultTableCellRenderer
- {
-
- @Override
- public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row,
- int column) {
- // TODO Auto-generated method stub
- Component label = super.getTableCellRendererComponent(table, value, isSelected, hasFocus,
- row, column);
- setForeground(Color.black);
- if (!isSelected)
- {
- if (value instanceof Number)
- {
- if (((Number) value).doubleValue() == SimDataConstants.BASEFAB_REAL_SETVAL)
- {
- setForeground(Color.gray);
- }
- }
- }
- return label;
- }
-
- }
-
- private static class SolTableModel extends AbstractTableModel
- {
- private static final int COL_INDEX = 0;
- private static final int COL_VALUE = 1;
- private final static String[] cols = {"index", "value"};
- private double[] values = null;
-
- @Override
- public int getRowCount() {
- return values == null ? 0 : values.length;
- }
-
- @Override
- public int getColumnCount() {
- return cols.length;
- }
-
- @Override
- public Class> getColumnClass(int columnIndex) {
- return Number.class;
- }
-
- @Override
- public Object getValueAt(int rowIndex, int columnIndex) {
- if (columnIndex == COL_INDEX)
- {
- return rowIndex;
- }
- return values[rowIndex];
- }
-
- @Override
- public String getColumnName(int column) {
- return cols[column];
- }
-
- public void setValues(double[] v)
- {
- values = v;
- fireTableDataChanged();
- }
-
- public void clear()
- {
- setValues(new double[0]);
- }
- }
-
- private static class TimePlotTableModel extends AbstractTableModel
- {
- private static final int COL_TIME = 0;
- private static final int COL_VALUE = 1;
- private final static String[] cols = {"time", "value"};
- private double[] values = null;
- private double[] times = null;
-
- @Override
- public int getRowCount() {
- return values == null ? 0 : values.length;
- }
-
- @Override
- public int getColumnCount() {
- return cols.length;
- }
-
- @Override
- public Object getValueAt(int rowIndex, int columnIndex) {
- return columnIndex == COL_TIME ? times[rowIndex] : values[rowIndex];
- }
-
- @Override
- public String getColumnName(int column) {
- return cols[column];
- }
-
- public void setTimesAndValues(double[] t, double[] v)
- {
- times = t;
- values = v;
- fireTableDataChanged();
- }
- public void clear()
- {
- setTimesAndValues(new double[0], new double[0]);
- }
- }
- private class EventListener implements ActionListener, ListSelectionListener
- {
- @Override
- public void actionPerformed(ActionEvent e) {
-
- if (e.getSource() == resetButton)
- {
- reset();
- }
- else if (e.getSource() == okButton)
- {
- retrieveVariablesAndTimes();
- }
- else if (e.getSource() == timeComboBox)
- {
- retrieveData();
- }
- else if (e.getSource() == timePlotButton)
- {
- retrieveTimePlot();
- }
- else if (e.getSource() == exitButton)
- {
- System.exit(0);
- }
- }
-
- @Override
- public void valueChanged(ListSelectionEvent e) {
- if (e.getValueIsAdjusting())
- {
- return;
- }
- if (e.getSource() == varList)
- {
- retrieveData();
- }
- else if (e.getSource() == solTable.getSelectionModel())
- {
- timePlotButton.setEnabled(solTable.getSelectedRowCount() == 1);
- }
- }
- }
-
- private JPanel mainPanel = new JPanel();
- private JList varList = new JList();
- private JButton okButton = new JButton("Go");
- private JButton resetButton = new JButton("Reset");
- private JButton timePlotButton = new JButton("Time Plot");
- private JButton exitButton = new JButton("Exit");
- private JTable solTable= new JTable();
- private SolTableModel solTableModel = new SolTableModel();
- private JTable timePlotTable= new JTable();
- private TimePlotTableModel timePlotTableModel = new TimePlotTableModel();
- private TextFieldAutoCompletion dataDirTextField = new TextFieldAutoCompletion();
- private TextFieldAutoCompletion userNameTextField = new TextFieldAutoCompletion();
- private TextFieldAutoCompletion simIdField = new TextFieldAutoCompletion();
-// private JPasswordField remotePasswordField = new JPasswordField();
- private JComboBox timeComboBox = new JComboBox();
- private JLabel solLabel = new JLabel("Solution");
- private JLabel timePlotLabel = new JLabel("Time Plot");
- private SimulationDataSpatialHdf5 simData = null;
- private EventListener listener = new EventListener();
- private Set simIds = new HashSet();
- private Set usernames = new HashSet();
- private Set datadirs = new HashSet();
- private JTabbedPane dataTabbedPane = new JTabbedPane();
- private JTable meshMetricsTable = new JTable();
- private MeshMetricsTableModel meshMetricsTableModel = new MeshMetricsTableModel();
- private static boolean debug = false;
- private JPanel timePlotPanel;
- private JTextField maxErrorTextField = new JTextField();
- private JTextField l2ErrorTextField = new JTextField();
- private JTextField meanTextField = new JTextField();
- private JTextField sumVolFracTextField = new JTextField();
- private JPanel errorPanel = null;
-
- private ChomboSimpleDataViewer()
- {
- setTitle("Chombo Simple Data Viewer");
- setDefaultCloseOperation(EXIT_ON_CLOSE);
- initialize();
- }
-
- private JPanel getErrorPanel()
- {
- if (errorPanel == null)
- {
- errorPanel = new JPanel();
- errorPanel.setLayout(new GridBagLayout());
- errorPanel.setBorder(GuiConstants.TAB_PANEL_BORDER);
-
- meanTextField.setEditable(false);
- sumVolFracTextField.setEditable(false);
- maxErrorTextField.setEditable(false);
- l2ErrorTextField.setEditable(false);
-
- GridBagConstraints gbc = new GridBagConstraints();
- gbc.gridx = 0;
- gbc.gridy = 0;
- gbc.insets = new Insets(2, 2, 2, 2);
- JLabel label = new JLabel("Mean");
- label.setFont(label.getFont().deriveFont(Font.BOLD));
- errorPanel.add(label, gbc);
-
- gbc = new GridBagConstraints();
- gbc.gridx = 1;
- gbc.gridy = 0;
- gbc.fill = GridBagConstraints.HORIZONTAL;
- gbc.weightx = 1.0;
- gbc.insets = new Insets(2, 2, 2, 2);
- errorPanel.add(meanTextField, gbc);
-
- gbc = new GridBagConstraints();
- gbc.gridx = 2;
- gbc.gridy = 0;
- gbc.insets = new Insets(2, 2, 2, 2);
- label = new JLabel("Vol Frac Sum");
- label.setFont(label.getFont().deriveFont(Font.BOLD));
- errorPanel.add(label, gbc);
-
- gbc = new GridBagConstraints();
- gbc.gridx = 3;
- gbc.gridy = 0;
- gbc.fill = GridBagConstraints.HORIZONTAL;
- gbc.weightx = 1.0;
- gbc.insets = new Insets(2, 2, 2, 2);
- errorPanel.add(sumVolFracTextField, gbc);
-
- gbc = new GridBagConstraints();
- gbc.gridx = 0;
- gbc.gridy = 1;
- gbc.insets = new Insets(2, 2, 2, 2);
- label = new JLabel("Max Error");
- label.setFont(label.getFont().deriveFont(Font.BOLD));
- errorPanel.add(label, gbc);
-
- gbc = new GridBagConstraints();
- gbc.gridx = 1;
- gbc.gridy = 1;
- gbc.fill = GridBagConstraints.HORIZONTAL;
- gbc.weightx = 1.0;
- gbc.insets = new Insets(2, 2, 2, 2);
- errorPanel.add(maxErrorTextField, gbc);
-
- gbc = new GridBagConstraints();
- gbc.gridx = 2;
- gbc.gridy = 1;
- gbc.insets = new Insets(2, 2, 2, 2);
- label = new JLabel("Relative L2 Error");
- label.setFont(label.getFont().deriveFont(Font.BOLD));
- errorPanel.add(label, gbc);
-
- gbc = new GridBagConstraints();
- gbc.gridx = 3;
- gbc.gridy = 1;
- gbc.fill = GridBagConstraints.HORIZONTAL;
- gbc.weightx = 1.0;
- gbc.insets = new Insets(2, 2, 2, 2);
- errorPanel.add(l2ErrorTextField, gbc);
- }
-
- return errorPanel;
- }
-
- private JPanel createSolPanel()
- {
- JPanel solPanel = new JPanel(new GridBagLayout());
-
- int gridy = 0;
- GridBagConstraints gbc = new GridBagConstraints();
- gbc.gridx = 0;
- gbc.gridy = gridy;
- gbc.weightx = 1;
- gbc.fill = GridBagConstraints.HORIZONTAL;
- gbc.insets = new Insets(2, 2, 2, 2);
- gbc.anchor = GridBagConstraints.LINE_START;
- solLabel.setFont(solLabel.getFont().deriveFont(Font.BOLD));
- solPanel.add(solLabel, gbc);
-
- gbc = new GridBagConstraints();
- gbc.gridx = 1;
- gbc.gridy = gridy;
- gbc.insets = new Insets(2, 2, 2, 10);
- gbc.anchor = GridBagConstraints.LINE_END;
- solPanel.add(timePlotButton, gbc);
-
- ++ gridy;
- gbc = new GridBagConstraints();
- gbc.gridx = 0;
- gbc.gridy = gridy;
- gbc.gridwidth = GridBagConstraints.REMAINDER;
- gbc.weightx = 1.0;
- gbc.weighty = 1.0;
- gbc.insets = new Insets(2, 2, 2, 2);
- gbc.fill = GridBagConstraints.BOTH;
- solPanel.add(new JScrollPane(solTable), gbc);
-
- ++ gridy;
- gbc = new GridBagConstraints();
- gbc.gridx = 0;
- gbc.gridy = gridy;
- gbc.weightx = 1.0;
- gbc.gridwidth = GridBagConstraints.REMAINDER;
- gbc.fill = GridBagConstraints.HORIZONTAL;
- gbc.insets = new Insets(2, 2, 2, 2);
- solPanel.add(getErrorPanel(), gbc);
-
- return solPanel;
- }
-
- JFileChooser jFileChooser;
- private ActionListener dataBrowseActionListener = new ActionListener() {
- @Override
- public void actionPerformed(ActionEvent e) {
- if(jFileChooser == null){
- jFileChooser = new JFileChooser();
-// jFileChooser.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY);
- }
- int returnVal = jFileChooser.showOpenDialog(ChomboSimpleDataViewer.this);
-
- if (returnVal == JFileChooser.APPROVE_OPTION){
- File selectedFile = jFileChooser.getSelectedFile();
- String formattedName = null;
- String userName = null;
- if(selectedFile.isFile() && selectedFile.getName().startsWith("SimID_")){
- formattedName = selectedFile.getName();
- userName = selectedFile.getParentFile().getName();
- dataDirTextField.setText(jFileChooser.getSelectedFile().getParentFile().getParentFile().getAbsolutePath());
- }else{
- dataDirTextField.setText(jFileChooser.getSelectedFile().getAbsolutePath());
- }
- if(formattedName != null){
- StringTokenizer st = new StringTokenizer(formattedName,"_");
- st.nextToken();
- simIdField.setText(st.nextToken());
- if(userName != null){
- userNameTextField.setText(userName);
- }
- }
- } else {
- return;
- }
- }
- };
- private JPanel createInputPanel()
- {
- JPanel inputPanel = new JPanel(new GridBagLayout());
- inputPanel.setBorder(BorderFactory.createTitledBorder(GuiConstants.TAB_PANEL_BORDER, "Input"));
- int gridy = 0;
- GridBagConstraints gbc = new GridBagConstraints();
- gbc.gridx = 0;
- gbc.gridy = gridy;
- gbc.insets = new Insets(2, 2, 2, 2);
- gbc.anchor = GridBagConstraints.LINE_END;
- JLabel label = new JLabel("User");
- label.setFont(label.getFont().deriveFont(Font.BOLD));
- inputPanel.add(label, gbc);
-
- gbc = new GridBagConstraints();
- gbc.gridx = 1;
- gbc.gridy = gridy;
- gbc.weightx = 1;
- gbc.insets = new Insets(2, 2, 2, 2);
- gbc.fill = GridBagConstraints.HORIZONTAL;
- inputPanel.add(userNameTextField, gbc);
-
- gridy ++;
- gbc = new GridBagConstraints();
- gbc.gridx = 0;
- gbc.gridy = gridy;
- gbc.insets = new Insets(2, 2, 2, 2);
- gbc.anchor = GridBagConstraints.LINE_END;
- JButton dataBrowseButton = new JButton("Data Dir");
- dataBrowseButton.setFont(label.getFont().deriveFont(Font.BOLD));
- inputPanel.add(dataBrowseButton, gbc);
- dataBrowseButton.addActionListener(dataBrowseActionListener);
-
- gbc = new GridBagConstraints();
- gbc.gridx = 1;
- gbc.gridy = gridy;
- gbc.insets = new Insets(2, 2, 2, 2);
- gbc.fill = GridBagConstraints.HORIZONTAL;
- inputPanel.add(dataDirTextField, gbc);
-
- gridy ++;
- gbc = new GridBagConstraints();
- gbc.gridx = 0;
- gbc.gridy = gridy;
- gbc.insets = new Insets(2, 2, 2, 2);
- gbc.anchor = GridBagConstraints.LINE_END;
- label = new JLabel("Sim ID");
- label.setFont(label.getFont().deriveFont(Font.BOLD));
- inputPanel.add(label, gbc);
-
- gbc = new GridBagConstraints();
- gbc.gridx = 1;
- gbc.gridy = gridy;
- gbc.insets = new Insets(2, 2, 2, 2);
- gbc.fill = GridBagConstraints.HORIZONTAL;
- inputPanel.add(simIdField, gbc);
-
-// gridy ++;
-// gbc = new GridBagConstraints();
-// gbc.gridx = 0;
-// gbc.gridy = gridy;
-// gbc.insets = new Insets(2, 2, 2, 2);
-// gbc.anchor = GridBagConstraints.LINE_END;
-// label = new JLabel("RmtPW");
-// label.setFont(label.getFont().deriveFont(Font.BOLD));
-// inputPanel.add(label, gbc);
-//
-// gbc = new GridBagConstraints();
-// gbc.gridx = 1;
-// gbc.gridy = gridy;
-// gbc.insets = new Insets(2, 2, 2, 2);
-// gbc.fill = GridBagConstraints.HORIZONTAL;
-// inputPanel.add(remotePasswordField, gbc);
-
- gridy ++;
- JPanel panel1 = new JPanel(new FlowLayout(FlowLayout.RIGHT));
- panel1.add(resetButton);
- panel1.add(okButton);
- gbc = new GridBagConstraints();
- gbc.gridx = 0;
- gbc.gridy = gridy;
- gbc.gridwidth = 2;
- gbc.weightx = 0.2;
- gbc.fill = GridBagConstraints.HORIZONTAL;
- gbc.insets = new Insets(2, 2, 2, 2);
- inputPanel.add(panel1, gbc);
- return inputPanel;
- }
-
- private JPanel createSelectionPanel()
- {
- JPanel selectionPanel = new JPanel(new GridBagLayout());
- selectionPanel.setBorder(GuiConstants.TAB_PANEL_BORDER);
- int gridy = 0;
- GridBagConstraints gbc = new GridBagConstraints();
- gbc.gridx = 0;
- gbc.gridy = gridy;
- gbc.insets = new Insets(2, 2, 2, 2);
- gbc.anchor = GridBagConstraints.LINE_END;
- JLabel label = new JLabel("Time");
- label.setFont(label.getFont().deriveFont(Font.BOLD));
- selectionPanel.add(label, gbc);
-
- gbc = new GridBagConstraints();
- gbc.gridx = 1;
- gbc.gridy = gridy;
- gbc.weightx = 1.0;
- gbc.anchor = GridBagConstraints.LINE_START;
- gbc.fill = GridBagConstraints.HORIZONTAL;
- gbc.insets = new Insets(2, 2, 2, 2);
- selectionPanel.add(timeComboBox, gbc);
-
- gridy ++;
- gbc = new GridBagConstraints();
- gbc.gridx = 0;
- gbc.gridy = gridy;
- gbc.insets = new Insets(2, 2, 2, 2);
- gbc.anchor = GridBagConstraints.FIRST_LINE_END;
- label = new JLabel("Variable");
- label.setFont(label.getFont().deriveFont(Font.BOLD));
- selectionPanel.add(label, gbc);
-
- gbc = new GridBagConstraints();
- gbc.gridx = 1;
- gbc.gridy = gridy;
- gbc.weighty = 1.0;
- gbc.insets = new Insets(2, 2, 10, 2);
- gbc.fill = GridBagConstraints.BOTH;
- selectionPanel.add(new JScrollPane(varList), gbc);
- return selectionPanel;
- }
-
- private JPanel createTimePlotPanel()
- {
- timePlotPanel = new JPanel(new GridBagLayout());
-
- int gridy = 0;
- GridBagConstraints gbc = new GridBagConstraints();
- gbc.gridx = 0;
- gbc.gridy = gridy;
- gbc.insets = new Insets(2, 2, 2, 2);
- gbc.anchor = GridBagConstraints.LINE_START;
- timePlotLabel.setFont(timePlotLabel.getFont().deriveFont(Font.BOLD));
- timePlotPanel.add(timePlotLabel, gbc);
-
- ++ gridy;
- gbc = new GridBagConstraints();
- gbc.gridx = 0;
- gbc.gridy = gridy;
- gbc.gridwidth = GridBagConstraints.REMAINDER;
- gbc.gridheight = GridBagConstraints.REMAINDER;
- gbc.weightx = 1.0;
- gbc.weighty = 1.0;
- gbc.insets = new Insets(2, 2, 2, 2);
- gbc.fill = GridBagConstraints.BOTH;
- timePlotPanel.add(new JScrollPane(timePlotTable), gbc);
-
- return timePlotPanel;
- }
-
- private void initialize() {
- setSize(1000, 500);
- GeneralGuiUtils.centerOnScreen(this);
-
- solTable.setModel(solTableModel);
- solTable.setDefaultRenderer(Number.class, new SolTableCellRenderer());
- meshMetricsTable.setModel(meshMetricsTableModel);
- timePlotTable.setModel(timePlotTableModel);
- varList.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
-
- mainPanel.setLayout(new GridBagLayout());
-
- JPanel meshMetricsPanel = new JPanel(new BorderLayout());
- meshMetricsPanel.add(new JScrollPane(meshMetricsTable), BorderLayout.CENTER);
-
- dataTabbedPane.addTab("Solution", createSolPanel());
- dataTabbedPane.addTab("Mesh Metrics", meshMetricsPanel);
- dataTabbedPane.addTab("Time Plot", createTimePlotPanel());
-
- int gridy = 0;
- GridBagConstraints gbc = new GridBagConstraints();
- gbc.gridx = 0;
- gbc.gridy = gridy;
- gbc.weightx = 0.4;
- gbc.fill = GridBagConstraints.HORIZONTAL;
- gbc.insets = new Insets(2, 2, 2, 2);
- mainPanel.add(createInputPanel(), gbc);
-
- gbc = new GridBagConstraints();
- gbc.gridx = 1;
- gbc.gridy = gridy;
- gbc.gridwidth = GridBagConstraints.REMAINDER;
- gbc.gridheight = GridBagConstraints.REMAINDER;
- gbc.weightx = 1.0;
- gbc.weighty = 1.0;
- gbc.insets = new Insets(2, 2, 2, 2);
- gbc.fill = GridBagConstraints.BOTH;
- mainPanel.add(dataTabbedPane, gbc);
-
- gridy ++;
- gbc = new GridBagConstraints();
- gbc.gridx = 0;
- gbc.gridy = gridy;
- gbc.insets = new Insets(2, 2, 2, 2);
- gbc.fill = GridBagConstraints.BOTH;
- gbc.weighty = 1.0;
- gbc.weightx = 0.2;
- mainPanel.add(createSelectionPanel(), gbc);
-
- gridy ++;
- gbc = new GridBagConstraints();
- gbc.gridx = 0;
- gbc.gridy = gridy;
- gbc.insets = new Insets(2, 2, 20, 20);
- gbc.anchor = GridBagConstraints.LINE_END;
- mainPanel.add(exitButton, gbc);
-
- add(mainPanel);
- reset();
-
- resetButton.addActionListener(listener);
- okButton.addActionListener(listener);
- timeComboBox.addActionListener(listener);
- exitButton.addActionListener(listener);
- varList.addListSelectionListener(listener);
- timePlotButton.setEnabled(false);
- timePlotButton.addActionListener(listener);
- solTable.getSelectionModel().addListSelectionListener(listener);
-
- dataDirTextField.addMouseListener(new MouseAdapter() {
- @Override
- public void mouseEntered(MouseEvent e) {
- dataDirTextField.setToolTipText(dataDirTextField.getText());
- }
- });
- varList.setCellRenderer(new DefaultListCellRenderer(){
-
- @Override
- public Component getListCellRendererComponent(JList list, Object value, int index, boolean isSelected, boolean cellHasFocus) {
- super.getListCellRendererComponent(list, value, index, isSelected, cellHasFocus);
- if (value instanceof DataSetIdentifier)
- {
- setText(((DataSetIdentifier) value).getName());
- }
- return this;
- }
-
- });
- }
-
- private void reset()
- {
- userNameTextField.setText("boris");
- dataDirTextField.setText("\\\\cfs01.cam.uchc.edu\\ifs\\RAID\\Vcell\\users\\");
-
- if (debug)
- {
- userNameTextField.setText("fgao1");
- dataDirTextField.setText("C:\\chombo\\data\\users\\");
-// simIdField.setText("77396269");
- simIdField.setText("77764707");
- }
- }
-
- private static class SimDataInfoHolder{
- public SimulationDataSpatialHdf5 simData;
- public File userDir;
- public SimDataInfoHolder(SimulationDataSpatialHdf5 simData, File userDir) {
- this.simData = simData;
- this.userDir = userDir;
- }
- }
- private SimDataInfoHolder createSimulationDataFromDir(File dataDir,String userid,VCSimulationDataIdentifier vcDataId) throws Exception{
- File userDir = new File(dataDir, userid);
- return new SimDataInfoHolder(new SimulationDataSpatialHdf5(vcDataId, userDir, null),userDir);
- }
- private JPasswordField jPasswordField = new JPasswordField();
- private SimDataInfoHolder createSimulationDataFromRemote(String userid,VCSimulationDataIdentifier vcDataId) throws Exception{
- SimDataInfoHolder simDataInfoHolder = null;
- SimulationDataSpatialHdf5 simData = null;
- try{
- //Try well known primary data dir from windows
-// if(true){throw new Exception();}
- File userDir = new File("\\\\cfs01.cam.uchc.edu\\ifs\\raid\\vcell\\users",userid);
- simData = new SimulationDataSpatialHdf5(vcDataId, userDir, null);
- simDataInfoHolder = new SimDataInfoHolder(simData,userDir);
- }catch(Exception e){
- try{
- //Try well known secondary data dir from windows
-// if(true){throw new Exception();}
- File userDir = new File("\\\\cfs02.cam.uchc.edu\\raid\\vcell\\users",userid);
- simData = new SimulationDataSpatialHdf5(vcDataId,userDir, null);
- simDataInfoHolder = new SimDataInfoHolder(simData,userDir);
- }catch(Exception e2){
- //try ssh download from linux server
- if(DialogUtils.showComponentOKCancelDialog(ChomboSimpleDataViewer.this, jPasswordField, "Enter cluster password for 'vcell'") != JOptionPane.OK_OPTION){
- throw UserCancelException.CANCEL_GENERIC;
- }
-
- File tempSimDir = File.createTempFile("VCellUsersDir", ".dir");
- tempSimDir.delete();
- File tmpdir = new File(tempSimDir.getParentFile(),"VCellUsersDir");
- if(!tmpdir.exists() && !tmpdir.mkdir()){
- throw new Exception("Couldn't make local dir "+tmpdir);
- }
- File downloadDir = SimDataConnection.downloadSimData(tmpdir, new String(jPasswordField.getPassword()), userid, vcDataId.getSimulationKey(), 0, false);
- simData = new SimulationDataSpatialHdf5(vcDataId,downloadDir, null);
- simDataInfoHolder = new SimDataInfoHolder(simData,downloadDir);
- }
- }
- return simDataInfoHolder;
- }
- private void retrieveVariablesAndTimes()
- {
- AsynchClientTask task0 = new AsynchClientTask("clear", AsynchClientTask.TASKTYPE_SWING_BLOCKING) {
-
- @Override
- public void run(Hashtable hashTable) throws Exception {
- DefaultComboBoxModel dcm = new DefaultComboBoxModel();
- timeComboBox.setModel(dcm);
- DefaultListModel dlm = new DefaultListModel();
- varList.setModel(dlm);
- solTableModel.clear();
- meshMetricsTableModel.clear();
- meanTextField.setText(null);
- maxErrorTextField.setText(null);
- sumVolFracTextField.setText(null);
- l2ErrorTextField.setText(null);
- }
- };
-
- AsynchClientTask task1 = new AsynchClientTask("retrieve data", AsynchClientTask.TASKTYPE_NONSWING_BLOCKING) {
-
- @Override
- public void run(Hashtable hashTable) throws Exception {
- String simId = simIdField.getText().trim();
- if (simId == null || simId.length() == 0)
- {
- throw new RuntimeException("Please provide a simulation id.");
- }
- String username = userNameTextField.getText().trim();
- if (username == null || username.length() == 0)
- {
- throw new RuntimeException("Please provide a user name.");
- }
- VCSimulationDataIdentifier vcDataId = new VCSimulationDataIdentifier(new VCSimulationIdentifier(new KeyValue(simId), new User(username, null)), 0);
- SimDataInfoHolder simDataInfoHolder = null;
- String datadir = dataDirTextField.getText();
- if (datadir == null || datadir.length() == 0){
- simDataInfoHolder = createSimulationDataFromRemote(username, vcDataId);
- datadir = simDataInfoHolder.userDir.getParent();
- dataDirTextField.setText(datadir);
- }else{
- simDataInfoHolder = createSimulationDataFromDir(new File(datadir), username, vcDataId);
- }
- simData = simDataInfoHolder.simData;
- simData.readVarAndFunctionDataIdentifiers();
- usernames.add(username);
- userNameTextField.setAutoCompletionWords(usernames);
- datadirs.add(datadir);
- dataDirTextField.setAutoCompletionWords(datadirs);
- simIds.add(simId);
- simIdField.setAutoCompletionWords(simIds);
- }
- };
-
- AsynchClientTask task2 = new AsynchClientTask("show data", AsynchClientTask.TASKTYPE_SWING_BLOCKING) {
-
- @Override
- public void run(Hashtable hashTable) throws Exception {
- double[] times = simData.getDataTimes();
- DefaultComboBoxModel dcm = new DefaultComboBoxModel();
- for(double t : times)
- {
- dcm.addElement(t);
- }
- timeComboBox.setModel(dcm);
- meshMetricsTableModel.setData(simData.getChomboMesh().getMetricsColumnNames(), simData.getChomboMesh().getMetricsNumbers());
- List dsiList = simData.getDataSetIdentifiers();
- DefaultListModel dlm = new DefaultListModel();
- for (DataSetIdentifier dsi : dsiList)
- {
- dlm.addElement(dsi);
- }
- varList.setModel(dlm);
- if (times.length > 0)
- {
- timeComboBox.setSelectedIndex(0);
- }
- if (dsiList.size() > 0)
- {
- varList.setSelectedIndex(0);
- }
- meshMetricsTableModel.refreshTable();
- }
- };
- ClientTaskDispatcher.dispatch(this, new Hashtable(), new AsynchClientTask[] {task0, task1, task2}, false);
- }
-
- private void retrieveTimePlot()
- {
- if (solTable.getSelectedRow() < 0 || varList.getSelectedIndex() < 0)
- {
- return;
- }
- final int index = (Integer) solTable.getValueAt(solTable.getSelectedRow(), SolTableModel.COL_INDEX);
- DataSetIdentifier selectedVar = (DataSetIdentifier)varList.getSelectedValue();
- final String varName = selectedVar.getName();
-
- AsynchClientTask task0 = new AsynchClientTask("clear", AsynchClientTask.TASKTYPE_SWING_BLOCKING) {
-
- @Override
- public void run(Hashtable hashTable) throws Exception {
- timePlotTableModel.setTimesAndValues(new double[0], new double[0]);
- }
- };
-
- AsynchClientTask task1 = new AsynchClientTask("retrieve data", AsynchClientTask.TASKTYPE_NONSWING_BLOCKING) {
-
- @Override
- public void run(Hashtable hashTable) throws Exception {
- double[] times = simData.getDataTimes();
- double[] values = new double[times.length];
- for (int i = 0; i < times.length; ++ i)
- {
- SimDataSet simDataBlock = simData.retrieveSimDataSet(times[i], varName);
- values[i] = simDataBlock.solValues[index];
- }
- hashTable.put("values", values);
- }
- };
-
- AsynchClientTask task2 = new AsynchClientTask("show data", AsynchClientTask.TASKTYPE_SWING_BLOCKING) {
-
- @Override
- public void run(Hashtable hashTable) throws Exception {
- timePlotLabel.setText("Varaible " + varName + " @ Index " + index);
- double[] times = simData.getDataTimes();
- double[] values = (double[]) hashTable.get("values");
- timePlotTableModel.setTimesAndValues(times, values);
- dataTabbedPane.setSelectedComponent(timePlotPanel);
- }
- };
- ClientTaskDispatcher.dispatch(this, new Hashtable(), new AsynchClientTask[] {task0, task1, task2}, false);
- }
-
-// private void readMeshMetricsFile(File userDir, VCSimulationDataIdentifier vcDataId, String simId) throws IOException
-// {
-// File meshMetricsFile = new File(userDir, vcDataId.getID() + ".chombo.memmetrics");
-// if (!meshMetricsFile.exists())
-// {
-// return;
-// }
-// BufferedReader br = null;
-// try
-// {
-// br = new BufferedReader(new FileReader(meshMetricsFile));
-// List cols = new ArrayList();
-// List values = new ArrayList();
-// String line = br.readLine();
-// if (line != null)
-// {
-// StringTokenizer st = new StringTokenizer(line, ",");
-// while (st.hasMoreTokens())
-// {
-// String token = st.nextToken();
-// cols.add(token);
-// }
-// }
-// while (true)
-// {
-// line = br.readLine();
-// if (line == null)
-// {
-// break;
-// }
-// double[] dvalues = new double[cols.size()];
-// StringTokenizer st = new StringTokenizer(line, ",");
-// int cnt = 0;
-// while (st.hasMoreTokens())
-// {
-// String token = st.nextToken();
-// dvalues[cnt] = Double.parseDouble(token);
-// ++ cnt;
-// }
-// assert cnt == cols.size();
-// values.add(dvalues);
-// }
-// meshMetricsTableModel.setData(cols, values);
-// }
-// finally
-// {
-// if (br != null)
-// {
-// br.close();
-// }
-// }
-// }
-
- private void retrieveData()
- {
- final Double time = (Double)timeComboBox.getSelectedItem();
- if (time == null)
- {
- return;
- }
- DataSetIdentifier selectedVar = (DataSetIdentifier)varList.getSelectedValue();
- if (selectedVar == null)
- {
- return;
- }
- final String varName = selectedVar.getName();
- AsynchClientTask task0 = new AsynchClientTask("clear", AsynchClientTask.TASKTYPE_SWING_BLOCKING) {
-
- @Override
- public void run(Hashtable hashTable) throws Exception {
- solTableModel.clear();
- timePlotTableModel.clear();
- solLabel.setText("Solution");
- timePlotLabel.setText("Time Plot");
- meanTextField.setText(null);
- maxErrorTextField.setText(null);
- sumVolFracTextField.setText(null);
- l2ErrorTextField.setText(null);
- }
- };
-
- AsynchClientTask task1 = new AsynchClientTask("retrieve data", AsynchClientTask.TASKTYPE_NONSWING_BLOCKING) {
-
- @Override
- public void run(Hashtable hashTable) throws Exception {
- if (timeComboBox.getSelectedIndex() < 0 || varList.getSelectedIndex() < 0)
- {
- return;
- }
- SimDataSet simDataSet = simData.retrieveSimDataSet(time, varName);
- hashTable.put("simDataSet", simDataSet);
- }
- };
-
- AsynchClientTask task2 = new AsynchClientTask("show data", AsynchClientTask.TASKTYPE_SWING_BLOCKING) {
-
- @Override
- public void run(Hashtable hashTable) throws Exception {
- SimDataSet simDataSet = (SimDataSet) hashTable.get("simDataSet");
- if (simDataSet == null)
- {
- return;
- }
- solLabel.setText("Variable " + varName + " @ Time " + time);
- solTableModel.setValues(simDataSet.solValues);
- meanTextField.setText(simDataSet.mean == null ? "" : simDataSet.mean.toString());
- sumVolFracTextField.setText(simDataSet.sumVolFrac == null ? "" : simDataSet.sumVolFrac.toString());
- maxErrorTextField.setText(simDataSet.maxError == null ? "" : simDataSet.maxError.toString());
- l2ErrorTextField.setText(simDataSet.l2Error == null ? "" : simDataSet.l2Error + "".toString());
- }
- };
- ClientTaskDispatcher.dispatch(this, new Hashtable(), new AsynchClientTask[] {task0, task1, task2}, false);
- }
-
- // TODO: Make as a test
- public static void main(String[] args) {
- if (args.length > 0 && Boolean.parseBoolean(args[0])){
- ChomboSimpleDataViewer.debug = true;
- }
- VCellLookAndFeel.setVCellLookAndFeel();
- ChomboSimpleDataViewer chomboSimpleDataViewer = new ChomboSimpleDataViewer();
- chomboSimpleDataViewer.setVisible(true);
- }
-}
diff --git a/vcell-client/src/main/java/cbit/vcell/client/data/ODEDataInterface.java b/vcell-client/src/main/java/cbit/vcell/client/data/ODEDataInterface.java
index 5507c124a9..4c2166b09b 100644
--- a/vcell-client/src/main/java/cbit/vcell/client/data/ODEDataInterface.java
+++ b/vcell-client/src/main/java/cbit/vcell/client/data/ODEDataInterface.java
@@ -1,8 +1,8 @@
package cbit.vcell.client.data;
import java.beans.PropertyChangeListener;
-import java.util.LinkedHashMap;
+import cbit.vcell.simdata.SummaryStatisticType;
import org.vcell.util.ObjectNotFoundException;
import cbit.vcell.math.FunctionColumnDescription;
@@ -12,13 +12,6 @@
import cbit.vcell.util.ColumnDescription;
public interface ODEDataInterface {
-
- public enum PlotType {
- Min,
- Max,
- Mean,
- Std
- }
void removePropertyChangeListener(PropertyChangeListener propertyChangeListener);
@@ -35,9 +28,8 @@ public enum PlotType {
FunctionColumnDescription[] getFunctionColumnDescriptions();
- LinkedHashMap parseHDF5File() throws ExpressionException,ObjectNotFoundException;
double[] extractColumn(String columnName) throws ExpressionException,ObjectNotFoundException;
- double[] extractColumn(String columnName, PlotType plotType) throws ExpressionException,ObjectNotFoundException;
+ double[] extractColumn(String columnName, SummaryStatisticType summaryStatisticType) throws ExpressionException,ObjectNotFoundException;
// double[] extractColumnMin(String columnName) throws ExpressionException,ObjectNotFoundException;
// double[] extractColumnStd(String columnName) throws ExpressionException,ObjectNotFoundException;
diff --git a/vcell-client/src/main/java/cbit/vcell/client/data/ODEDataInterfaceImpl.java b/vcell-client/src/main/java/cbit/vcell/client/data/ODEDataInterfaceImpl.java
index 112ac42a43..7342b94417 100644
--- a/vcell-client/src/main/java/cbit/vcell/client/data/ODEDataInterfaceImpl.java
+++ b/vcell-client/src/main/java/cbit/vcell/client/data/ODEDataInterfaceImpl.java
@@ -1,20 +1,9 @@
package cbit.vcell.client.data;
-import java.beans.PropertyChangeListener;
-import java.beans.PropertyChangeSupport;
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.LinkedHashMap;
-import java.util.List;
-
-import org.vcell.util.ObjectNotFoundException;
-import org.vcell.util.document.VCDataIdentifier;
-
-import com.google.common.io.Files;
-
import cbit.vcell.math.FunctionColumnDescription;
import cbit.vcell.parser.ExpressionException;
+import cbit.vcell.simdata.MultiTrialNonspatialStochSimDataReader;
+import cbit.vcell.simdata.SummaryStatisticType;
import cbit.vcell.solver.DataSymbolMetadata;
import cbit.vcell.solver.SimulationModelInfo;
import cbit.vcell.solver.SimulationModelInfo.DataSymbolMetadataResolver;
@@ -22,10 +11,12 @@
import cbit.vcell.solver.ode.ODESimData;
import cbit.vcell.solver.ode.ODESolverResultSet;
import cbit.vcell.util.ColumnDescription;
-import ncsa.hdf.object.FileFormat;
-import ncsa.hdf.object.Group;
-import ncsa.hdf.object.HObject;
-import ncsa.hdf.object.h5.H5ScalarDS;
+import org.vcell.util.ObjectNotFoundException;
+import org.vcell.util.document.VCDataIdentifier;
+
+import java.beans.PropertyChangeListener;
+import java.beans.PropertyChangeSupport;
+import java.util.ArrayList;
class ODEDataInterfaceImpl implements ODEDataInterface {
@@ -107,117 +98,13 @@ public double[] extractColumn(String columnName) throws ExpressionException,Obje
}
@Override
- public LinkedHashMap parseHDF5File() {
- FileFormat hdf5FileFormat = null;
- File to = null;
- LinkedHashMap valueToIndexMap = new LinkedHashMap<>();
- try {
- ODESolverResultSet osrs = getOdeSolverResultSet();
- if(osrs instanceof ODESimData) {
- byte[] hdf5FileBytes = ((ODESimData)getOdeSolverResultSet()).getHdf5FileBytes();
- if(hdf5FileBytes != null) {
- to = File.createTempFile("odeStats_"+simulationModelInfo.getSimulationName(), ".hdf5");
- Files.write(hdf5FileBytes, to);
- FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
- if (fileFormat == null){
- throw new Exception("Cannot find HDF5 FileFormat.");
- }
- // open the file with read-only access
- hdf5FileFormat = fileFormat.createInstance(to.getAbsolutePath(), FileFormat.READ);
- // open the file and retrieve the file structure
- hdf5FileFormat.open();
- Group root = (Group)((javax.swing.tree.DefaultMutableTreeNode)hdf5FileFormat.getRootNode()).getUserObject();
- List postProcessMembers = ((Group)root).getMemberList();
-
- HObject varNames = null;
- for(HObject nextHObject : postProcessMembers) {
- if(nextHObject.getName().equals("VarNames")) {
- varNames = nextHObject;
- break;
- // SimTimes
- // StatMax
- // StatMean
- // StatMin
- // StatStdDev
- // VarNames
- }
- }
- H5ScalarDS h5ScalarDS = (H5ScalarDS)varNames;
- h5ScalarDS.init();
- try {
- long[] dims = h5ScalarDS.getDims();
- System.out.println("---"+varNames.getName()+" "+varNames.getClass().getName()+" Dimensions="+Arrays.toString(dims));
- Object obj = h5ScalarDS.read();
- String[] values = (String[])obj;
- for(int i=0; i postProcessMembers = ((Group)root).getMemberList();
- for(HObject nextHObject : postProcessMembers) {
- System.out.println(nextHObject.getName()+" "+nextHObject.getClass().getName());
- H5ScalarDS h5ScalarDS = (H5ScalarDS)nextHObject;
- h5ScalarDS.init();
- try {
- long[] dims = h5ScalarDS.getDims();
- System.out.println("---"+nextHObject.getName()+" "+nextHObject.getClass().getName()+" Dimensions="+Arrays.toString(dims));
- Object obj = h5ScalarDS.read();
- if(dims.length == 2) {
- double[] columns = new double[(int)dims[1]];
- for(int row=0;row postProcessMembers = ((Group)root).getMemberList();
- for(HObject nextHObject:postProcessMembers){
- //System.out.println(nextHObject.getName()+"\n"+nextHObject.getClass().getName());
- H5ScalarDS h5ScalarDS = (H5ScalarDS)nextHObject;
- h5ScalarDS.init();
- try {
- long[] dims = h5ScalarDS.getDims();
- System.out.println("---"+nextHObject.getName()+" "+nextHObject.getClass().getName()+" Dimensions="+Arrays.toString(dims));
- Object obj = h5ScalarDS.read();
- if(dims.length == 2) {
- //dims[0]=numTimes (will be the same as 'SimTimes' data length)
- //dims[1]=numVars (will be the same as 'VarNames' data length)
- //if name='StatMean' this is the same as the default data saved in the odeSolverresultSet
- double[] columns = new double[(int)dims[1]];
- for(int row=0;row<dims[0];row++){
+ public static void insertDoubles(int hdf5GroupID,String dataspaceName,long[] dims,List<Double> data) throws NullPointerException, HDF5Exception {
+ double[] hdfData = org.apache.commons.lang.ArrayUtils.toPrimitive(((ArrayList)data).toArray(new Double[0]));
+ int hdf5DataspaceID = H5.H5Screate_simple(dims.length, dims, null);
+ int hdf5DatasetID = H5.H5Dcreate(hdf5GroupID, dataspaceName,HDF5Constants.H5T_NATIVE_DOUBLE, hdf5DataspaceID,HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ H5.H5Dwrite_double(hdf5DatasetID, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, hdfData);
+ H5.H5Dclose(hdf5DatasetID);
+ H5.H5Sclose(hdf5DataspaceID);
+ }
+
+ /**
+ * Insert a dataset at the specified group where the data are doubles (as an array)
+ *
+ * @param hdf5GroupID the id of the group to apply the dataset to
+ * @param dataspaceName name of the dataset
+ * @param dims dimensional measurements
+ * @param data the data to fill the dataset
+ * @throws NullPointerException (unsure how this occurs)
+ * @throws HDF5Exception if the hdf5 library encounters something unusual
+ */
+ public static void insertDoubles(int hdf5GroupID,String dataspaceName,long[] dims,double[] data) throws NullPointerException, HDF5Exception {
+ int hdf5DataspaceID = H5.H5Screate_simple(dims.length, dims, null);
+ int hdf5DatasetID = H5.H5Dcreate(hdf5GroupID, dataspaceName,HDF5Constants.H5T_NATIVE_DOUBLE, hdf5DataspaceID,HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ H5.H5Dwrite_double(hdf5DatasetID, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, (double[])data);
+ H5.H5Dclose(hdf5DatasetID);
+ H5.H5Sclose(hdf5DataspaceID);
+ }
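+ // Editor's illustrative sketch (not part of this change): with a group id obtained the same way
+ // as in the export code further below, a 1-D dataset of doubles could be written as, e.g.,
+ //   int gid = H5.H5Gcreate(fileId, "Report", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ //   insertDoubles(gid, "Times", new long[]{times.length}, times);
+ //   H5.H5Gclose(gid);
+ // 'fileId' and 'times' are hypothetical names assumed to exist in the caller.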
+
+ /**
+ * Insert a dataset at the specified group where the data are integers
+ *
+ * @param hdf5GroupID the id of the group to apply the dataset to
+ * @param dataspaceName name of the dataset
+ * @param dims dimensional measurements
+ * @param data the data to fill the dataset
+ * @throws NullPointerException (unsure how this occurs)
+ * @throws HDF5Exception if the hdf5 library encounters something unusual
+ */
+ public static void insertInts(int hdf5GroupID,String dataspaceName,long[] dims,int[] data) throws NullPointerException, HDF5Exception {
+ int hdf5DataspaceID = H5.H5Screate_simple(dims.length, dims, null);
+ int hdf5DatasetID = H5.H5Dcreate(hdf5GroupID, dataspaceName,HDF5Constants.H5T_NATIVE_INT, hdf5DataspaceID,HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ H5.H5Dwrite_int(hdf5DatasetID, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, (int[])data);
+ H5.H5Dclose(hdf5DatasetID);
+ H5.H5Sclose(hdf5DataspaceID);
+ }
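+ // Editor's illustrative sketch (not part of this change), mirroring the TIMEBOUNDS call site
+ // further below; 'gid', 'beginTimeIndex' and 'endTimeIndex' are assumed to exist in the caller:
+ //   insertInts(gid, "TimeBounds", new long[]{2}, new int[]{beginTimeIndex, endTimeIndex});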
+
+ /**
+ * Insert a dataset at the specified group where the data are strings
+ *
+ * @param hdf5GroupID the id of the group to apply the dataset to
+ * @param datasetName name of the dataset
+ * @param dims dimensional measurements
+ * @param data the data to fill the dataset
+ * @throws NullPointerException (unsure how this occurs)
+ * @throws HDF5Exception if the hdf5 library encounters something unusual
+ */
+ public static void insertStrings(int hdf5GroupID,String datasetName,long[] dims,List data) throws NullPointerException, HDF5Exception {
+ int largestStrLen = 0;
+ for(int i=0;i(Arrays.asList(new String[] {data})));
+ //String[] attr = data.toArray(new String[0]);
+
+ String attr = data + '\u0000';
+
+ //https://support.hdfgroup.org/ftp/HDF5/examples/misc-examples/vlstra.c
+ int h5attrcs1 = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+ H5.H5Tset_size (h5attrcs1, attr.length() /*HDF5Constants.H5T_VARIABLE*/);
+ int dataspace_id = -1;
+ //dataspace_id = H5.H5Screate_simple(dims.length, dims,null);
+ dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
+ int attribute_id = H5.H5Acreate(hdf5GroupID, attributeName, h5attrcs1, dataspace_id, HDF5Constants.H5P_DEFAULT,HDF5Constants.H5P_DEFAULT);
+ H5.H5Awrite(attribute_id, h5attrcs1, attr.getBytes());
+ H5.H5Sclose(dataspace_id);
+ H5.H5Aclose(attribute_id);
+ H5.H5Tclose(h5attrcs1);
+ }
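+ // Editor's note: judging from the call sites further below, this helper is invoked as
+ // insertAttribute(groupId, attributeName, text), e.g.
+ //   insertAttribute(hdf5GroupCurveID, PCS.CURVECROSSMEMBRINDEX.name() + " Info", attrText);
+ // and writes the text as a fixed-length, NUL-terminated string attribute on the given group.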
+
+
/**
* @throws IOException
* @deprecated
@@ -531,8 +646,8 @@ private List exportPDEData(OutputContext outputContext, long jobID
for(int st = beginTimeIndex; st <= endTimeIndex; st++){
subTimes[st - beginTimeIndex] = allTimes[st];
}
- Hdf5Utils.insertDoubles(hdf5GroupID, PCS.TIMES.name(), new long[]{subTimes.length}, subTimes);//Hdf5Utils.writeHDF5Dataset(hdf5GroupID, PCS.TIMES.name(), new long[] {subTimes.length}, subTimes,false);
- Hdf5Utils.insertInts(hdf5GroupID, PCS.TIMEBOUNDS.name(), new long[]{2}, new int[]{beginTimeIndex, endTimeIndex});//Hdf5Utils.writeHDF5Dataset(hdf5GroupID, PCS.TIMEBOUNDS.name(), new long[] {2}, new int[] {beginTimeIndex,endTimeIndex},false);
+ insertDoubles(hdf5GroupID, PCS.TIMES.name(), new long[]{subTimes.length}, subTimes);//UiTableExporterToHDF5.writeHDF5Dataset(hdf5GroupID, PCS.TIMES.name(), new long[] {subTimes.length}, subTimes,false);
+ insertInts(hdf5GroupID, PCS.TIMEBOUNDS.name(), new long[]{2}, new int[]{beginTimeIndex, endTimeIndex});//UiTableExporterToHDF5.writeHDF5Dataset(hdf5GroupID, PCS.TIMEBOUNDS.name(), new long[] {2}, new int[] {beginTimeIndex,endTimeIndex},false);
}
switch(geometrySpecs.getModeID()){
@@ -846,10 +961,10 @@ private FileDataContainerID getCurveTimeSeries(int hdf5GroupVarID, PointsCurvesS
if(hdf5GroupVarID != -1){
try {
int hdf5GroupCurveID = H5.H5Gcreate(hdf5GroupVarID, getSpatialSelectionDescription(curve), HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
- Hdf5Utils.insertInts(hdf5GroupCurveID, PCS.CURVEINDEXES.name(), new long[]{((int[]) treePCS.get(PCS.CURVEINDEXES)).length}, (int[]) treePCS.get(PCS.CURVEINDEXES));//Hdf5Utils.writeHDF5Dataset(hdf5GroupCurveID, PCS.CURVEINDEXES.name(), new long[] {((int[])treePCS.get(PCS.CURVEINDEXES)).length}, (int[])treePCS.get(PCS.CURVEINDEXES),false);
- Hdf5Utils.insertDoubles(hdf5GroupCurveID, PCS.CURVEDISTANCES.name(), new long[]{((double[]) treePCS.get(PCS.CURVEDISTANCES)).length}, (double[]) treePCS.get(PCS.CURVEDISTANCES));//Hdf5Utils.writeHDF5Dataset(hdf5GroupCurveID, PCS.CURVEDISTANCES.name(), new long[] {((double[])treePCS.get(PCS.CURVEDISTANCES)).length}, (double[])treePCS.get(PCS.CURVEDISTANCES),false);
+ insertInts(hdf5GroupCurveID, PCS.CURVEINDEXES.name(), new long[]{((int[]) treePCS.get(PCS.CURVEINDEXES)).length}, (int[]) treePCS.get(PCS.CURVEINDEXES));//UiTableExporterToHDF5.writeHDF5Dataset(hdf5GroupCurveID, PCS.CURVEINDEXES.name(), new long[] {((int[])treePCS.get(PCS.CURVEINDEXES)).length}, (int[])treePCS.get(PCS.CURVEINDEXES),false);
+ insertDoubles(hdf5GroupCurveID, PCS.CURVEDISTANCES.name(), new long[]{((double[]) treePCS.get(PCS.CURVEDISTANCES)).length}, (double[]) treePCS.get(PCS.CURVEDISTANCES));//UiTableExporterToHDF5.writeHDF5Dataset(hdf5GroupCurveID, PCS.CURVEDISTANCES.name(), new long[] {((double[])treePCS.get(PCS.CURVEDISTANCES)).length}, (double[])treePCS.get(PCS.CURVEDISTANCES),false);
if(treePCS.get(PCS.CURVECROSSMEMBRINDEX) != null){
- Hdf5Utils.insertInts(hdf5GroupCurveID, PCS.CURVECROSSMEMBRINDEX.name(), new long[]{((int[]) treePCS.get(PCS.CURVECROSSMEMBRINDEX)).length}, (int[]) treePCS.get(PCS.CURVECROSSMEMBRINDEX));//Hdf5Utils.writeHDF5Dataset(hdf5GroupCurveID, PCS.CURVECROSSMEMBRINDEX.name(), new long[] {((int[])treePCS.get(PCS.CURVECROSSMEMBRINDEX)).length}, (int[])treePCS.get(PCS.CURVECROSSMEMBRINDEX),false);
+ insertInts(hdf5GroupCurveID, PCS.CURVECROSSMEMBRINDEX.name(), new long[]{((int[]) treePCS.get(PCS.CURVECROSSMEMBRINDEX)).length}, (int[]) treePCS.get(PCS.CURVECROSSMEMBRINDEX));//UiTableExporterToHDF5.writeHDF5Dataset(hdf5GroupCurveID, PCS.CURVECROSSMEMBRINDEX.name(), new long[] {((int[])treePCS.get(PCS.CURVECROSSMEMBRINDEX)).length}, (int[])treePCS.get(PCS.CURVECROSSMEMBRINDEX),false);
ArrayList crossPoints = new ArrayList();
for(int i = 0; i < crossingMembraneIndexes.length; i++){
if(crossingMembraneIndexes[i] != -1){
@@ -857,9 +972,9 @@ private FileDataContainerID getCurveTimeSeries(int hdf5GroupVarID, PointsCurvesS
}
}
String attrText = PCS.CURVEVALS.name() + " columns " + crossPoints.get(0) + " and " + crossPoints.get(1) + " are added points of interpolation near membrane";
- Hdf5Utils.insertAttribute(hdf5GroupCurveID, PCS.CURVECROSSMEMBRINDEX.name() + " Info", attrText); //Hdf5Utils.writeHDF5Dataset(hdf5GroupCurveID, PCS.CURVECROSSMEMBRINDEX.name()+" Info", null, attrText,true);
+ insertAttribute(hdf5GroupCurveID, PCS.CURVECROSSMEMBRINDEX.name() + " Info", attrText); //UiTableExporterToHDF5.writeHDF5Dataset(hdf5GroupCurveID, PCS.CURVECROSSMEMBRINDEX.name()+" Info", null, attrText,true);
}
- Hdf5Utils.insertDoubles(hdf5GroupCurveID, PCS.CURVEVALS.name(), new long[]{endIndex - beginIndex + 1, ((int[]) treePCS.get(PCS.CURVEINDEXES)).length}, (ArrayList) treePCS.get(PCS.CURVEVALS));//Hdf5Utils.writeHDF5Dataset(hdf5GroupCurveID, PCS.CURVEVALS.name(), new long[] {endIndex-beginIndex+1,((int[])treePCS.get(PCS.CURVEINDEXES)).length}, (ArrayList)treePCS.get(PCS.CURVEVALS),false);
+ insertDoubles(hdf5GroupCurveID, PCS.CURVEVALS.name(), new long[]{endIndex - beginIndex + 1, ((int[]) treePCS.get(PCS.CURVEINDEXES)).length}, (ArrayList) treePCS.get(PCS.CURVEVALS));//UiTableExporterToHDF5.writeHDF5Dataset(hdf5GroupCurveID, PCS.CURVEVALS.name(), new long[] {endIndex-beginIndex+1,((int[])treePCS.get(PCS.CURVEINDEXES)).length}, (ArrayList)treePCS.get(PCS.CURVEVALS),false);
H5.H5Gclose(hdf5GroupCurveID);
} catch(Exception e){
throw new DataAccessException(e.getMessage(), e);
@@ -1054,9 +1169,9 @@ private FileDataContainerID getPointsTimeSeries(PointsCurvesSlices pcs, int hdf5
}
if(hdf5GroupID != -1){
long[] dimsCoord = new long[]{1, pointSpatialSelections.length};
- Hdf5Utils.insertStrings(hdf5GroupID, PCS.POINTINFO.name(), dimsCoord, (ArrayList) pcs.data.get(PCS.POINTINFO));//Hdf5Utils.writeHDF5Dataset(hdf5GroupID, PCS.POINTINFO.name(), dimsCoord, pcs.data.get(PCS.POINTINFO),false);
+ insertStrings(hdf5GroupID, PCS.POINTINFO.name(), dimsCoord, (ArrayList) pcs.data.get(PCS.POINTINFO));//UiTableExporterToHDF5.writeHDF5Dataset(hdf5GroupID, PCS.POINTINFO.name(), dimsCoord, pcs.data.get(PCS.POINTINFO),false);
long[] dimsValues = new long[]{hdfTimes.length, pointSpatialSelections.length};
- Hdf5Utils.insertDoubles(hdf5GroupID, PCS.POINTVALS.name(), dimsValues, hdfValues);//Hdf5Utils.writeHDF5Dataset(hdf5GroupID, PCS.POINTVALS.name(), dimsValues, hdfValues,false);
+ insertDoubles(hdf5GroupID, PCS.POINTVALS.name(), dimsValues, hdfValues);//UiTableExporterToHDF5.writeHDF5Dataset(hdf5GroupID, PCS.POINTVALS.name(), dimsValues, hdfValues,false);
}
}
diff --git a/vcell-core/src/main/java/cbit/vcell/resource/PropertyLoader.java b/vcell-core/src/main/java/cbit/vcell/resource/PropertyLoader.java
index 0d65f24b1b..ccd75d0186 100644
--- a/vcell-core/src/main/java/cbit/vcell/resource/PropertyLoader.java
+++ b/vcell-core/src/main/java/cbit/vcell/resource/PropertyLoader.java
@@ -272,8 +272,6 @@ public static void setConfigProvider(VCellConfigProvider configProvider) {
public static final String nagiosMonitorPort = record("test.monitor.port", ValueType.GEN);
public static final String imageJVcellPluginURL = record("vcell.imagej.plugin.url", ValueType.GEN);
-
- public static final String webDataServerPort = record("vcelldata.web.server.port", ValueType.GEN);
public static final String cmdSrvcSshCmdTimeoutMS = record("vcell.ssh.cmd.cmdtimeout", ValueType.GEN);
public static final String cmdSrvcSshCmdRestoreTimeoutFactor = record("vcell.ssh.cmd.restoretimeout", ValueType.GEN);
diff --git a/vcell-core/src/main/java/cbit/vcell/simdata/ChomboSimDataReader.java b/vcell-core/src/main/java/cbit/vcell/simdata/ChomboSimDataReader.java
new file mode 100644
index 0000000000..61ac310a96
--- /dev/null
+++ b/vcell-core/src/main/java/cbit/vcell/simdata/ChomboSimDataReader.java
@@ -0,0 +1,336 @@
+package cbit.vcell.simdata;
+
+import cbit.vcell.math.InsideVariable;
+import cbit.vcell.math.OutsideVariable;
+import cbit.vcell.math.Variable;
+import ncsa.hdf.object.*;
+import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
+import org.apache.commons.compress.archivers.zip.ZipFile;
+import org.vcell.util.DataAccessException;
+
+import javax.swing.tree.DefaultMutableTreeNode;
+import java.io.*;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Vector;
+import java.util.zip.ZipEntry;
+
+public class ChomboSimDataReader {
+ private static final String HDF5_GROUP_SOLUTION = "/solution";
+ private static final String HDF5_GROUP_EXTRAPOLATED_VOLUMES = "/extrapolated_volumes";
+ private static final String HDF5_GROUP_DIRECTORY_SEPARATOR = "/";
+
+ /**
+ * Creates a relative path to the solution of the specified variable.
+ *
+ * @param varName the name of the variable to build the path for.
+ * @return the relative path
+ */
+ public static String getVarSolutionPath(String varName){
+ return HDF5_GROUP_SOLUTION + HDF5_GROUP_DIRECTORY_SEPARATOR + Variable.getNameFromCombinedIdentifier(varName);
+ }
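+ // Editor's note (illustrative): assuming a combined identifier of the form domain::name, e.g.
+ // "cytosol::C", this resolves to "/solution/C"; a plain name "C" maps to "/solution/C" as well.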
+
+ /**
+ * Creates a relative path to the extrapolated values of a given variable name.
+ *
+ * @param varName name of the variable to build the path for
+ * @return the relative path
+ */
+ public static String getVolVarExtrapolatedValuesPath(String varName){
+ return HDF5_GROUP_EXTRAPOLATED_VOLUMES + HDF5_GROUP_DIRECTORY_SEPARATOR + "__" + Variable.getNameFromCombinedIdentifier(varName) + "_extrapolated__";
+ }
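+ // Editor's note (illustrative): a plain variable name "C" would map to
+ // "/extrapolated_volumes/__C_extrapolated__".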
+
+
+ public static void getNextDataAtCurrentTimeChombo(double[][] returnValues, ZipFile currentZipFile, String[] varNames, int[][] varIndexes, String[] simDataFileNames, int masterTimeIndex) throws Exception {
+ File tempFile = null;
+ FileFormat solFile = null;
+ try {
+ tempFile = createTempHdf5File(currentZipFile, simDataFileNames[masterTimeIndex]);
+
+ FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
+ solFile = fileFormat.createInstance(tempFile.getAbsolutePath(), FileFormat.READ);
+ solFile.open();
+
+ for(int k = 0; k < varNames.length; ++ k) {
+ try {
+ boolean bExtrapolatedValue = false;
+ String varName = varNames[k];
+ if (varName.endsWith(InsideVariable.INSIDE_VARIABLE_SUFFIX))
+ {
+ bExtrapolatedValue = true;
+ varName = varName.substring(0, varName.lastIndexOf(InsideVariable.INSIDE_VARIABLE_SUFFIX));
+ }
+ else if (varName.endsWith(OutsideVariable.OUTSIDE_VARIABLE_SUFFIX))
+ {
+ bExtrapolatedValue = true;
+ varName = varName.substring(0, varName.lastIndexOf(OutsideVariable.OUTSIDE_VARIABLE_SUFFIX));
+ }
+ double[] sol = null;
+ if (bExtrapolatedValue)
+ {
+ sol = readChomboExtrapolatedValues(varName, solFile);
+ }
+ else
+ {
+ String varPath = getVarSolutionPath(varNames[k]);
+ HObject solObj = FileFormat.findObject(solFile, varPath);
+ if (solObj instanceof Dataset) {
+ Dataset dataset = (Dataset)solObj;
+ sol = (double[]) dataset.read();
+ }
+ }
+ if (sol != null)
+ {
+ for(int l = 0;l < varIndexes[k].length; ++ l) {
+ int idx = varIndexes[k][l];
+ double val = sol[idx];
+ returnValues[k][l] = val;
+ }
+ }
+ } catch (Exception e) {
+ throw new DataAccessException(e.getMessage(), e);
+ }
+ }
+ } finally {
+ try {
+ if (solFile != null) {
+ solFile.close();
+ }
+ if (tempFile != null) {
+ if (!tempFile.delete()) {
+ System.err.println("couldn't delete temp file " + tempFile.getAbsolutePath());
+ }
+ }
+ } catch(Exception e) {
+ // ignore
+ }
+ }
+ }
+
+ public static void readHdf5SolutionMetaData(InputStream is, Vector dataBlockList) throws Exception
+ {
+ File tempFile = null;
+ FileFormat solFile = null;
+ try{
+ tempFile = createTempHdf5File(is);
+
+ FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
+ solFile = fileFormat.createInstance(tempFile.getAbsolutePath(), FileFormat.READ);
+ solFile.open();
+ DefaultMutableTreeNode rootNode = (DefaultMutableTreeNode)solFile.getRootNode();
+ Group rootGroup = (Group)rootNode.getUserObject();
+ List solGroups = rootGroup.getMemberList();
+
+ for (HObject memberGroup : solGroups)
+ {
+ if (memberGroup instanceof Group && memberGroup.getName().equals("solution"))
+ {
+ Group solGroup = (Group) memberGroup;
+ List memberList = solGroup.getMemberList();
+ for (HObject member : memberList)
+ {
+ if (!(member instanceof Dataset)){
+ continue;
+ }
+ Dataset dataset = (Dataset)member;
+ String dsname = dataset.getName();
+ int vt = -1;
+ String domain = null;
+ List solAttrList = dataset.getMetadata();
+ for (Attribute attr : solAttrList)
+ {
+ String attrName = attr.getName();
+ if(attrName.equals("variable type")){
+ Object obj = attr.getValue();
+ vt = ((int[])obj)[0];
+ } else if (attrName.equals("domain")) {
+ Object obj = attr.getValue();
+ domain = ((String[])obj)[0];
+ }
+ }
+ long[] dims = dataset.getDims();
+ String varName = domain == null ? dsname : domain + Variable.COMBINED_IDENTIFIER_SEPARATOR + dsname;
+ dataBlockList.addElement(cbit.vcell.simdata.DataBlock.createDataBlock(varName, vt, (int) dims[0], 0));
+ }
+ break;
+ }
+ }
+ } finally {
+ try {
+ if (solFile != null) {
+ solFile.close();
+ }
+ if (tempFile != null) {
+ if (!tempFile.delete()) {
+ System.err.println("couldn't delete temp file " + tempFile);
+ }
+ }
+ } catch(Exception e) {
+ // ignore
+ }
+ }
+ }
+
+ public static double[] readHdf5VariableSolution(File zipfile, String fileName, String varName) throws Exception{
+
+ File tempFile = null;
+ FileFormat solFile = null;
+ try{
+ tempFile = createTempHdf5File(zipfile, fileName);
+
+ FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
+ solFile = fileFormat.createInstance(tempFile.getAbsolutePath(), FileFormat.READ);
+ solFile.open();
+ if (varName != null)
+ {
+ String varPath = getVarSolutionPath(varName);
+ HObject solObj = FileFormat.findObject(solFile, varPath);
+ if (solObj instanceof Dataset)
+ {
+ Dataset dataset = (Dataset)solObj;
+ return (double[]) dataset.read();
+ }
+ }
+ } finally {
+ try {
+ if (solFile != null) {
+ solFile.close();
+ }
+ if (tempFile != null) {
+ if (!tempFile.delete()) {
+ System.err.println("couldn't delete temp file " + tempFile.getAbsolutePath());
+ }
+ }
+ } catch(Exception e) {
+ // ignore
+ }
+ }
+ return null;
+ }
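+ // Editor's illustrative sketch, mirroring the DataSet.fetchSimData call site; 'zipFile',
+ // 'simFileName' and 'varName' are assumed to exist in the caller:
+ //   double[] sol = ChomboSimDataReader.readHdf5VariableSolution(zipFile, simFileName, varName);
+ //   if (sol == null) { /* variable not present in this HDF5 file */ }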
+
+ public static double[] readChomboExtrapolatedValues(String varName, File pdeFile, File zipFile) throws IOException {
+ double[] data = null;
+ if (zipFile != null && DataSet.isChombo(zipFile)) {
+ File tempFile = null;
+ FileFormat solFile = null;
+ try{
+ tempFile = createTempHdf5File(zipFile, pdeFile.getName());
+
+ FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
+ solFile = fileFormat.createInstance(tempFile.getAbsolutePath(), FileFormat.READ);
+ solFile.open();
+ data = readChomboExtrapolatedValues(varName, solFile);
+ } catch(Exception e) {
+ throw new IOException(e.getMessage(), e);
+ } finally {
+ try {
+ if (solFile != null) {
+ solFile.close();
+ }
+ if (tempFile != null) {
+ if (!tempFile.delete()) {
+ System.err.println("couldn't delete temp file " + tempFile.getAbsolutePath());
+ }
+ }
+ } catch(Exception e) {
+ // ignore
+ }
+ }
+ }
+ return data;
+ }
+
+ private static double[] readChomboExtrapolatedValues(String varName, FileFormat solFile) throws Exception {
+ double data[] = null;
+ if (varName != null)
+ {
+ String varPath = getVolVarExtrapolatedValuesPath(varName);
+ HObject solObj = FileFormat.findObject(solFile, varPath);
+ if (solObj == null)
+ {
+ throw new IOException("Extrapolated values for variable '" + varName + "' does not exist in the results.");
+ }
+ if (solObj instanceof Dataset)
+ {
+ Dataset dataset = (Dataset)solObj;
+ return (double[]) dataset.read();
+ }
+ }
+ return data;
+ }
+
+ private static File createTempHdf5File(File zipFile, String fileName) throws IOException
+ {
+ ZipFile zipZipFile = null;
+ try
+ {
+ zipZipFile = DataSet.openZipFile(zipFile);
+ return createTempHdf5File(zipZipFile, fileName);
+ }
+ finally
+ {
+ try
+ {
+ if (zipZipFile != null)
+ {
+ zipZipFile.close();
+ }
+ }
+ catch (Exception ex)
+ {
+ // ignore
+ }
+ }
+ }
+
+ private static File createTempHdf5File(ZipFile zipFile, String fileName) throws IOException
+ {
+ InputStream is = null;
+ try
+ {
+ ZipEntry dataEntry = zipFile.getEntry(fileName);
+ is = zipFile.getInputStream((ZipArchiveEntry) dataEntry);
+ return createTempHdf5File(is);
+ }
+ finally
+ {
+ try
+ {
+ if (is != null)
+ {
+ is.close();
+ }
+ }
+ catch (Exception ex)
+ {
+ // ignore
+ }
+ }
+ }
+
+ private static File createTempHdf5File(InputStream is) throws IOException
+ {
+ OutputStream out = null;
+ try{
+ File tempFile = File.createTempFile("temp", "hdf5");
+ out=new FileOutputStream(tempFile);
+ byte buf[] = new byte[1024];
+ int len;
+ while((len=is.read(buf))>0) {
+ out.write(buf,0,len);
+ }
+ return tempFile;
+ }
+ finally
+ {
+ try {
+ if (out != null) {
+ out.close();
+ }
+ } catch (Exception ex) {
+ // ignore
+ }
+ }
+ }
+
+}
diff --git a/vcell-core/src/main/java/cbit/vcell/simdata/SimulationDataSpatialHdf5.java b/vcell-core/src/main/java/cbit/vcell/simdata/ChomboSimpleSimDataReader_NotUsed.java
similarity index 87%
rename from vcell-core/src/main/java/cbit/vcell/simdata/SimulationDataSpatialHdf5.java
rename to vcell-core/src/main/java/cbit/vcell/simdata/ChomboSimpleSimDataReader_NotUsed.java
index 31272bc783..f3d83973c1 100644
--- a/vcell-core/src/main/java/cbit/vcell/simdata/SimulationDataSpatialHdf5.java
+++ b/vcell-core/src/main/java/cbit/vcell/simdata/ChomboSimpleSimDataReader_NotUsed.java
@@ -1,23 +1,13 @@
package cbit.vcell.simdata;
-import java.io.BufferedInputStream;
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.StringTokenizer;
-import java.util.Vector;
-import java.util.zip.ZipEntry;
-
-import javax.swing.tree.DefaultMutableTreeNode;
-
+import cbit.vcell.math.Variable;
+import cbit.vcell.math.Variable.Domain;
+import cbit.vcell.math.VariableType;
+import cbit.vcell.mongodb.VCMongoMessage;
+import ncsa.hdf.hdf5lib.H5;
+import ncsa.hdf.object.*;
+import ncsa.hdf.object.h5.H5CompoundDS;
import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
-//import java.util.zip.ZipFile;
import org.apache.commons.compress.archivers.zip.ZipFile;
import org.vcell.chombo.ChomboBox;
import org.vcell.util.DataAccessException;
@@ -26,19 +16,12 @@
import org.vcell.util.Origin;
import org.vcell.util.document.VCDataIdentifier;
-import cbit.vcell.math.Variable;
-import cbit.vcell.math.Variable.Domain;
-import cbit.vcell.math.VariableType;
-import cbit.vcell.mongodb.VCMongoMessage;
-import ncsa.hdf.hdf5lib.H5;
-import ncsa.hdf.object.Attribute;
-import ncsa.hdf.object.Dataset;
-import ncsa.hdf.object.FileFormat;
-import ncsa.hdf.object.Group;
-import ncsa.hdf.object.HObject;
-import ncsa.hdf.object.h5.H5CompoundDS;
+import javax.swing.tree.DefaultMutableTreeNode;
+import java.io.*;
+import java.util.*;
+import java.util.zip.ZipEntry;
-public class SimulationDataSpatialHdf5
+public class ChomboSimpleSimDataReader_NotUsed
{
public static class SimLogFileEntry
{
@@ -168,7 +151,7 @@ public static class SimDataSet
private long logFileLength = 0;
private ChomboMesh chomboMesh;
- public SimulationDataSpatialHdf5(VCDataIdentifier argVCDataID, File primaryUserDir, File secondaryUserDir)
+ public ChomboSimpleSimDataReader_NotUsed(VCDataIdentifier argVCDataID, File primaryUserDir, File secondaryUserDir)
throws IOException, DataAccessException
{
this.vcDataId = argVCDataID;
@@ -184,11 +167,11 @@ public SimulationDataSpatialHdf5(VCDataIdentifier argVCDataID, File primaryUserD
"does not exist in primary [" + primaryUserDir + "] or secondary [" + secondaryUserDir + "] user directory .");
}
}
- VCMongoMessage.sendTrace("SimulationDataSpatialHdf5.SimulationDataSpatialHdf5() <>");
+ VCMongoMessage.sendTrace("ChomboSimpleSimDataReader_NotUsed.ChomboSimpleSimDataReader_NotUsed() <>");
}
public synchronized void readVarAndFunctionDataIdentifiers() throws Exception {
- VCMongoMessage.sendTrace("SimulationDataSpatialHdf5.readVarAndFunctionDataIdentifiers Entry");
+ VCMongoMessage.sendTrace("ChomboSimpleSimDataReader_NotUsed.readVarAndFunctionDataIdentifiers Entry");
readLogFile();
if(chomboMesh == null){
chomboMesh = readMeshFile(new File(userDirectory, getMeshFileName()));
@@ -371,15 +354,15 @@ private File findLogFile() {
if (logFile == null)
{
logFile = new File(userDirectory, getLogFileName());
- VCMongoMessage.sendTrace("SimulationDataSpatialHdf5.getLogFile() <> calling logile.exists()");
+ VCMongoMessage.sendTrace("ChomboSimpleSimDataReader_NotUsed.getLogFile() <> calling logile.exists()");
if (logFile.exists())
{
- VCMongoMessage.sendTrace("SimulationDataSpatialHdf5.getLogFile() <> file found");
+ VCMongoMessage.sendTrace("ChomboSimpleSimDataReader_NotUsed.getLogFile() <> file found");
}
else
{
logFile = null;
- VCMongoMessage.sendTrace("SimulationDataSpatialHdf5.getLogFile() <> file found");
+ VCMongoMessage.sendTrace("ChomboSimpleSimDataReader_NotUsed.getLogFile() <> file found");
}
}
@@ -391,16 +374,16 @@ private File findLogFile() {
* @throws IOException
*/
private synchronized void readLogFile() throws DataAccessException, IOException {
- VCMongoMessage.sendTrace("SimulationDataSpatialHdf5.readLog() <>");
+ VCMongoMessage.sendTrace("ChomboSimpleSimDataReader_NotUsed.readLog() <>");
if (logFile == null){
- VCMongoMessage.sendTrace("SimulationDataSpatialHdf5.readLog() log file not found <>");
+ VCMongoMessage.sendTrace("ChomboSimpleSimDataReader_NotUsed.readLog() log file not found <>");
throw new DataAccessException("log file not found for " + vcDataId);
}
- VCMongoMessage.sendTrace("SimulationDataSpatialHdf5.readLog() logFile exists");
+ VCMongoMessage.sendTrace("ChomboSimpleSimDataReader_NotUsed.readLog() logFile exists");
long length = logFile.length();
long lastModified = logFile.lastModified();
if (lastModified == logFileLastModified && logFileLength == length) {
- VCMongoMessage.sendTrace("SimulationDataSpatialHdf5.readLog() hasn't been modified ... <>");
+ VCMongoMessage.sendTrace("ChomboSimpleSimDataReader_NotUsed.readLog() hasn't been modified ... <>");
return;
}
@@ -414,13 +397,13 @@ private synchronized void readLogFile() throws DataAccessException, IOException
//
String logfileContent = FileUtils.readFileToString(logFile);
if (logfileContent.length() != logFileLength){
- System.out.println("SimulationDataSpatialHdf5.readLog(), read "+logfileContent.length()+" of "+logFileLength+" bytes of log file");
+ System.out.println("ChomboSimpleSimDataReader_NotUsed.readLog(), read "+logfileContent.length()+" of "+logFileLength+" bytes of log file");
}
StringTokenizer st = new StringTokenizer(logfileContent);
// so parse into 'dataFilenames' and 'dataTimes' arrays
if (st.countTokens() % 4 != 0) {
- throw new DataAccessException("SimulationDataSpatialHdf5.readLog(), tokens in each line should be factor of 4");
+ throw new DataAccessException("ChomboSimpleSimDataReader_NotUsed.readLog(), tokens in each line should be factor of 4");
}
while (st.hasMoreTokens()){
@@ -430,7 +413,7 @@ private synchronized void readLogFile() throws DataAccessException, IOException
double time = Double.parseDouble(st.nextToken());
logfileEntryList.add(new SimLogFileEntry(iteration, simFileName, zipFileName, time));
}
- VCMongoMessage.sendTrace("SimulationDataSpatialHdf5.readLog() <>");
+ VCMongoMessage.sendTrace("ChomboSimpleSimDataReader_NotUsed.readLog() <>");
}
public double[] getDataTimes() {
diff --git a/vcell-core/src/main/java/cbit/vcell/simdata/DataSet.java b/vcell-core/src/main/java/cbit/vcell/simdata/DataSet.java
index 7927f3a58e..ba34007b97 100644
--- a/vcell-core/src/main/java/cbit/vcell/simdata/DataSet.java
+++ b/vcell-core/src/main/java/cbit/vcell/simdata/DataSet.java
@@ -9,42 +9,17 @@
*/
package cbit.vcell.simdata;
-import java.io.BufferedInputStream;
-import java.io.BufferedOutputStream;
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.util.List;
-import java.util.Vector;
-//import java.util.zip.ZipEntry;
-import java.util.zip.ZipEntry;
-//import java.util.zip.ZipFile;
-
-import javax.swing.tree.DefaultMutableTreeNode;
-
-import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
-import org.apache.commons.compress.archivers.zip.ZipFile;
-import cbit.vcell.math.Variable;
import cbit.vcell.math.VariableType;
import cbit.vcell.simdata.SimulationData.SolverDataType;
-import cbit.vcell.solvers.CartesianMeshMovingBoundary.MBSDataGroup;
-import cbit.vcell.solvers.CartesianMeshMovingBoundary.MSBDataAttribute;
-import cbit.vcell.solvers.CartesianMeshMovingBoundary.MSBDataAttributeValue;
-import ncsa.hdf.object.Attribute;
-import ncsa.hdf.object.Dataset;
-import ncsa.hdf.object.FileFormat;
-import ncsa.hdf.object.Group;
-import ncsa.hdf.object.HObject;
+import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
+import org.apache.commons.compress.archivers.zip.ZipFile;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
+import java.io.*;
+import java.util.Vector;
+
public class DataSet implements java.io.Serializable
{
private final static Logger lg = LogManager.getLogger(DataSet.class);
@@ -85,7 +60,7 @@ public static double[] fetchSimData(String varName, File file) throws IOExceptio
if (solverDataType == SolverDataType.MBSData)
{
try {
- data = readMBSData(varName, time);
+ data = MovingBoundarySimDataReader.readMBSData(fileName, dataBlockList, varName, time);
} catch(Exception e) {
throw new IOException(e.getMessage(), e);
}
@@ -94,7 +69,7 @@ public static double[] fetchSimData(String varName, File file) throws IOExceptio
{
if (zipFile != null && isChombo(zipFile)) {
try {
- data = readHdf5VariableSolution(zipFile, new File(fileName).getName(), varName);
+ data = ChomboSimDataReader.readHdf5VariableSolution(zipFile, new File(fileName).getName(), varName);
} catch(Exception e) {
throw new IOException(e.getMessage(), e);
}
@@ -250,7 +225,7 @@ int[] getVariableTypeIntegers() {
* Creation date: (6/23/2004 9:37:26 AM)
* @return java.util.zip.ZipFile
*/
-protected static ZipFile openZipFile(File zipFile) throws IOException {
+public static ZipFile openZipFile(File zipFile) throws IOException {
for (int i = 0; i < 20; i ++) {
try {
return new org.apache.commons.compress.archivers.zip.ZipFile(zipFile);
@@ -288,7 +263,7 @@ void read(File file, File zipFile, SolverDataType solverDataType) throws IOExcep
if (solverDataType == SolverDataType.MBSData)
{
try {
- readMBSDataMetadata();
+ MovingBoundarySimDataReader.readMBSDataMetadata(fileName, dataBlockList);
} catch (Exception e) {
throw new IOException(e.getMessage(),e);
}
@@ -324,7 +299,7 @@ void read(File file, File zipFile, SolverDataType solverDataType) throws IOExcep
if(is != null && zipFile!=null && isChombo(zipFile)){
try {
- readHdf5SolutionMetaData(is);
+ ChomboSimDataReader.readHdf5SolutionMetaData(is, dataBlockList);
} catch (Exception e) {
throw new IOException(e.getMessage(),e);
}
@@ -349,187 +324,12 @@ void read(File file, File zipFile, SolverDataType solverDataType) throws IOExcep
}
}
-private static boolean isChombo(File zipFile){
+public static boolean isChombo(File zipFile){
return zipFile.getName().endsWith(".hdf5.zip");
}
-private static File createTempHdf5File(InputStream is) throws IOException
-{
- OutputStream out = null;
- try{
- File tempFile = File.createTempFile("temp", "hdf5");
- out=new FileOutputStream(tempFile);
- byte buf[] = new byte[1024];
- int len;
- while((len=is.read(buf))>0) {
- out.write(buf,0,len);
- }
- return tempFile;
- }
- finally
- {
- try {
- if (out != null) {
- out.close();
- }
- } catch (Exception ex) {
- // ignore
- }
- }
-}
-
-static File createTempHdf5File(ZipFile zipFile, String fileName) throws IOException
-{
- InputStream is = null;
- try
- {
- ZipEntry dataEntry = zipFile.getEntry(fileName);
- is = zipFile.getInputStream((ZipArchiveEntry) dataEntry);
- return createTempHdf5File(is);
- }
- finally
- {
- try
- {
- if (is != null)
- {
- is.close();
- }
- }
- catch (Exception ex)
- {
- // ignore
- }
- }
-}
-
-private static File createTempHdf5File(File zipFile, String fileName) throws IOException
-{
- ZipFile zipZipFile = null;
- try
- {
- zipZipFile = openZipFile(zipFile);
- return createTempHdf5File(zipZipFile, fileName);
- }
- finally
- {
- try
- {
- if (zipZipFile != null)
- {
- zipZipFile.close();
- }
- }
- catch (Exception ex)
- {
- // ignore
- }
- }
-}
-
-private void readHdf5SolutionMetaData(InputStream is) throws Exception
-{
- File tempFile = null;
- FileFormat solFile = null;
- try{
- tempFile = createTempHdf5File(is);
-
- FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
- solFile = fileFormat.createInstance(tempFile.getAbsolutePath(), FileFormat.READ);
- solFile.open();
- DefaultMutableTreeNode rootNode = (DefaultMutableTreeNode)solFile.getRootNode();
- Group rootGroup = (Group)rootNode.getUserObject();
- List solGroups = rootGroup.getMemberList();
-
- for (HObject memberGroup : solGroups)
- {
- if (memberGroup instanceof Group && memberGroup.getName().equals("solution"))
- {
- Group solGroup = (Group) memberGroup;
- List memberList = solGroup.getMemberList();
- for (HObject member : memberList)
- {
- if (!(member instanceof Dataset)){
- continue;
- }
- Dataset dataset = (Dataset)member;
- String dsname = dataset.getName();
- int vt = -1;
- String domain = null;
- List solAttrList = dataset.getMetadata();
- for (Attribute attr : solAttrList)
- {
- String attrName = attr.getName();
- if(attrName.equals("variable type")){
- Object obj = attr.getValue();
- vt = ((int[])obj)[0];
- } else if (attrName.equals("domain")) {
- Object obj = attr.getValue();
- domain = ((String[])obj)[0];
- }
- }
- long[] dims = dataset.getDims();
- String varName = domain == null ? dsname : domain + Variable.COMBINED_IDENTIFIER_SEPARATOR + dsname;
- dataBlockList.addElement(DataBlock.createDataBlock(varName, vt, (int) dims[0], 0));
- }
- break;
- }
- }
- } finally {
- try {
- if (solFile != null) {
- solFile.close();
- }
- if (tempFile != null) {
- if (!tempFile.delete()) {
- System.err.println("couldn't delete temp file " + tempFile);
- }
- }
- } catch(Exception e) {
- // ignore
- }
- }
-}
-
-
-static double[] readHdf5VariableSolution(File zipfile, String fileName, String varName) throws Exception{
-
- File tempFile = null;
- FileFormat solFile = null;
- try{
- tempFile = createTempHdf5File(zipfile, fileName);
-
- FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
- solFile = fileFormat.createInstance(tempFile.getAbsolutePath(), FileFormat.READ);
- solFile.open();
- if (varName != null)
- {
- String varPath = Hdf5Utils.getVarSolutionPath(varName);
- HObject solObj = FileFormat.findObject(solFile, varPath);
- if (solObj instanceof Dataset)
- {
- Dataset dataset = (Dataset)solObj;
- return (double[]) dataset.read();
- }
- }
- } finally {
- try {
- if (solFile != null) {
- solFile.close();
- }
- if (tempFile != null) {
- if (!tempFile.delete()) {
- System.err.println("couldn't delete temp file " + tempFile.getAbsolutePath());
- }
- }
- } catch(Exception e) {
- // ignore
- }
- }
- return null;
-}
-public static void writeNew(File file, String[] varNameArr, VariableType[] varTypeArr, org.vcell.util.ISize size, double[][] dataArr) throws IOException {
+ public static void writeNew(File file, String[] varNameArr, VariableType[] varTypeArr, org.vcell.util.ISize size, double[][] dataArr) throws IOException {
FileOutputStream fos = null;
BufferedOutputStream bos = null;
@@ -580,289 +380,4 @@ public static void writeNew(File file, String[] varNameArr, VariableType[] varTy
}
}
- static double[] readChomboExtrapolatedValues(String varName, File pdeFile, File zipFile) throws IOException {
- double[] data = null;
- if (zipFile != null && isChombo(zipFile)) {
- File tempFile = null;
- FileFormat solFile = null;
- try{
- tempFile = createTempHdf5File(zipFile, pdeFile.getName());
-
- FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
- solFile = fileFormat.createInstance(tempFile.getAbsolutePath(), FileFormat.READ);
- solFile.open();
- data = readChomboExtrapolatedValues(varName, solFile);
- } catch(Exception e) {
- throw new IOException(e.getMessage(), e);
- } finally {
- try {
- if (solFile != null) {
- solFile.close();
- }
- if (tempFile != null) {
- if (!tempFile.delete()) {
- System.err.println("couldn't delete temp file " + tempFile.getAbsolutePath());
- }
- }
- } catch(Exception e) {
- // ignore
- }
- }
- }
- return data;
- }
-
- static double[] readChomboExtrapolatedValues(String varName, FileFormat solFile) throws Exception {
- double data[] = null;
- if (varName != null)
- {
- String varPath = Hdf5Utils.getVolVarExtrapolatedValuesPath(varName);
- HObject solObj = FileFormat.findObject(solFile, varPath);
- if (solObj == null)
- {
- throw new IOException("Extrapolated values for variable '" + varName + "' does not exist in the results.");
- }
- if (solObj instanceof Dataset)
- {
- Dataset dataset = (Dataset)solObj;
- return (double[]) dataset.read();
- }
- }
- return data;
- }
-
- private void readMBSDataMetadata() throws Exception
- {
- FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
- FileFormat solFile = null;
- try {
- solFile = fileFormat.createInstance(fileName, FileFormat.READ);
- solFile.open();
- DefaultMutableTreeNode rootNode = (DefaultMutableTreeNode)solFile.getRootNode();
- Group rootGroup = (Group)rootNode.getUserObject();
- Group solutionGroup = null;
- for (Object member : rootGroup.getMemberList())
- {
- String memberName = ((HObject)member).getName();
- if (member instanceof Group)
- {
- MBSDataGroup group = MBSDataGroup.valueOf(memberName);
- if (group == MBSDataGroup.Solution)
- {
- solutionGroup = (Group) member;
- break;
- }
- }
- }
- if (solutionGroup == null)
- {
- throw new Exception("Group " + MBSDataGroup.Solution + " not found");
- }
-
- // find any timeGroup
- Group timeGroup = null;
- for (Object member : solutionGroup.getMemberList())
- {
- String memberName = ((HObject)member).getName();
- if (member instanceof Group && memberName.startsWith("time"))
- {
- timeGroup = (Group) member;
- break;
- }
- }
-
- if (timeGroup == null)
- {
- throw new Exception("No time group found");
- }
-
- // find all the datasets in that time group
- for (Object member : timeGroup.getMemberList())
- {
- if (member instanceof Dataset)
- {
- List solAttrList = ((Dataset)member).getMetadata();
- int size = 0;
- String varName = null;
- VariableType varType = null;
- for (Attribute attr : solAttrList)
- {
- String attrName = attr.getName();
- Object attrValue = attr.getValue();
- if(attrName.equals(MSBDataAttribute.name.name()))
- {
- varName = ((String[]) attrValue)[0];
- }
- else if (attrName.equals(MSBDataAttribute.size.name()))
- {
- size = ((int[]) attrValue)[0];
- }
- else if (attrName.equals(MSBDataAttribute.type.name()))
- {
- String vt = ((String[]) attrValue)[0];
- if (vt.equals(MSBDataAttributeValue.Point.name()))
- {
- varType = VariableType.POINT_VARIABLE;
- }
- else if (vt.equals(MSBDataAttributeValue.Volume.name()))
- {
- varType = VariableType.VOLUME;
- }
- else if (vt.equals(MSBDataAttributeValue.PointSubDomain.name()))
- {
- // Position for PointSubdomain
- }
- }
- }
- if (varType == VariableType.VOLUME)
- {
- // only display volume
- dataBlockList.addElement(DataBlock.createDataBlock(varName, varType.getType(), size, 0));
- }
- if (varType == VariableType.POINT_VARIABLE)
- {
- // only display volume
- dataBlockList.addElement(DataBlock.createDataBlock(varName, varType.getType(), size, 0));
- }
-
- }
- }
- }
- finally
- {
- if (solFile != null)
- {
- try {
- solFile.close();
- } catch (Exception e) {
- // ignore
- }
- }
- }
- }
-
- private double[] readMBSData(String varName, Double time) throws Exception {
- FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
- FileFormat solFile = null;
- double[] data = null;
- try {
- solFile = fileFormat.createInstance(fileName, FileFormat.READ);
- solFile.open();
- DefaultMutableTreeNode rootNode = (DefaultMutableTreeNode)solFile.getRootNode();
- Group rootGroup = (Group)rootNode.getUserObject();
- Group solutionGroup = null;
- for (Object member : rootGroup.getMemberList())
- {
- String memberName = ((HObject)member).getName();
- if (member instanceof Group)
- {
- MBSDataGroup group = MBSDataGroup.valueOf(memberName);
- if (group == MBSDataGroup.Solution)
- {
- solutionGroup = (Group) member;
- break;
- }
- }
- }
- if (solutionGroup == null)
- {
- throw new Exception("Group " + MBSDataGroup.Solution + " not found");
- }
-
- int varIndex = -1;
- int size = 0;
- for (int i = 0; i < dataBlockList.size(); ++ i)
- {
- DataBlock dataBlock = dataBlockList.get(i);
- if (dataBlock.getVarName().equals(varName))
- {
- varIndex = i;
- size = dataBlock.getSize();
- break;
- }
- }
-
- if (varIndex == -1)
- {
- throw new Exception("Variable " + varName + " not found");
- }
-
- // find time group for that time
- Group timeGroup = null;
- for (Object member : solutionGroup.getMemberList())
- {
- if (member instanceof Group)
- {
- Group group = (Group)member;
- List dsAttrList = group.getMetadata();
- Attribute timeAttribute = null;
- for (Attribute attr : dsAttrList)
- {
- if (attr.getName().equals(MSBDataAttribute.time.name()))
- {
- timeAttribute = attr;
- break;
- }
- }
- if (timeAttribute != null)
- {
- double t = ((double[]) timeAttribute.getValue())[0];
- if (Math.abs(t - time) < 1e-8)
- {
- timeGroup = group;
- break;
- }
- }
- }
- }
-
- if (timeGroup == null)
- {
- throw new Exception("No time group found for time=" + time);
- }
-
- // find variable dataset
- Dataset varDataset = null;
- for (Object member : timeGroup.getMemberList())
- {
- if (member instanceof Dataset)
- {
- List dsAttrList = ((Dataset)member).getMetadata();
- String var = null;
- for (Attribute attr : dsAttrList)
- {
- if (attr.getName().equals(MSBDataAttribute.name.name()))
- {
- var = ((String[]) attr.getValue())[0];
- break;
- }
- }
- if (var != null && var.equals(varName))
- {
- varDataset = (Dataset) member;
- break;
- }
- }
- }
- if (varDataset == null)
- {
- throw new Exception("Data for Variable " + varName + " at time " + time + " not found");
- }
-
- data = new double[size];
- System.arraycopy((double[])varDataset.getData(), 0, data, 0, size);
- return data;
- }
- finally
- {
- if (solFile != null)
- {
- try {
- solFile.close();
- } catch (Exception e) {
- // ignore
- }
- }
- }
- }
}
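
For orientation, the helpers deleted above all follow the same pattern: stream a zip entry into a temporary .hdf5 file, hand that file to the HDF5 object layer, then delete it when done. The sketch below shows only that extraction step; it is a minimal illustration that uses java.util.zip and java.nio rather than the commons-compress ZipFile the original code relied on, and the class and method names are invented for the example.

    import java.io.File;
    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.StandardCopyOption;
    import java.util.zip.ZipEntry;
    import java.util.zip.ZipFile;

    // Illustrative helper: extract one entry of a simdata zip into a temporary HDF5 file.
    final class TempHdf5Extractor {
        static File extractEntry(File zipFile, String entryName) throws IOException {
            File tempFile = File.createTempFile("temp", ".hdf5");
            tempFile.deleteOnExit();
            try (ZipFile zip = new ZipFile(zipFile)) {
                ZipEntry entry = zip.getEntry(entryName);
                if (entry == null) {
                    throw new IOException("entry " + entryName + " not found in " + zipFile);
                }
                try (InputStream is = zip.getInputStream(entry)) {
                    // copy the compressed entry out to the temp file in one call
                    Files.copy(is, tempFile.toPath(), StandardCopyOption.REPLACE_EXISTING);
                }
            }
            return tempFile;
        }
    }
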
diff --git a/vcell-core/src/main/java/cbit/vcell/simdata/Hdf5DataProcessingReaderNative.java b/vcell-core/src/main/java/cbit/vcell/simdata/Hdf5DataProcessingReaderNative.java
deleted file mode 100644
index e023c0c261..0000000000
--- a/vcell-core/src/main/java/cbit/vcell/simdata/Hdf5DataProcessingReaderNative.java
+++ /dev/null
@@ -1,696 +0,0 @@
-package cbit.vcell.simdata;
-
-import cbit.vcell.math.VariableType;
-import cbit.vcell.resource.NativeLib;
-import cbit.vcell.solver.AnnotatedFunction;
-import cbit.vcell.solver.Simulation;
-import ncsa.hdf.object.*;
-import ncsa.hdf.object.h5.H5ScalarDS;
-import org.vcell.util.Extent;
-import org.vcell.util.ISize;
-import org.vcell.util.Origin;
-import org.vcell.util.document.TSJobResultsNoStats;
-import org.vcell.util.document.TimeSeriesJobResults;
-import org.vcell.util.document.TimeSeriesJobSpec;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-
-import static cbit.vcell.simdata.SimDataConstants.*;
-
-
-public class Hdf5DataProcessingReaderNative {
-
-
- public DataOperationResults.DataProcessingOutputInfo getDataProcessingOutput(DataOperation.DataProcessingOutputInfoOP infoOP, File dataProcessingOutputFileHDF5) throws Exception {
- var outputInfo1 = (DataOperationResults.DataProcessingOutputInfo)getDataProcessingOutput_internal(infoOP, dataProcessingOutputFileHDF5);
- Hdf5DataProcessingReaderPure hdf5DataProcessingReaderPure = new Hdf5DataProcessingReaderPure();
- var outputInfo2 = hdf5DataProcessingReaderPure.getDataProcessingOutput(infoOP, dataProcessingOutputFileHDF5);
- if (outputInfo1.getVariableNames().length != outputInfo2.getVariableNames().length) {
- throw new Exception("Variable names length mismatch");
- }
- for (int i = 0; i < outputInfo1.getVariableNames().length; i++) {
- if (!outputInfo1.getVariableNames()[i].equals(outputInfo2.getVariableNames()[i])) {
- throw new Exception("Variable names mismatch");
- }
- }
- System.out.println("hello");
- return outputInfo1;
- }
-
-
- public DataOperationResults.DataProcessingOutputDataValues getDataProcessingOutput(DataOperation.DataProcessingOutputDataValuesOP dataValuesOp, File dataProcessingOutputFileHDF5) throws Exception {
- var values1 = (DataOperationResults.DataProcessingOutputDataValues)getDataProcessingOutput_internal(dataValuesOp, dataProcessingOutputFileHDF5);
- Hdf5DataProcessingReaderPure hdf5DataProcessingReaderPure = new Hdf5DataProcessingReaderPure();
- var values2 = hdf5DataProcessingReaderPure.getDataProcessingOutput(dataValuesOp, dataProcessingOutputFileHDF5);
- if (values1.getDataValues().length != values2.getDataValues().length) {
- throw new Exception("Data values length mismatch");
- }
- return values1;
- }
-
-
- public DataOperationResults.DataProcessingOutputTimeSeriesValues getDataProcessingOutput(DataOperation.DataProcessingOutputTimeSeriesOP timeSeriesOp, File dataProcessingOutputFileHDF5) throws Exception {
- var values1 = (DataOperationResults.DataProcessingOutputTimeSeriesValues)getDataProcessingOutput_internal(timeSeriesOp, dataProcessingOutputFileHDF5);
- Hdf5DataProcessingReaderPure hdf5DataProcessingReaderPure = new Hdf5DataProcessingReaderPure();
- var values2 = hdf5DataProcessingReaderPure.getDataProcessingOutput(timeSeriesOp, dataProcessingOutputFileHDF5);
- return values1;
- }
-
- private DataOperationResults getDataProcessingOutput_internal(DataOperation dataOperation, File dataProcessingOutputFileHDF5) throws Exception {
- NativeLib.HDF5.load();
- DataOperationResults dataProcessingOutputResults = null;
- FileFormat hdf5FileFormat = null;
- try{
- if (dataProcessingOutputFileHDF5.exists()) {
- // retrieve an instance of H5File
- FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
- if (fileFormat == null){
- throw new Exception("Cannot find HDF5 FileFormat.");
- }
- // open the file with read-only access
- hdf5FileFormat = fileFormat.open(dataProcessingOutputFileHDF5.getAbsolutePath(), FileFormat.READ);
- hdf5FileFormat.setMaxMembers(Simulation.MAX_LIMIT_SPATIAL_TIMEPOINTS);
- // open the file and retrieve the file structure
- hdf5FileFormat.open();
- Group root = (Group)((javax.swing.tree.DefaultMutableTreeNode)hdf5FileFormat.getRootNode()).getUserObject();
- if(dataOperation instanceof DataOperation.DataProcessingOutputInfoOP){
- DataSetControllerImpl.DataProcessingHelper dataProcessingHelper = new DataSetControllerImpl.DataProcessingHelper();
- iterateHDF5(root,"",dataProcessingHelper);
- dataProcessingOutputResults = new DataOperationResults.DataProcessingOutputInfo(dataOperation.getVCDataIdentifier(),
- dataProcessingHelper.getVarNames(),
- dataProcessingHelper.getVarISizes(),
- dataProcessingHelper.times,
- dataProcessingHelper.getVarUnits(),
- dataProcessingHelper.getPostProcessDataTypes(),
- dataProcessingHelper.getVarOrigins(),
- dataProcessingHelper.getVarExtents(),
- dataProcessingHelper.getVarStatValues());
- //map function names to PostProcess state variable name
- ArrayList postProcessImageVarNames = new ArrayList();
- for (int i = 0; i < ((DataOperationResults.DataProcessingOutputInfo)dataProcessingOutputResults).getVariableNames().length; i++) {
- String variableName = ((DataOperationResults.DataProcessingOutputInfo)dataProcessingOutputResults).getVariableNames()[i];
- if(((DataOperationResults.DataProcessingOutputInfo)dataProcessingOutputResults).getPostProcessDataType(variableName).equals(DataOperationResults.DataProcessingOutputInfo.PostProcessDataType.image)){
- postProcessImageVarNames.add(variableName);
- }
- }
- HashMap mapFunctionNameToStateVarName = null;
- if(((DataOperation.DataProcessingOutputInfoOP)dataOperation).getOutputContext() != null){
- mapFunctionNameToStateVarName = new HashMap();
- for (int i = 0; i < ((DataOperation.DataProcessingOutputInfoOP)dataOperation).getOutputContext().getOutputFunctions().length; i++) {
- AnnotatedFunction annotatedFunction = ((DataOperation.DataProcessingOutputInfoOP)dataOperation).getOutputContext().getOutputFunctions()[i];
- if(annotatedFunction.getFunctionType().equals(VariableType.POSTPROCESSING)){
- String[] symbols = annotatedFunction.getExpression().flatten().getSymbols();
- //Find any PostProcess state var that matches a symbol in the function
- for (int j = 0; j < symbols.length; j++) {
- if(postProcessImageVarNames.contains(symbols[j])){
- mapFunctionNameToStateVarName.put(annotatedFunction.getName(), symbols[j]);
- break;
- }
- }
- }
- }
- }
- if(mapFunctionNameToStateVarName != null && mapFunctionNameToStateVarName.size() > 0){
- dataProcessingOutputResults = new DataOperationResults.DataProcessingOutputInfo(((DataOperationResults.DataProcessingOutputInfo)dataProcessingOutputResults),mapFunctionNameToStateVarName);
- }
- }else{
- OutputContext outputContext = dataOperation.getOutputContext();
- String[] variableNames = null;
- DataOperation.DataProcessingOutputDataValuesOP.DataIndexHelper dataIndexHelper = null;
- DataOperation.DataProcessingOutputDataValuesOP.TimePointHelper timePointHelper = null;
- if(dataOperation instanceof DataOperation.DataProcessingOutputDataValuesOP){
- variableNames = new String[] {((DataOperation.DataProcessingOutputDataValuesOP)dataOperation).getVariableName()};
- dataIndexHelper = ((DataOperation.DataProcessingOutputDataValuesOP)dataOperation).getDataIndexHelper();
- timePointHelper = ((DataOperation.DataProcessingOutputDataValuesOP)dataOperation).getTimePointHelper();
- }else if(dataOperation instanceof DataOperation.DataProcessingOutputTimeSeriesOP){
- variableNames = ((DataOperation.DataProcessingOutputTimeSeriesOP)dataOperation).getTimeSeriesJobSpec().getVariableNames();
- TimeSeriesJobSpec timeSeriesJobSpec = ((DataOperation.DataProcessingOutputTimeSeriesOP)dataOperation).getTimeSeriesJobSpec();
- double[] specificTimepoints = extractTimeRange(((DataOperation.DataProcessingOutputTimeSeriesOP)dataOperation).getAllDatasetTimes(), timeSeriesJobSpec.getStartTime(), timeSeriesJobSpec.getEndTime());
- timePointHelper = DataOperation.DataProcessingOutputDataValuesOP.TimePointHelper.createSpecificTimePointHelper(specificTimepoints);
- timeSeriesJobSpec.initIndices();
- dataIndexHelper = DataOperation.DataProcessingOutputDataValuesOP.DataIndexHelper.createSpecificDataIndexHelper(timeSeriesJobSpec.getIndices()[0]);
- }else{
- throw new Exception("Unknown Dataoperation "+dataOperation.getClass().getName());
- }
- if(variableNames.length != 1){
- throw new Exception("Only 1 variable request at a time");
- }
- AnnotatedFunction[] annotatedFunctions = (outputContext==null?null:outputContext.getOutputFunctions());
- AnnotatedFunction foundFunction = null;
- if(annotatedFunctions != null){
- for (int i = 0; i < annotatedFunctions.length; i++) {
- if(annotatedFunctions[i].getName().equals(variableNames[0])){
- foundFunction = annotatedFunctions[i];
- break;
- }
- }
- }
- double[] alltimes = null;
- if(foundFunction != null){
- DataOperationResults.DataProcessingOutputInfo dataProcessingOutputInfo =
- getDataProcessingOutput(new DataOperation.DataProcessingOutputInfoOP(dataOperation.getVCDataIdentifier(),false,dataOperation.getOutputContext()), dataProcessingOutputFileHDF5);
- alltimes = dataProcessingOutputInfo.getVariableTimePoints();
- DataSetControllerImpl.FunctionHelper functionHelper = DataSetControllerImpl.getPostProcessStateVariables(foundFunction, dataProcessingOutputInfo);
- DataSetControllerImpl.DataProcessingHelper dataProcessingHelper = new DataSetControllerImpl.DataProcessingHelper(functionHelper.postProcessStateVars,timePointHelper,dataIndexHelper);
- iterateHDF5(root,"",dataProcessingHelper);
- dataProcessingOutputResults =
- DataSetControllerImpl.evaluatePostProcessFunction(dataProcessingOutputInfo, functionHelper.postProcessStateVars, dataProcessingHelper.specificDataValues,
- dataIndexHelper, timePointHelper, functionHelper.flattenedBoundExpression,variableNames[0]);
- }else{
- DataSetControllerImpl.DataProcessingHelper dataProcessingHelper =
- new DataSetControllerImpl.DataProcessingHelper(new String[] {variableNames[0]},timePointHelper,dataIndexHelper);
- iterateHDF5(root,"",dataProcessingHelper);
- alltimes = dataProcessingHelper.times;
- if(dataProcessingHelper.specificDataValues == null){
- throw new Exception("Couldn't find postprocess data as specified for var="+variableNames[0]);
- }
- dataProcessingOutputResults = new DataOperationResults.DataProcessingOutputDataValues(dataOperation.getVCDataIdentifier(),
- variableNames[0],timePointHelper,dataIndexHelper, dataProcessingHelper.specificDataValues[0]);
- }
- if(dataOperation instanceof DataOperation.DataProcessingOutputTimeSeriesOP){
- TimeSeriesJobResults timeSeriesJobResults = null;
- DataOperation.DataProcessingOutputTimeSeriesOP dataProcessingOutputTimeSeriesOP = (DataOperation.DataProcessingOutputTimeSeriesOP)dataOperation;
- double[][] dataValues = ((DataOperationResults.DataProcessingOutputDataValues)dataProcessingOutputResults).getDataValues();//[time][data]
- double[] desiredTimes = (timePointHelper.isAllTimePoints()?alltimes:timePointHelper.getTimePoints());
- double[][][] timeSeriesFormatedValuesArr = new double[variableNames.length][dataIndexHelper.getDataIndexes().length+1][desiredTimes.length];
- for (int i = 0; i < timeSeriesFormatedValuesArr.length; i++) {//var
- for (int j = 0; j < timeSeriesFormatedValuesArr[i].length; j++) {//index
- if(j==0){
- timeSeriesFormatedValuesArr[i][j] = desiredTimes;
- continue;
- }
- for (int k = 0; k < timeSeriesFormatedValuesArr[i][j].length; k++) {//time
- //assume 1 variable for now
- timeSeriesFormatedValuesArr[i][j][k] = dataValues[k][j-1];
- }
- }
- }
-
- if(dataProcessingOutputTimeSeriesOP.getTimeSeriesJobSpec().isCalcSpaceStats()){
- DataSetControllerImpl.SpatialStatsInfo spatialStatsInfo = new DataSetControllerImpl.SpatialStatsInfo();
- spatialStatsInfo.bWeightsValid = false;
- timeSeriesJobResults =
- DataSetControllerImpl.calculateStatisticsFromWhole(dataProcessingOutputTimeSeriesOP.getTimeSeriesJobSpec(), timeSeriesFormatedValuesArr, timePointHelper.getTimePoints(), spatialStatsInfo);
- }else{
- timeSeriesJobResults =
- new TSJobResultsNoStats(
- variableNames,
- new int[][] {dataIndexHelper.getDataIndexes()},
- timePointHelper.getTimePoints(),
- timeSeriesFormatedValuesArr);
- }
- dataProcessingOutputResults = new DataOperationResults.DataProcessingOutputTimeSeriesValues(dataOperation.getVCDataIdentifier(), timeSeriesJobResults);
- }
- }
- }else{
- throw new FileNotFoundException("Data Processing Output file '"+dataProcessingOutputFileHDF5.getPath()+"' not found");
- }
- }catch(Exception e){
- DataSetControllerImpl.lg.error(e.getMessage(), e);
- }finally{
- if(hdf5FileFormat != null){try{hdf5FileFormat.close();}catch(Exception e){
- DataSetControllerImpl.lg.error(e.getMessage(), e);}}
- }
-
- return dataProcessingOutputResults;
- }
-
- private static double[] extractTimeRange(double[] alltimes, double startTime, double stoptime){
- ArrayList selectedtimePointsList = new ArrayList();
- for (int i = 0; i < alltimes.length; i++) {
- if(alltimes[i] >= startTime && alltimes[i] <= stoptime){
- selectedtimePointsList.add(alltimes[i]);
- }
- }
- double[] selectedTimePoints = new double[selectedtimePointsList.size()];
- for (int j = 0; j < selectedtimePointsList.size(); j++) {
- selectedTimePoints[j] = selectedtimePointsList.get(j);
- }
- return selectedTimePoints;
- }
-
-
- private static void iterateHDF5(HObject hObject, String indent, DataSetControllerImpl.DataProcessingHelper dataProcessingHelper) throws Exception{
- if(hObject instanceof Group){
- Group group = ((Group)hObject);
- printInfo(group,indent);
- if(group.getName().equals("/") || group.getName().equals(SimDataConstants.DATA_PROCESSING_OUTPUT_EXTENSION_POSTPROCESSING)){
- List postProcessMembers = ((Group)hObject).getMemberList();
- for(HObject nextHObject:postProcessMembers){
- iterateHDF5(nextHObject, indent+" ", dataProcessingHelper);
- }
- }else if(group.getName().equals(SimDataConstants.DATA_PROCESSING_OUTPUT_EXTENSION_VARIABLESTATISTICS) && dataProcessingHelper.isInfoOnly()){
- populateStatNamesAndUnits(hObject, dataProcessingHelper);
- List statDataAtEachTime = group.getMemberList();
- dataProcessingHelper.statValues = new double[dataProcessingHelper.statVarNames.length][statDataAtEachTime.size()];
- for(HObject nextStatData:statDataAtEachTime){
- printInfo(nextStatData,indent+" ");
- processDims(nextStatData, dataProcessingHelper,false);//always get stats data when ask for info
- double[] stats = (double[])dataProcessingHelper.tempData;
- int timeIndex = Integer.parseInt(nextStatData.getName().substring("time".length()));
- for (int j = 0; j < stats.length; j++) {
- dataProcessingHelper.statValues[j][timeIndex] = stats[j];
- }
- }
- }else{//must be image data
- if(dataProcessingHelper.isInfoOnly()){
- dataProcessingHelper.imageNames = new ArrayList();
- dataProcessingHelper.imageISize = new ArrayList();
- dataProcessingHelper.imageOrigin = new ArrayList();
- dataProcessingHelper.imageExtent = new ArrayList();
- Origin imgDataOrigin;
- Extent imgDataExtent;
- HashMap attrHashMap = getHDF5Attributes(group);
- if(attrHashMap.size() == 2){
- imgDataOrigin = new Origin(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINX)), 0, 0);
- imgDataExtent = new Extent(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTX)), 1, 1);//this is 1D, however the extentY, Z cannot take 0
- }
- else if(attrHashMap.size() == 4){
- imgDataOrigin = new Origin(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINX)), Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINY)), 0);
- imgDataExtent = new Extent(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTX)), Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTY)), 1);//this is 2D, however the extentZ cannot take 0
- }
- else if(attrHashMap.size() == 6){
- imgDataOrigin = new Origin(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINX)), Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINY)), Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINZ)));
- imgDataExtent = new Extent(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTX)), Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTY)), Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTZ)));
- }else{
- throw new Exception("Unexpected number of origin/extent values");
- }
- dataProcessingHelper.imageNames.add(hObject.getName());
- dataProcessingHelper.imageOrigin.add(imgDataOrigin);
- dataProcessingHelper.imageExtent.add(imgDataExtent);
- //get ISize
- processDims((H5ScalarDS)(((Group)hObject).getMemberList()).get(0), dataProcessingHelper,true);
- long[] dims = dataProcessingHelper.tempDims;
- ISize isize = new ISize((int)dims[0], (int)(dims.length>1?dims[1]:1), (int)(dims.length>2?dims[2]:1));
- dataProcessingHelper.imageISize.add(isize);
- }else{
- int currentVarNameIndex = -1;
- for (int i = 0; i < dataProcessingHelper.specificVarNames.length; i++) {
- if(group.getName().equals(dataProcessingHelper.specificVarNames[i])){
- currentVarNameIndex = i;
- break;
- }
- }
- if(currentVarNameIndex == -1){
- return;//skip this group
- }
- dataProcessingHelper.specificDataValues[currentVarNameIndex] = new double[(dataProcessingHelper.specificTimePointHelper.isAllTimePoints()?dataProcessingHelper.times.length:dataProcessingHelper.specificTimePointHelper.getTimePoints().length)][];
- List imageDataAtEachTime = ((Group)hObject).getMemberList();
- int foundTimePointIndex = 0;
- for(HObject nextImageData:imageDataAtEachTime){
-// if(dataProcessingHelper.isInfoOnly()){
-// printInfo(nextImageData,indent+" ");
-// processDims(nextImageData, dataProcessingHelper,true);
-// long[] dims = dataProcessingHelper.tempDims;
-// ISize isize = new ISize((int)dims[0], (int)(dims.length>1?dims[1]:1), (int)(dims.length>2?dims[2]:1));
-// dataProcessingHelper.imageISize.add(isize);
-// break;//only need 1st one for info
-// }else{
- int hdf5GroupTimeIndex = Integer.parseInt(nextImageData.getName().substring(SimDataConstants.DATA_PROCESSING_OUTPUT_EXTENSION_TIMEPREFIX.length()));
- if(dataProcessingHelper.specificTimePointHelper.isAllTimePoints() || dataProcessingHelper.specificTimePointHelper.getTimePoints()[foundTimePointIndex] == dataProcessingHelper.times[hdf5GroupTimeIndex]){
-
- int timeIndex = (dataProcessingHelper.specificTimePointHelper.isAllTimePoints()?hdf5GroupTimeIndex:foundTimePointIndex);
- processDims(nextImageData, dataProcessingHelper,false);
- long[] dims = dataProcessingHelper.tempDims;
- ISize isize = new ISize((int)dims[0], (int)(dims.length>1?dims[1]:1), (int)(dims.length>2?dims[2]:1));
- if(dataProcessingHelper.specificDataIndexHelper.isAllDataIndexes()){
- dataProcessingHelper.specificDataValues[currentVarNameIndex][timeIndex] = (double[])dataProcessingHelper.tempData;
- }else if(dataProcessingHelper.specificDataIndexHelper.isSingleSlice()){
- dataProcessingHelper.specificDataValues[currentVarNameIndex][timeIndex] = new double[isize.getX()*isize.getY()];
- System.arraycopy(
- (double[])dataProcessingHelper.tempData,dataProcessingHelper.specificDataIndexHelper.getSliceIndex()*(isize.getX()*isize.getY()),
- dataProcessingHelper.specificDataValues[currentVarNameIndex][timeIndex], 0, isize.getX()*isize.getY());
- }else{
- dataProcessingHelper.specificDataValues[currentVarNameIndex][timeIndex] = new double[dataProcessingHelper.specificDataIndexHelper.getDataIndexes().length];
- for (int i = 0; i < dataProcessingHelper.specificDataIndexHelper.getDataIndexes().length; i++) {
- dataProcessingHelper.specificDataValues[currentVarNameIndex][timeIndex][i] = ((double[])dataProcessingHelper.tempData)[dataProcessingHelper.specificDataIndexHelper.getDataIndexes()[i]];
- }
- }
- foundTimePointIndex++;
- if(!dataProcessingHelper.specificTimePointHelper.isAllTimePoints() && foundTimePointIndex == dataProcessingHelper.specificTimePointHelper.getTimePoints().length){
- //break out after we get our data
- break;
- }
- }
-
-// }
- }
- }
- }
- }else if(hObject instanceof Dataset){
- Dataset dataset = (Dataset)hObject;
- printInfo(dataset,indent);
- if(dataset.getName().equals(SimDataConstants.DATA_PROCESSING_OUTPUT_EXTENSION_TIMES)){
- processDims(hObject, dataProcessingHelper,false);
- dataProcessingHelper.times = (double[])dataProcessingHelper.tempData;
- }
- }else if(hObject instanceof Datatype){
- printInfo(hObject, indent);
- }else{
- printInfo(hObject, indent);
- }
- }
- private static HashMap getHDF5Attributes(HObject hObject) throws Exception{
- HashMap attrHashMap = new HashMap();
- List metaDataL = hObject.getMetadata();
- if(metaDataL != null){
- for (int j = 0; j < metaDataL.size(); j++) {
- Attribute attr = (Attribute)metaDataL.get(j);
- String attrValue = attr.toString(",");
- //System.out.print(" "+attr.getName()+"='"+attrValue+"'");
- attrHashMap.put(attr.getName(),attr.toString(","));
- }
- }
- return attrHashMap;
- }
-
- private static void printInfo(HObject hObject,String indent) throws Exception{
- if(true){return;}
- System.out.println(indent+hObject.getName()+":"+hObject.getClass().getName());
- List metaDatas = hObject.getMetadata();
- for(Object metaData:metaDatas){
- if(metaData instanceof Attribute){
- Attribute attribute = (Attribute)metaData;
- System.out.println(indent+"metadata="+attribute.getName()+" "+attribute.getType().getDatatypeDescription());
- }else{
- System.out.println(indent+"metadata="+metaData.getClass().getName());
- }
- }
- }
- private static void processDims(HObject hObject, DataSetControllerImpl.DataProcessingHelper dataProcessingHelper, boolean bInfoOnly) throws Exception{
- H5ScalarDS h5ScalarDS = (H5ScalarDS)hObject;
- h5ScalarDS.init();
- dataProcessingHelper.tempDims = h5ScalarDS.getDims();
-
- //make sure all dimensions are selected for loading if 3D
- //note: for 3D, only 1st slice selected by default
- long[] selectedDims = h5ScalarDS.getSelectedDims();
- if(selectedDims != null && selectedDims.length > 2){
- //changes internal class variable used during read
- selectedDims[2] = dataProcessingHelper.tempDims[2];
- }
- if(!bInfoOnly){
- //load all data
- dataProcessingHelper.tempData = h5ScalarDS.read();
- }
-
- if(dataProcessingHelper.tempDims != null){
- if(dataProcessingHelper.tempDims.length > 1){
- //For HDF5View (x stored in index 1) and (y stored in index 0) so must switch back to normal assumption
- long dimsY = dataProcessingHelper.tempDims[0];
- dataProcessingHelper.tempDims[0] = dataProcessingHelper.tempDims[1];
- dataProcessingHelper.tempDims[1] = dimsY;
- }
-// //uncomment for Debug
-// System.out.print(" dims=(");
-// for (int j = 0; j < dataProcessingHelper.tempDims.length; j++) {
-// System.out.print((j>0?"x":"")+dataProcessingHelper.tempDims[j]);
-// }
-// System.out.print(")");
- }
- }
- private static void populateStatNamesAndUnits(HObject hObject, DataSetControllerImpl.DataProcessingHelper dataProcessingHelper) throws Exception{
- if(!hObject.getName().equals(SimDataConstants.DATA_PROCESSING_OUTPUT_EXTENSION_VARIABLESTATISTICS)){
- throw new Exception("expecting obejct name "+SimDataConstants.DATA_PROCESSING_OUTPUT_EXTENSION_VARIABLESTATISTICS);
- }
- final String NAME_ATTR = "_name";
- final String UNIT_ATTR = "_unit";
- final String STAT_PREFIX = "comp_";
-
- List metaDataL = hObject.getMetadata();
- if(metaDataL != null){
- HashMap attrHashMap = getHDF5Attributes(hObject);//map contains the same number of names and attributes
- String[] variableStatNames = null;
- String[] variableUnits = null;
- Iterator attrIterTemp = attrHashMap.keySet().iterator();
- boolean bHasUnit = false;
- for (int j = 0; j < attrHashMap.size(); j++) {
- String compVal = attrIterTemp.next();
- if(compVal.contains(NAME_ATTR) || compVal.contains(UNIT_ATTR)){
- bHasUnit = true;
- break;
- }
- }
- if(bHasUnit){
- variableStatNames = new String[attrHashMap.size()/2];
- variableUnits = new String[attrHashMap.size()/2];
- }else{
- variableStatNames = new String[attrHashMap.size()]; // old way
- }
- Iterator attrIter = attrHashMap.keySet().iterator();
- for (int j = 0; j < attrHashMap.size(); j++) {
- String compVal = attrIter.next();
- if(compVal.contains(NAME_ATTR)){
- int compVarIdx = Integer.parseInt(compVal.substring(STAT_PREFIX.length(), compVal.indexOf('_', STAT_PREFIX.length())));
- variableStatNames[compVarIdx] = attrHashMap.get(compVal);
- }else if(compVal.contains(UNIT_ATTR)){
- int compVarIdx = Integer.parseInt(compVal.substring(STAT_PREFIX.length(), compVal.indexOf('_', STAT_PREFIX.length())));
- variableUnits[compVarIdx] = attrHashMap.get(compVal);
- }else{//old way for var names(e.g. comp_0 = abc) with no "_name" or "_unit"
- int compVarIdx = Integer.parseInt(compVal.substring(STAT_PREFIX.length()));
- variableStatNames[compVarIdx] = attrHashMap.get(compVal);
- }
- }
- dataProcessingHelper.statVarNames = variableStatNames;
- dataProcessingHelper.statVarUnits = variableUnits;
- }
- }
-
- //uncomment it for Debug
-//private static String DATASETNAME = "/";
-//enum H5O_type {
-// H5O_TYPE_UNKNOWN(-1), // Unknown object type
-// H5O_TYPE_GROUP(0), // Object is a group
-// H5O_TYPE_DATASET(1), // Object is a dataset
-// H5O_TYPE_NAMED_DATATYPE(2), // Object is a named data type
-// H5O_TYPE_NTYPES(3); // Number of different object types
-// private static final Map lookup = new HashMap();
-//
-// static {
-// for (H5O_type s : EnumSet.allOf(H5O_type.class))
-// lookup.put(s.getCode(), s);
-// }
-//
-// private int code;
-//
-// H5O_type(int layout_type) {
-// this.code = layout_type;
-// }
-//
-// public int getCode() {
-// return this.code;
-// }
-//
-// public static H5O_type get(int code) {
-// return lookup.get(code);
-// }
-//}
-//
-//public static void do_iterate(File hdfFile) {
-// int file_id = -1;
-//
-// // Open a file using default properties.
-// try {
-// file_id = H5.H5Fopen(hdfFile.getAbsolutePath(), HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
-// }
-// catch (Exception e) {
-// lg.error(e);
-// }
-//
-// // Begin iteration.
-// System.out.println("Objects in root group:");
-// try {
-// if (file_id >= 0) {
-// int count = (int)H5.H5Gn_members(file_id, DATASETNAME);
-// String[] oname = new String[count];
-// int[] otype = new int[count];
-// int[] ltype = new int[count];
-// long[] orefs = new long[count];
-// H5.H5Gget_obj_info_all(file_id, DATASETNAME, oname, otype, ltype, orefs, HDF5Constants.H5_INDEX_NAME);
-//
-// // Get type of the object and display its name and type.
-// for (int indx = 0; indx < otype.length; indx++) {
-// switch (H5O_type.get(otype[indx])) {
-// case H5O_TYPE_GROUP:
-// System.out.println(" Group: " + oname[indx]);
-// break;
-// case H5O_TYPE_DATASET:
-// System.out.println(" Dataset: " + oname[indx]);
-// break;
-// case H5O_TYPE_NAMED_DATATYPE:
-// System.out.println(" Datatype: " + oname[indx]);
-// break;
-// default:
-// System.out.println(" Unknown: " + oname[indx]);
-// }
-// }
-// }
-// }
-// catch (Exception e) {
-// lg.error(e);
-// }
-//
-// // Close the file.
-// try {
-// if (file_id >= 0)
-// H5.H5Fclose(file_id);
-// }
-// catch (Exception e) {
-// lg.error(e);
-// }
-//}
-
-//public static void populateHDF5(Group g, String indent,DataProcessingOutput0 dataProcessingOutput,boolean bVarStatistics,String imgDataName,Origin imgDataOrigin,Extent imgDataExtent) throws Exception
-//{
-// if (g == null)
-// return;
-//
-// List members = g.getMemberList();
-//
-// int n = members.size();
-// indent += " ";
-// HObject obj = null;
-//
-// String nameAtt = "_name";
-// String unitAtt = "_unit";
-// for (int i=0; i 2){
-// //changes internal class variable used during read
-// selectedDims[2] = dims[2];
-// }
-//
-// //load all data
-// Object data = h5ScalarDS.read();
-//
-// if(dims != null){
-// if(dims.length > 1){
-// //For HDF5View (x stored in index 1) and (y stored in index 0) so must switch back to normal assumption
-// long dimsY = dims[0];
-// dims[0] = dims[1];
-// dims[1] = dimsY;
-// }
-// //uncomment for Debug
-// /*System.out.print(" dims=(");
-// for (int j = 0; j < dims.length; j++) {
-// System.out.print((j>0?"x":"")+dims[j]);
-// }
-// System.out.print(")");*/
-// }
-//
-//// System.out.print(" len="+times.length);
-// if(obj.getName().equals(SimDataConstants.DATA_PROCESSING_OUTPUT_EXTENSION_TIMES)){
-// double[] times = (double[])data;
-// dataProcessingOutput.setTimes(times);
-// }else if(bVarStatistics){
-// double[] stats = (double[])data;
-// int timeIndex = Integer.parseInt(obj.getName().substring("time".length()));
-// for (int j = 0; j < stats.length; j++) {
-// dataProcessingOutput.getVariableStatValues()[j][timeIndex] = stats[j];
-// }
-// }else{
-// double min = ((double[])data)[0];
-// double max = min;
-// for (int j = 0; j < ((double[])data).length; j++) {
-// min = Math.min(min, ((double[])data)[j]);
-// max = Math.max(max, ((double[])data)[j]);
-// }
-// int xSize = (int)dims[0];
-// int ySize = (int)(dims.length>1?dims[1]:1);
-// int zSize = (int)(dims.length>2?dims[2]:1);
-// SourceDataInfo sourceDataInfo =
-// new SourceDataInfo(SourceDataInfo.RAW_VALUE_TYPE, (double[])data, (imgDataExtent==null?new Extent(1,1,1):imgDataExtent), (imgDataOrigin==null?null:imgDataOrigin), new Range(min, max), 0, xSize, 1, ySize, xSize, zSize, xSize*ySize);
-// Vector otherData = dataProcessingOutput.getDataGenerators().get(imgDataName);
-// int timeIndex = Integer.parseInt(obj.getName().substring(SimDataConstants.DATA_PROCESSING_OUTPUT_EXTENSION_TIMEPREFIX.length()));
-// otherData.add(sourceDataInfo);
-// if(otherData.size()-1 != timeIndex){
-// throw new Exception("Error HDF5 parse: added data index does not match timeIndex");
-// }
-// }
-// }else if (obj instanceof H5Group && !obj.getName().equals(SimDataConstants.DATA_PROCESSING_OUTPUT_EXTENSION_POSTPROCESSING)){
-// bVarStatistics = false;
-// imgDataName = obj.getName();
-// dataProcessingOutput.getDataGenerators().put(imgDataName, new Vector());
-//
-// List metaDataL = obj.getMetadata();
-// if(metaDataL != null){//assume 6 attributes defining origin and extent
-// HashMap attrHashMap = getHDF5Attributes(obj);
-// if(attrHashMap.size() == 2){
-// imgDataOrigin = new Origin(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINX)), 0, 0);
-// imgDataExtent = new Extent(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTX)), 1, 1);//this is 1D, however the extentY, Z cannot take 0
-// }
-// else if(attrHashMap.size() == 4){
-// imgDataOrigin = new Origin(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINX)), Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINY)), 0);
-// imgDataExtent = new Extent(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTX)), Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTY)), 1);//this is 2D, however the extentZ cannot take 0
-// }
-// else if(attrHashMap.size() == 6){
-// imgDataOrigin = new Origin(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINX)), Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINY)), Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINZ)));
-// imgDataExtent = new Extent(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTX)), Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTY)), Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTZ)));
-// }
-// }
-//
-// }
-// System.out.println();
-//
-// if (obj instanceof Group)
-// {
-// populateHDF5((Group)obj, indent,dataProcessingOutput,bVarStatistics,imgDataName,imgDataOrigin,imgDataExtent);
-// }
-// }
-//}
-
-}
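
The class removed above existed only as a cross-check shim: it ran the native HDF5 reader and Hdf5DataProcessingReaderPure side by side and compared variable names and value counts. With it gone, callers use the pure reader directly. The sketch below shows that remaining call path using only the method signatures visible in the deleted code; the return type and everything else about Hdf5DataProcessingReaderPure is assumed, not taken from this diff.

    import java.io.File;

    import cbit.vcell.simdata.DataOperation;
    import cbit.vcell.simdata.DataOperationResults;
    import cbit.vcell.simdata.Hdf5DataProcessingReaderPure;

    // Illustrative caller: read post-processing output info through the pure-Java reader.
    class PostProcessInfoExample {
        DataOperationResults.DataProcessingOutputInfo readInfo(DataOperation.DataProcessingOutputInfoOP infoOP,
                File dataProcessingOutputFileHDF5) throws Exception {
            Hdf5DataProcessingReaderPure pureReader = new Hdf5DataProcessingReaderPure();
            // same call the deleted wrapper used for its second (pure) result
            return pureReader.getDataProcessingOutput(infoOP, dataProcessingOutputFileHDF5);
        }
    }
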
diff --git a/vcell-core/src/main/java/cbit/vcell/simdata/Hdf5Utils.java b/vcell-core/src/main/java/cbit/vcell/simdata/Hdf5Utils.java
deleted file mode 100644
index 6d26175d6f..0000000000
--- a/vcell-core/src/main/java/cbit/vcell/simdata/Hdf5Utils.java
+++ /dev/null
@@ -1,331 +0,0 @@
-package cbit.vcell.simdata;
-
-import java.nio.ByteBuffer;
-import java.nio.ByteOrder;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.lang.ArrayUtils;
-
-import cbit.vcell.math.Variable;
-import ncsa.hdf.hdf5lib.H5;
-import ncsa.hdf.hdf5lib.HDF5Constants;
-import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
-import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
-
-public class Hdf5Utils {
- private static final String HDF5_GROUP_SOLUTION = "/solution";
- private static final String HDF5_GROUP_EXTRAPOLATED_VOLUMES = "/extrapolated_volumes";
- private static final String HDF5_GROUP_DIRECTORY_SEPARATOR = "/";
-
- /**
- * Creates a relative path to the solution dataset for the specified variable.
- *
- * @param varName the name of the variable to path to.
- * @return the relative path
- */
- public static String getVarSolutionPath(String varName){
- return HDF5_GROUP_SOLUTION + HDF5_GROUP_DIRECTORY_SEPARATOR + Variable.getNameFromCombinedIdentifier(varName);
- }
-
- /**
- * Creates a relative path to the extrapolated values of a given variable name.
- *
- * @param varName name of the variable to path to
- * @return the relative path
- */
- public static String getVolVarExtrapolatedValuesPath(String varName){
- return HDF5_GROUP_EXTRAPOLATED_VOLUMES + HDF5_GROUP_DIRECTORY_SEPARATOR + "__" + Variable.getNameFromCombinedIdentifier(varName) + "_extrapolated__";
- }
-
- /**
- * Helper class to ensure HDF5 documents are closed properly.
- */
- public static class HDF5WriteHelper {
- /**
- * The id number of the hdf5 dataspace
- */
- public int hdf5DataSpaceID;
-
- /**
- * The id number of the hdf5 dataset
- */
- public int hdf5DatasetValuesID;
-
- /**
- * Constructor of the helper
- *
- * @param hdf5DataSpaceID The id number of the hdf5 dataspace
- * @param hdf5DatasetValuesID The id number of the hdf5 dataset
- */
- public HDF5WriteHelper(int hdf5DataSpaceID, int hdf5DatasetValuesID) {
- super();
- this.hdf5DataSpaceID = hdf5DataSpaceID;
- this.hdf5DatasetValuesID = hdf5DatasetValuesID;
- }
- /**
- * Closes the dataspace and dataset referenced in this helper class
- *
- * @throws HDF5LibraryException if the hdf5 dataset and/or dataspace was unable to be successfully closed
- */
- public void close() throws HDF5LibraryException {
- H5.H5Sclose(hdf5DataSpaceID);
- H5.H5Dclose(hdf5DatasetValuesID);
- }
- }
-
- /**
- * Creates a dataset at the specified hdf5 group
- * @param hdf5GroupID the group to place the dataset in
- * @param datasetName the name to give the dataset
- * @param dims n-dimensional sizes to give the dataset
- * @return an HDF5 writer helper class to store the relevant values
- * @throws HDF5Exception if the hdf5 library encounters something unusual
- */
- public static HDF5WriteHelper createDataset(int hdf5GroupID,String datasetName,long[] dims) throws HDF5Exception{
- //Create dataset and return it, must be closed when finished
- long[] datasetDimensions = dims;
- int hdf5DataspaceIDValues = H5.H5Screate_simple(datasetDimensions.length, datasetDimensions, null);
- int hdf5DatasetIDValues = H5.H5Dcreate(hdf5GroupID, datasetName, HDF5Constants.H5T_NATIVE_DOUBLE, hdf5DataspaceIDValues,HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
- return new HDF5WriteHelper(hdf5DataspaceIDValues,hdf5DatasetIDValues);
- }
-
- /**
- * Creates a new HDF5 group underneath an existing group or at the top of the HDF5 file
- *
- * @param hdf5GroupID the ID of the top-level hdf5 file or one of its subgroups
- * @param groupName the name of the group
- * @return the new group's ID number
- * @throws HDF5Exception if the hdf5 library encounters something unusual
- */
- public static int createGroup(int hdf5GroupID,String groupName) throws HDF5Exception{
- return H5.H5Gcreate(hdf5GroupID, (String)groupName,HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
- }
-
- /**
- * Use the HDF5 hyperslab feature to copy data from one place to another (not sure how to use it though...)
- *
- * @param copyToDataSet
- * @param copyFromData
- * @param copyToStart
- * @param copyToLength
- * @param copyFromDims
- * @param copyFromStart
- * @param copyFromLength
- * @param dataspaceID
- * @throws NullPointerException
- * @throws IllegalArgumentException
- * @throws HDF5Exception
- */
- public static void copySlice(int copyToDataSet,double[] copyFromData,long[] copyToStart,long[] copyToLength,long[] copyFromDims,long[] copyFromStart,long[] copyFromLength,int dataspaceID) throws NullPointerException, IllegalArgumentException, HDF5Exception {
- int hdf5DataspaceIDSlice = H5.H5Screate_simple(copyFromDims.length, copyFromDims, null);
- //Select the generated sliceData to copy-from
- H5.H5Sselect_hyperslab(hdf5DataspaceIDSlice, HDF5Constants.H5S_SELECT_SET, copyFromStart, null, copyFromLength, null);
- //Select next section of destination to copy-to
- H5.H5Sselect_hyperslab(dataspaceID, HDF5Constants.H5S_SELECT_SET, copyToStart, null, copyToLength,null);
- //Copy from extracted sliceData to hdf5 file dataset
- H5.H5Dwrite_double(copyToDataSet, HDF5Constants.H5T_NATIVE_DOUBLE, hdf5DataspaceIDSlice, dataspaceID, HDF5Constants.H5P_DEFAULT, copyFromData);
- H5.H5Sselect_none(dataspaceID);
- H5.H5Sclose(hdf5DataspaceIDSlice);
- }
-
- /**
- * Insert an attribute at the specified group where the data are a single value
- *
- * @param hdf5GroupID the id of the group to apply the attribute to
- * @param attributeName name of the attribute
- * @param data the data to place
- * @throws NullPointerException (unsure how this occurs)
- * @throws HDF5Exception if the hdf5 library encounters something unusual
- */
- public static void insertAttribute(int hdf5GroupID,String attributeName,String data) throws NullPointerException, HDF5Exception {
- //insertAttributes(hdf5GroupID, dataspaceName, new ArrayList(Arrays.asList(new String[] {data})));
- //String[] attr = data.toArray(new String[0]);
-
- String attr = data + '\u0000';
-
- //https://support.hdfgroup.org/ftp/HDF5/examples/misc-examples/vlstra.c
- int h5attrcs1 = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
- H5.H5Tset_size (h5attrcs1, attr.length() /*HDF5Constants.H5T_VARIABLE*/);
- int dataspace_id = -1;
- //dataspace_id = H5.H5Screate_simple(dims.length, dims,null);
- dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
- int attribute_id = H5.H5Acreate(hdf5GroupID, attributeName, h5attrcs1, dataspace_id, HDF5Constants.H5P_DEFAULT,HDF5Constants.H5P_DEFAULT);
- H5.H5Awrite(attribute_id, h5attrcs1, attr.getBytes());
- H5.H5Sclose(dataspace_id);
- H5.H5Aclose(attribute_id);
- H5.H5Tclose(h5attrcs1);
- }
-
- /**
- * Insert an attribute at the specified group where the data are multiple values
- *
- * @param hdf5GroupID the id of the group to apply the attribute to
- * @param attributeName name of the attribute
- * @param data the data to place
- * @throws NullPointerException (unsure how this occurs)
- * @throws HDF5Exception if the hdf5 library encounters something unusual
- */
- public static void insertAttributes(int hdf5GroupID,String attributeName,List data) throws NullPointerException, HDF5Exception {
- String[] attr = data.toArray(new String[0]);
- long[] dims = new long[] {attr.length}; // Always an array of length == 1
- StringBuffer sb = new StringBuffer();
- int MAXSTRSIZE= -1;
-
- // Get the max length of all the data strings
- for(int i=0;i(Arrays.asList(new String[] {data})));
-// }
-
- /**
- * Insert a dataset at the specified group where the data are strings
- *
- * @param hdf5GroupID the id of the group to apply the dataset to
- * @param datasetName name of the dataset
- * @param dims dimensional measurements
- * @param data the data to fill the dataset
- * @throws NullPointerException (unsure how this occurs)
- * @throws HDF5Exception if the hdf5 library encounters something unusual
- */
- public static void insertStrings(int hdf5GroupID,String datasetName,long[] dims,List data) throws NullPointerException, HDF5Exception {
- int largestStrLen = 0;
- for(int i=0;i data) throws NullPointerException, HDF5Exception {
- double[] hdfData = ArrayUtils.toPrimitive(((ArrayList)data).toArray(new Double[0]));
- int hdf5DataspaceID = H5.H5Screate_simple(dims.length, dims, null);
- int hdf5DatasetID = H5.H5Dcreate(hdf5GroupID, dataspaceName,HDF5Constants.H5T_NATIVE_DOUBLE, hdf5DataspaceID,HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
- H5.H5Dwrite_double(hdf5DatasetID, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, hdfData);
- H5.H5Dclose(hdf5DatasetID);
- H5.H5Sclose(hdf5DataspaceID);
- }
-}
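
For reference, the removed Hdf5Utils API was driven roughly as sketched below, reconstructed solely from the deleted code above: create a group, create a dataset through HDF5WriteHelper, copy values in with copySlice, attach a string attribute, then close everything. Opening or creating the HDF5 file itself was never part of Hdf5Utils, so the H5Fcreate call, the group name, and the attribute value are assumptions for the example.

    import ncsa.hdf.hdf5lib.H5;
    import ncsa.hdf.hdf5lib.HDF5Constants;

    // Sketch of typical use of the removed helper; not part of the refactored code.
    class Hdf5UtilsUsageSketch {
        static void writeVariable(String path, String varName, double[] flatData, long[] dims) throws Exception {
            int fileId = H5.H5Fcreate(path, HDF5Constants.H5F_ACC_TRUNC,
                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
            try {
                int solutionGroup = Hdf5Utils.createGroup(fileId, "solution");
                Hdf5Utils.HDF5WriteHelper helper = Hdf5Utils.createDataset(solutionGroup, varName, dims);
                try {
                    // copy the whole flat buffer into the dataset in one hyperslab transfer:
                    // destination start = zeros, destination length = dims,
                    // source dims = dims, source start = zeros, source length = dims
                    Hdf5Utils.copySlice(helper.hdf5DatasetValuesID, flatData,
                            new long[dims.length], dims,
                            dims, new long[dims.length], dims,
                            helper.hdf5DataSpaceID);
                    Hdf5Utils.insertAttribute(helper.hdf5DatasetValuesID, "unit", "uM");
                } finally {
                    helper.close();
                }
                H5.H5Gclose(solutionGroup);
            } finally {
                H5.H5Fclose(fileId);
            }
        }
    }
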
diff --git a/vcell-core/src/main/java/cbit/vcell/simdata/MovingBoundarySimDataReader.java b/vcell-core/src/main/java/cbit/vcell/simdata/MovingBoundarySimDataReader.java
new file mode 100644
index 0000000000..651a6617cb
--- /dev/null
+++ b/vcell-core/src/main/java/cbit/vcell/simdata/MovingBoundarySimDataReader.java
@@ -0,0 +1,247 @@
+package cbit.vcell.simdata;
+
+import cbit.vcell.math.VariableType;
+import cbit.vcell.solvers.CartesianMeshMovingBoundary;
+import ncsa.hdf.object.*;
+
+import javax.swing.tree.DefaultMutableTreeNode;
+import java.util.List;
+import java.util.Vector;
+
+public class MovingBoundarySimDataReader {
+ public static void readMBSDataMetadata(String fileName, Vector<DataBlock> dataBlockList) throws Exception
+ {
+ FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
+ FileFormat solFile = null;
+ try {
+ solFile = fileFormat.createInstance(fileName, FileFormat.READ);
+ solFile.open();
+ DefaultMutableTreeNode rootNode = (DefaultMutableTreeNode)solFile.getRootNode();
+ Group rootGroup = (Group)rootNode.getUserObject();
+ Group solutionGroup = null;
+ for (Object member : rootGroup.getMemberList())
+ {
+ String memberName = ((HObject)member).getName();
+ if (member instanceof Group)
+ {
+ CartesianMeshMovingBoundary.MBSDataGroup group = CartesianMeshMovingBoundary.MBSDataGroup.valueOf(memberName);
+ if (group == CartesianMeshMovingBoundary.MBSDataGroup.Solution)
+ {
+ solutionGroup = (Group) member;
+ break;
+ }
+ }
+ }
+ if (solutionGroup == null)
+ {
+ throw new Exception("Group " + CartesianMeshMovingBoundary.MBSDataGroup.Solution + " not found");
+ }
+
+ // find any timeGroup
+ Group timeGroup = null;
+ for (Object member : solutionGroup.getMemberList())
+ {
+ String memberName = ((HObject)member).getName();
+ if (member instanceof Group && memberName.startsWith("time"))
+ {
+ timeGroup = (Group) member;
+ break;
+ }
+ }
+
+ if (timeGroup == null)
+ {
+ throw new Exception("No time group found");
+ }
+
+ // find all the datasets in that time group
+ for (Object member : timeGroup.getMemberList())
+ {
+ if (member instanceof Dataset)
+ {
+ List<Attribute> solAttrList = ((Dataset)member).getMetadata();
+ int size = 0;
+ String varName = null;
+ VariableType varType = null;
+ for (Attribute attr : solAttrList)
+ {
+ String attrName = attr.getName();
+ Object attrValue = attr.getValue();
+ if(attrName.equals(CartesianMeshMovingBoundary.MSBDataAttribute.name.name()))
+ {
+ varName = ((String[]) attrValue)[0];
+ }
+ else if (attrName.equals(CartesianMeshMovingBoundary.MSBDataAttribute.size.name()))
+ {
+ size = ((int[]) attrValue)[0];
+ }
+ else if (attrName.equals(CartesianMeshMovingBoundary.MSBDataAttribute.type.name()))
+ {
+ String vt = ((String[]) attrValue)[0];
+ if (vt.equals(CartesianMeshMovingBoundary.MSBDataAttributeValue.Point.name()))
+ {
+ varType = VariableType.POINT_VARIABLE;
+ }
+ else if (vt.equals(CartesianMeshMovingBoundary.MSBDataAttributeValue.Volume.name()))
+ {
+ varType = VariableType.VOLUME;
+ }
+ else if (vt.equals(CartesianMeshMovingBoundary.MSBDataAttributeValue.PointSubDomain.name()))
+ {
+ // Position for PointSubdomain
+ }
+ }
+ }
+ if (varType == VariableType.VOLUME)
+ {
+ // only display volume
+ dataBlockList.addElement(DataBlock.createDataBlock(varName, varType.getType(), size, 0));
+ }
+ if (varType == VariableType.POINT_VARIABLE)
+ {
+ // also display point variables
+ dataBlockList.addElement(DataBlock.createDataBlock(varName, varType.getType(), size, 0));
+ }
+
+ }
+ }
+ }
+ finally
+ {
+ if (solFile != null)
+ {
+ try {
+ solFile.close();
+ } catch (Exception e) {
+ // ignore
+ }
+ }
+ }
+ }
+
+ public static double[] readMBSData(String fileName, Vector<DataBlock> dataBlockList, String varName, Double time) throws Exception {
+ FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
+ FileFormat solFile = null;
+ double[] data = null;
+ try {
+ solFile = fileFormat.createInstance(fileName, FileFormat.READ);
+ solFile.open();
+ DefaultMutableTreeNode rootNode = (DefaultMutableTreeNode)solFile.getRootNode();
+ Group rootGroup = (Group)rootNode.getUserObject();
+ Group solutionGroup = null;
+ for (Object member : rootGroup.getMemberList())
+ {
+ String memberName = ((HObject)member).getName();
+ if (member instanceof Group)
+ {
+ CartesianMeshMovingBoundary.MBSDataGroup group = CartesianMeshMovingBoundary.MBSDataGroup.valueOf(memberName);
+ if (group == CartesianMeshMovingBoundary.MBSDataGroup.Solution)
+ {
+ solutionGroup = (Group) member;
+ break;
+ }
+ }
+ }
+ if (solutionGroup == null)
+ {
+ throw new Exception("Group " + CartesianMeshMovingBoundary.MBSDataGroup.Solution + " not found");
+ }
+
+ int varIndex = -1;
+ int size = 0;
+ for (int i = 0; i < dataBlockList.size(); ++ i)
+ {
+ DataBlock dataBlock = dataBlockList.get(i);
+ if (dataBlock.getVarName().equals(varName))
+ {
+ varIndex = i;
+ size = dataBlock.getSize();
+ break;
+ }
+ }
+
+ if (varIndex == -1)
+ {
+ throw new Exception("Variable " + varName + " not found");
+ }
+
+ // find time group for that time
+ Group timeGroup = null;
+ for (Object member : solutionGroup.getMemberList())
+ {
+ if (member instanceof Group)
+ {
+ Group group = (Group)member;
+ List<Attribute> dsAttrList = group.getMetadata();
+ Attribute timeAttribute = null;
+ for (Attribute attr : dsAttrList)
+ {
+ if (attr.getName().equals(CartesianMeshMovingBoundary.MSBDataAttribute.time.name()))
+ {
+ timeAttribute = attr;
+ break;
+ }
+ }
+ if (timeAttribute != null)
+ {
+ double t = ((double[]) timeAttribute.getValue())[0];
+ if (Math.abs(t - time) < 1e-8)
+ {
+ timeGroup = group;
+ break;
+ }
+ }
+ }
+ }
+
+ if (timeGroup == null)
+ {
+ throw new Exception("No time group found for time=" + time);
+ }
+
+ // find variable dataset
+ Dataset varDataset = null;
+ for (Object member : timeGroup.getMemberList())
+ {
+ if (member instanceof Dataset)
+ {
+ List<Attribute> dsAttrList = ((Dataset)member).getMetadata();
+ String var = null;
+ for (Attribute attr : dsAttrList)
+ {
+ if (attr.getName().equals(CartesianMeshMovingBoundary.MSBDataAttribute.name.name()))
+ {
+ var = ((String[]) attr.getValue())[0];
+ break;
+ }
+ }
+ if (var != null && var.equals(varName))
+ {
+ varDataset = (Dataset) member;
+ break;
+ }
+ }
+ }
+ if (varDataset == null)
+ {
+ throw new Exception("Data for Variable " + varName + " at time " + time + " not found");
+ }
+
+ data = new double[size];
+ System.arraycopy((double[])varDataset.getData(), 0, data, 0, size);
+ return data;
+ }
+ finally
+ {
+ if (solFile != null)
+ {
+ try {
+ solFile.close();
+ } catch (Exception e) {
+ // ignore
+ }
+ }
+ }
+ }
+
+}
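
A minimal usage sketch for the new MovingBoundarySimDataReader follows; the file path, the choice of the first data block, and the time value are illustrative and not taken from the diff.

    import java.util.Vector;

    import cbit.vcell.simdata.DataBlock;
    import cbit.vcell.simdata.MovingBoundarySimDataReader;

    // Illustrative caller: discover the variables in a MovingBoundary HDF5 result, then read one of them.
    class MovingBoundaryReadExample {
        static double[] readFirstVariableAtTimeZero(String mbsHdf5File) throws Exception {
            Vector<DataBlock> dataBlockList = new Vector<>();
            // fills dataBlockList with one DataBlock per volume/point variable
            MovingBoundarySimDataReader.readMBSDataMetadata(mbsHdf5File, dataBlockList);
            String varName = dataBlockList.firstElement().getVarName();
            // read that variable's values at t = 0.0 (assumed to be a stored time point)
            return MovingBoundarySimDataReader.readMBSData(mbsHdf5File, dataBlockList, varName, 0.0);
        }
    }
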
diff --git a/vcell-core/src/main/java/cbit/vcell/simdata/MultiTrialNonspatialStochSimDataReader.java b/vcell-core/src/main/java/cbit/vcell/simdata/MultiTrialNonspatialStochSimDataReader.java
new file mode 100644
index 0000000000..97b19011e3
--- /dev/null
+++ b/vcell-core/src/main/java/cbit/vcell/simdata/MultiTrialNonspatialStochSimDataReader.java
@@ -0,0 +1,127 @@
+package cbit.vcell.simdata;
+
+import cbit.vcell.parser.ExpressionException;
+import cbit.vcell.solver.ode.ODESimData;
+import com.google.common.io.Files;
+import ncsa.hdf.object.FileFormat;
+import ncsa.hdf.object.Group;
+import ncsa.hdf.object.HObject;
+import ncsa.hdf.object.h5.H5ScalarDS;
+import org.vcell.util.ObjectNotFoundException;
+
+import java.io.File;
+import java.util.Arrays;
+import java.util.List;
+
+public class MultiTrialNonspatialStochSimDataReader {
+
+ public static double[] extractColumn(ODESimData odeSimData, String columnName, SummaryStatisticType summaryStatisticType) throws ExpressionException, ObjectNotFoundException {
+ FileFormat hdf5FileFormat = null;
+ File to = null;
+ try {
+ byte[] hdf5FileBytes = odeSimData.getHdf5FileBytes();
+ if(hdf5FileBytes != null) {
+ to = File.createTempFile("multitrial_nonspatial_stats_", ".hdf5");
+ Files.write(hdf5FileBytes, to);
+ FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
+ if (fileFormat == null){
+ throw new Exception("Cannot find HDF5 FileFormat.");
+ }
+ // open the file with read-only access
+ hdf5FileFormat = fileFormat.createInstance(to.getAbsolutePath(), FileFormat.READ);
+ // open the file and retrieve the file structure
+ hdf5FileFormat.open();
+ Group root = (Group)((javax.swing.tree.DefaultMutableTreeNode)hdf5FileFormat.getRootNode()).getUserObject();
+ List<HObject> postProcessMembers = ((Group)root).getMemberList();
+ for(HObject nextHObject : postProcessMembers) {
+ System.out.println(nextHObject.getName()+" "+nextHObject.getClass().getName());
+ H5ScalarDS h5ScalarDS = (H5ScalarDS)nextHObject;
+ h5ScalarDS.init();
+ try {
+ long[] dims = h5ScalarDS.getDims();
+ System.out.println("---"+nextHObject.getName()+" "+nextHObject.getClass().getName()+" Dimensions="+Arrays.toString(dims));
+ Object obj = h5ScalarDS.read();
+ if(dims.length == 2) {
+ double[] columns = new double[(int)dims[1]];
+ for(int row=0;row postProcessMembers = ((Group)root).getMemberList();
+ for(HObject nextHObject:postProcessMembers){
+ //System.out.println(nextHObject.getName()+"\n"+nextHObject.getClass().getName());
+ H5ScalarDS h5ScalarDS = (H5ScalarDS)nextHObject;
+ h5ScalarDS.init();
+ try {
+ long[] dims = h5ScalarDS.getDims();
+ System.out.println("---"+nextHObject.getName()+" "+nextHObject.getClass().getName()+" Dimensions="+ Arrays.toString(dims));
+ Object obj = h5ScalarDS.read();
+ if(dims.length == 2) {
+ //dims[0]=numTimes (will be the same as 'SimTimes' data length)
+ //dims[1]=numVars (will be the same as 'VarNames' data length)
+ //if name='StatMean' this is the same as the default data saved in the odeSolverresultSet
+ double[] columns = new double[(int)dims[1]];
+ for(int row=0;row= times.length) {
- close();
- }
-}
-
-/**
+ /**
* Insert the method's description here.
* Creation date: (10/26/2004 10:18:50 AM)
*/
public void getNextDataAtCurrentTime(double[][] returnValues) throws IOException, DataAccessException {
if (isChombo) {
try {
- getNextDataAtCurrentTimeChombo(returnValues);
+ if (zipFilenNames == null || zipFilenNames[masterTimeIndex] == null) {
+ return;
+ }
+ if (currentZipFile == null || !currentZipFileName.equals(zipFilenNames[masterTimeIndex])) {
+ close();
+ currentZipFile = new ZipFile(zipFilenNames[masterTimeIndex]);
+ currentZipFileName=zipFilenNames[masterTimeIndex];
+ }
+ ChomboSimDataReader.getNextDataAtCurrentTimeChombo(returnValues, currentZipFile, varNames, varIndexes, simDataFileNames, masterTimeIndex);
+ ++ masterTimeIndex;
+ if (masterTimeIndex >= times.length) {
+ close();
+ }
} catch (Exception e) {
throw new DataAccessException(e.getMessage(), e);
}
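Note: the Chombo branch above now caches the open archive, reusing currentZipFile until the master time index points at a differently named zip, and only then closing and reopening. A rough standalone sketch of that rule (field and method names mirror the hunk; the ZipFile type is whichever one the reader already imports):

    // Sketch of the reuse-unless-renamed rule; illustrative only.
    private void ensureZipOpen(String wantedZipName) throws IOException {
        if (currentZipFile == null || !currentZipFileName.equals(wantedZipName)) {
            close(); // same call the hunk makes before switching archives
            currentZipFile = new ZipFile(wantedZipName);
            currentZipFileName = wantedZipName;
        }
    }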
diff --git a/vcell-core/src/main/java/cbit/vcell/simdata/SimulationData.java b/vcell-core/src/main/java/cbit/vcell/simdata/SimulationData.java
index be8890f214..f1f9b19208 100644
--- a/vcell-core/src/main/java/cbit/vcell/simdata/SimulationData.java
+++ b/vcell-core/src/main/java/cbit/vcell/simdata/SimulationData.java
@@ -792,6 +792,7 @@ else if (odeIdentifier.equals(NFSIM_DATA_IDENTIFIER))
ODEDataInfo odeDataInfo = new ODEDataInfo(vcDataId.getOwner(), vcDataId.getID(), lastModified);
VCAssert.assertFalse(odeSimData == null, "should have returned null already");
byte[] hdf5FileBytes = null;
+ // try to open non-spatial stochastic MultiTrialStats HDF5 file
File hdf5File = new File(file.getParent(),file.getName()+"_hdf5");
if(hdf5File.exists()) {
hdf5FileBytes = Files.readAllBytes(hdf5File.toPath());
@@ -1611,7 +1612,7 @@ private synchronized void readMesh(File meshFile,File membraneMeshMetricsFile) t
// read meshFile,MembraneMeshMetrics and parse into 'mesh' object
//
if(isChombo()){
-// SimulationDataSpatialHdf5 simulationDataSpatialHdf5 = new SimulationDataSpatialHdf5(vcDataId,userDirectory,null);
+// ChomboSimpleSimDataReader_NotUsed simulationDataSpatialHdf5 = new ChomboSimpleSimDataReader_NotUsed(vcDataId,userDirectory,null);
// simulationDataSpatialHdf5.readVarAndFunctionDataIdentifiers();
mesh = CartesianMeshChombo.readMeshFile(meshFile);
// test serialization
@@ -2016,7 +2017,7 @@ public synchronized SimDataBlock getChomboExtrapolatedValues(String varName, dou
throw new DataAccessException("data not found for variable " + varName);
}
final String varNameInDataSet = dsi.getQualifiedName();
- double data[] = DataSet.readChomboExtrapolatedValues(varNameInDataSet, pdeFile, zipFile);
+ double data[] = ChomboSimDataReader.readChomboExtrapolatedValues(varNameInDataSet, pdeFile, zipFile);
VariableType variableType = VariableType.MEMBRANE;
PDEDataInfo pdeDataInfo = new PDEDataInfo(vcDataId.getOwner(),vcDataId.getID(),varName,time,lastModified);
return data == null ? null : new SimDataBlock(pdeDataInfo,data,variableType);
diff --git a/vcell-core/src/main/java/cbit/vcell/simdata/SummaryStatisticType.java b/vcell-core/src/main/java/cbit/vcell/simdata/SummaryStatisticType.java
new file mode 100644
index 0000000000..62049d6a37
--- /dev/null
+++ b/vcell-core/src/main/java/cbit/vcell/simdata/SummaryStatisticType.java
@@ -0,0 +1,8 @@
+package cbit.vcell.simdata;
+
+public enum SummaryStatisticType {
+ Min,
+ Max,
+ Mean,
+ Std
+}
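Note: only "StatMean" is named in the comments of MultiTrialNonspatialStochSimDataReader above; if the other per-trial statistics follow the same naming convention in the multi-trial HDF5 file, a dataset selector could look like the sketch below. The Min/Max/Std dataset names are assumptions, not taken from this commit:

    // Hypothetical mapping from SummaryStatisticType to an HDF5 dataset name.
    static String datasetNameFor(SummaryStatisticType type) {
        switch (type) {
            case Mean: return "StatMean"; // mentioned in the reader's comments
            case Min:  return "StatMin";  // assumed
            case Max:  return "StatMax";  // assumed
            case Std:  return "StatStd";  // assumed
            default:   throw new IllegalArgumentException("unexpected statistic: " + type);
        }
    }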
diff --git a/vcell-core/src/main/java/cbit/vcell/simdata/UiTableExporterToHDF5.java b/vcell-core/src/main/java/cbit/vcell/simdata/UiTableExporterToHDF5.java
new file mode 100644
index 0000000000..429e06feb4
--- /dev/null
+++ b/vcell-core/src/main/java/cbit/vcell/simdata/UiTableExporterToHDF5.java
@@ -0,0 +1,360 @@
+package cbit.vcell.simdata;
+
+import cbit.vcell.math.ReservedVariable;
+import ncsa.hdf.hdf5lib.H5;
+import ncsa.hdf.hdf5lib.HDF5Constants;
+import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
+import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.ListIterator;
+
+public class UiTableExporterToHDF5 {
+ public static File exportTableToHDF5(boolean bHistogram, String blankCellValue, int[] columns, int[] rows, String xVarColumnName, String hdf5DescriptionText, String[] columnNames, String[] paramScanParamNames, Double[][] paramScanParamValues, Object[][] rowColValues) throws Exception {
+ int hdf5FileID = -1;//Used if HDF5 format
+ File hdf5TempFile = null;
+ try {
+ hdf5TempFile = File.createTempFile("plot2D", ".hdf");
+ //System.out.println("/home/vcell/Downloads/hdf5/HDFView/bin/HDFView "+hdf5TempFile.getAbsolutePath());
+ hdf5FileID = H5.H5Fcreate(hdf5TempFile.getAbsolutePath(), HDF5Constants.H5F_ACC_TRUNC,HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ ArrayList<ArrayList<Integer>> paramScanJobs = new ArrayList<>();
+ if(!bHistogram && !columnNames[0].equals((xVarColumnName==null? ReservedVariable.TIME.getName():xVarColumnName))) {
+ throw new Exception("Expecting first column in table to have name '"+xVarColumnName+"'");
+ }
+ //Add arraylist for the parameter scan job, add the index of the xval column
+ for(int i=0;i<columnNames.length;i++) {
+ if(bHistogram) {
+ ArrayList<Integer> tempAL = new ArrayList<Integer>();
+ paramScanJobs.add(tempAL);
+ break;
+ } else if(columnNames[i].equals((xVarColumnName==null?ReservedVariable.TIME.getName():xVarColumnName))){
+ if(i==0) {
+ ArrayList<Integer> tempAL = new ArrayList<Integer>();
+ tempAL.add(i);
+ paramScanJobs.add(tempAL);
+ }else {
+ String str1 = columnNames[i-1];
+ int str1Index = str1.lastIndexOf("Set ");
+ String str2 = columnNames[i+1];
+ int str2Index = str2.lastIndexOf("Set ");
+ if(!str1.substring(str1Index).equals(str2.substring(str2Index))) {
+ ArrayList<Integer> tempAL = new ArrayList<Integer>();
+ tempAL.add(i);
+ paramScanJobs.add(tempAL);
+ }
+ }
+ }
+ }
+ //Add selected columns to the proper paramscan arraylist
+ for(int j = 0; j< columns.length; j++) {
+ if(bHistogram) {
+ paramScanJobs.get(0).add(columns[j]);
+ }else {
+ if(columnNames[columns[j]].equals((xVarColumnName==null?ReservedVariable.TIME.getName():xVarColumnName))){
+ continue;//skip xcolumns
+ }
+ for(int k=0;k<paramScanJobs.size();k++) {
+ if(columns[j] >= paramScanJobs.get(k).get(0) && ((k+1) == paramScanJobs.size() || columns[j] < paramScanJobs.get(k+1).get(0))) {
+ paramScanJobs.get(k).add(columns[j]);
+// System.out.println("HDF5frm"+columnNames[columns[j]));
+ }
+ }
+ }
+ }
+ //Remove unselected indexes from set lists
+ for(int k=0;k<paramScanJobs.size();k++) {
+ ListIterator<Integer> listIterator = paramScanJobs.get(k).listIterator();
+ if(paramScanJobs.get(k).size() > 1) {// keep x val if there are more selections for this set
+ listIterator.next();
+ }
+ while(listIterator.hasNext()) {
+ final Integer columIndex = listIterator.next();
+ boolean bFound = false;
+ for(int j = 0; j< columns.length; j++) {
+ if(columIndex == columns[j]) {
+ bFound = true;
+ break;
+ }
+ }
+ if(!bFound) {
+ listIterator.remove();
+ }
+ }
+ }
+ //Write out the data to HDF5 file
+ for(int k=0;k<paramScanJobs.size();k++) {
+ ArrayList<Integer> dataTypes = new ArrayList<Integer>();
+ ArrayList dataIDs = new ArrayList();
+ ArrayList dataShapes = new ArrayList();
+ ArrayList dataLabels = new ArrayList();
+ ArrayList dataNames = new ArrayList();
+ ArrayList paramNames = new ArrayList();
+ ArrayList paramValues = new ArrayList();
+ boolean bParamsDone = false;
+ for(int cols=0;cols(Arrays.asList(new String[] {data})));
+ //String[] attr = data.toArray(new String[0]);
+
+ String attr = data + '\u0000';
+
+ //https://support.hdfgroup.org/ftp/HDF5/examples/misc-examples/vlstra.c
+ int h5attrcs1 = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+ H5.H5Tset_size (h5attrcs1, attr.length() /*HDF5Constants.H5T_VARIABLE*/);
+ int dataspace_id = -1;
+ //dataspace_id = H5.H5Screate_simple(dims.length, dims,null);
+ dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
+ int attribute_id = H5.H5Acreate(hdf5GroupID, attributeName, h5attrcs1, dataspace_id, HDF5Constants.H5P_DEFAULT,HDF5Constants.H5P_DEFAULT);
+ H5.H5Awrite(attribute_id, h5attrcs1, attr.getBytes());
+ H5.H5Sclose(dataspace_id);
+ H5.H5Aclose(attribute_id);
+ H5.H5Tclose(h5attrcs1);
+ }
+
+ /**
+ * Insert an attribute at the specified group where the data are multiple values
+ *
+ * @param hdf5GroupID the id of the group to apply the attribute to
+ * @param attributeName name of the attribute
+ * @param data the data to place
+ * @throws NullPointerException (unsure how this occurs)
+ * @throws HDF5Exception if the hdf5 library encounters something unusual
+ */
+ private static void insertAttributes(int hdf5GroupID,String attributeName,List<String> data) throws NullPointerException, HDF5Exception {
+ String[] attr = data.toArray(new String[0]);
+ long[] dims = new long[] {attr.length}; // Always an array of length == 1
+ StringBuffer sb = new StringBuffer();
+ int MAXSTRSIZE= -1;
+
+ // Get the max length of all the data strings
+ for(int i=0;i
* Single value primitives should be retrieved by requesting array and verifying it's a single element
@@ -17,9 +17,9 @@
* @param <T> type of returned data. primitives not supported, autoboxing not supported
* @author GWeatherby
*/
-public class VH5TypedPath<T> extends VH5Path {
+public class MovingBoundardyVH5TypedPath<T> extends MovingBoundardyVH5Path {
- public VH5TypedPath(Group g, Class<T> clzz, String... names){
+ public MovingBoundardyVH5TypedPath(Group g, Class<T> clzz, String... names){
super(g, names);
Objects.requireNonNull(clzz);
if(clzz.isPrimitive()){
diff --git a/vcell-core/src/main/java/cbit/vcell/solvers/mb/MovingBoundaryReader.java b/vcell-core/src/main/java/cbit/vcell/solvers/mb/MovingBoundaryReader.java
index 8d6604da49..238e85e756 100644
--- a/vcell-core/src/main/java/cbit/vcell/solvers/mb/MovingBoundaryReader.java
+++ b/vcell-core/src/main/java/cbit/vcell/solvers/mb/MovingBoundaryReader.java
@@ -96,7 +96,7 @@ public int lastTimeIndex(){
void testquery(){
try {
-// VH5TypedPath path = new VH5TypedPath<>(root, H5ScalarDS.class,"boundaries");
+// MovingBoundardyVH5TypedPath path = new MovingBoundardyVH5TypedPath<>(root, H5ScalarDS.class,"boundaries");
// H5ScalarDS hsd = path.get();
// hsd.init( );
// int[] si = hsd.getSelectedIndex();
@@ -107,17 +107,17 @@ void testquery(){
// sdims[0] = 1;
// Object o2 = hsd.read();
// System.out.println(o2);
-// VH5TypedPath dpath = new VH5TypedPath<>(root, String[].class,"boundaries");
+// MovingBoundardyVH5TypedPath dpath = new MovingBoundardyVH5TypedPath<>(root, String[].class,"boundaries");
// String[] d = dpath.get();
// System.out.println(d);
-// VH5Path path = new VH5Path(root,"generationTimes");
+// MovingBoundardyVH5Path path = new MovingBoundardyVH5Path(root,"generationTimes");
// Object o = path.getData();
// H5ScalarDS hsd = (H5ScalarDS) o;
// Object o2 = hsd.read();
// System.out.println(o2);
-// VH5TypedPath dpath = new VH5TypedPath(root, H5CompoundDS.class,"elements");
+// MovingBoundardyVH5TypedPath dpath = new MovingBoundardyVH5TypedPath(root, H5CompoundDS.class,"elements");
// H5CompoundDS cds = dpath.get();
// cds.init();
// selectPlane(cds,50,50,0);
@@ -127,7 +127,7 @@ void testquery(){
// int id = dts[0].open();
// o = cds.getData( );
//
-// //VH5Path path2 = new VH5Path(root,"elements","volumePointsX");
+// //MovingBoundardyVH5Path path2 = new MovingBoundardyVH5Path(root,"elements","volumePointsX");
// // o = path2.getData();
// System.out.println(o);
//
@@ -141,40 +141,40 @@ void testquery(){
}
private double[] getDoubleArray(String... names){
- VH5TypedPath<double[]> dpath = new VH5TypedPath<double[]>(root, double[].class, names);
+ MovingBoundardyVH5TypedPath<double[]> dpath = new MovingBoundardyVH5TypedPath<double[]>(root, double[].class, names);
return dpath.get();
}
private double singleDouble(String... names){
double[] a = getDoubleArray(names);
if(a.length != 1){
- throw new MovingBoundaryResultException(VH5Path.concat(names) + " is not single element array");
+ throw new MovingBoundaryResultException(MovingBoundardyVH5Path.concat(names) + " is not single element array");
}
return a[0];
}
private long[] getLongArray(String... names){
- VH5TypedPath<long[]> dpath = new VH5TypedPath<long[]>(root, long[].class, names);
+ MovingBoundardyVH5TypedPath<long[]> dpath = new MovingBoundardyVH5TypedPath<long[]>(root, long[].class, names);
return dpath.get();
}
private long singleLong(String... names){
long[] a = getLongArray(names);
if(a.length != 1){
- throw new MovingBoundaryResultException(VH5Path.concat(names) + " is not single element array");
+ throw new MovingBoundaryResultException(MovingBoundardyVH5Path.concat(names) + " is not single element array");
}
return a[0];
}
private int[] getIntArray(String... names){
- VH5TypedPath<int[]> dpath = new VH5TypedPath<int[]>(root, int[].class, names);
+ MovingBoundardyVH5TypedPath<int[]> dpath = new MovingBoundardyVH5TypedPath<int[]>(root, int[].class, names);
return dpath.get();
}
private int singleInt(String... names){
int[] a = getIntArray(names);
if(a.length != 1){
- throw new MovingBoundaryResultException(VH5Path.concat(names) + " is not single element array");
+ throw new MovingBoundaryResultException(MovingBoundardyVH5Path.concat(names) + " is not single element array");
}
return a[0];
}
@@ -400,10 +400,10 @@ private class PlaneNodes {
final H5CompoundDS species;
PlaneNodes() throws Exception{
- VH5TypedPath dpath = new VH5TypedPath(root, H5CompoundDS.class, "elements");
+ MovingBoundardyVH5TypedPath dpath = new MovingBoundardyVH5TypedPath(root, H5CompoundDS.class, "elements");
elements = dpath.get();
elements.read();
- dpath = new VH5TypedPath(root, H5CompoundDS.class, "species");
+ dpath = new MovingBoundardyVH5TypedPath(root, H5CompoundDS.class, "species");
species = dpath.get();
species.read();
}
@@ -441,7 +441,7 @@ public int[] getBoundaryIndexes(int timeIndex){
VCAssert.assertTrue(timeIndex >= 0, "negative time index");
validateTimeIndex(timeIndex);
- VH5TypedPath path = new VH5TypedPath<>(root, H5ScalarDS.class, "boundaries");
+ MovingBoundardyVH5TypedPath path = new MovingBoundardyVH5TypedPath<>(root, H5ScalarDS.class, "boundaries");
H5ScalarDS hsd = path.get();
hsd.init();
long[] start = hsd.getStartDims();
diff --git a/vcell-core/src/main/java/org/vcell/vis/io/ChomboFileReader.java b/vcell-core/src/main/java/org/vcell/vis/io/ChomboFileReader.java
index 2034ae1d62..969384a092 100644
--- a/vcell-core/src/main/java/org/vcell/vis/io/ChomboFileReader.java
+++ b/vcell-core/src/main/java/org/vcell/vis/io/ChomboFileReader.java
@@ -1,10 +1,19 @@
package org.vcell.vis.io;
-import java.io.File;
+import java.io.*;
+import java.util.ArrayList;
import java.util.List;
+import java.util.StringTokenizer;
+import java.util.Vector;
+import java.util.zip.ZipEntry;
import javax.swing.tree.DefaultMutableTreeNode;
+import cbit.vcell.math.Variable;
+import ncsa.hdf.object.h5.H5CompoundDS;
+import ncsa.hdf.object.h5.H5ScalarDS;
+import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
+import org.apache.commons.compress.archivers.zip.ZipFile;
import org.vcell.vis.chombo.ChomboBoundaries;
import org.vcell.vis.chombo.ChomboBoundaries.BorderCellInfo;
import org.vcell.vis.chombo.ChomboBoundaries.MeshMetrics;
@@ -34,7 +43,186 @@ public class ChomboFileReader {
private static final String MESH_ATTR_DIMENSION = "dimension";
private static final String MESH_ATTR_ORIGIN = "origin";
private static final String MESH_ATTR_EXTENT = "extent";
-
+
+ public static File createTempHdf5File(InputStream is) throws IOException
+ {
+ OutputStream out = null;
+ try{
+ File tempFile = File.createTempFile("temp", "hdf5");
+ out=new FileOutputStream(tempFile);
+ byte buf[] = new byte[1024];
+ int len;
+ while((len=is.read(buf))>0) {
+ out.write(buf,0,len);
+ }
+ return tempFile;
+ }
+ finally
+ {
+ try {
+ if (out != null) {
+ out.close();
+ }
+ } catch (Exception ex) {
+ // ignore
+ }
+ }
+ }
+
+ static File createTempHdf5File(ZipFile zipFile, String fileName) throws IOException
+ {
+ InputStream is = null;
+ try
+ {
+ ZipEntry dataEntry = zipFile.getEntry(fileName);
+ is = zipFile.getInputStream((ZipArchiveEntry) dataEntry);
+ return createTempHdf5File(is);
+ }
+ finally
+ {
+ try
+ {
+ if (is != null)
+ {
+ is.close();
+ }
+ }
+ catch (Exception ex)
+ {
+ // ignore
+ }
+ }
+ }
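Note: both helpers above copy a zip entry into a temp file with a manual byte loop and explicit close calls. For comparison, a compact try-with-resources sketch that does the same thing with java.nio (not what the commit does, just an alternative; ZipFile here is the commons-compress class already imported in this file):

    // Alternative sketch only; the commit keeps the manual copy above.
    static File copyEntryToTempHdf5(ZipFile zipFile, String entryName) throws IOException {
        File tempFile = File.createTempFile("temp", "hdf5");
        try (InputStream is = zipFile.getInputStream(zipFile.getEntry(entryName))) {
            java.nio.file.Files.copy(is, tempFile.toPath(),
                    java.nio.file.StandardCopyOption.REPLACE_EXISTING);
        }
        return tempFile;
    }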
+
+ public static List<DataBlock> readHdf5SolutionMetaData(InputStream is) throws Exception
+ {
+ File tempFile = null;
+ FileFormat solFile = null;
+ ArrayList<DataBlock> dataBlockList = new ArrayList<>();
+ try{
+ tempFile = createTempHdf5File(is);
+
+ FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
+ solFile = fileFormat.createInstance(tempFile.getAbsolutePath(), FileFormat.READ);
+ solFile.open();
+ DefaultMutableTreeNode rootNode = (DefaultMutableTreeNode)solFile.getRootNode();
+ Group rootGroup = (Group)rootNode.getUserObject();
+ Group solGroup = (Group)rootGroup.getMemberList().get(0);
+
+ List<HObject> memberList = solGroup.getMemberList();
+ for (HObject member : memberList)
+ {
+ if (!(member instanceof Dataset)){
+ continue;
+ }
+ Dataset dataset = (Dataset)member;
+ String dsname = dataset.getName();
+ int vt = -1;
+ String domain = null;
+ List<Attribute> solAttrList = dataset.getMetadata();
+ for (Attribute attr : solAttrList)
+ {
+ String attrName = attr.getName();
+ if(attrName.equals("variable type")){
+ Object obj = attr.getValue();
+ vt = ((int[])obj)[0];
+ } else if (attrName.equals("domain")) {
+ Object obj = attr.getValue();
+ domain = ((String[])obj)[0];
+ }
+ }
+ long[] dims = dataset.getDims();
+ String varName = domain == null ? dsname : domain + Variable.COMBINED_IDENTIFIER_SEPARATOR + dsname;
+ dataBlockList.add(DataBlock.createDataBlock(varName, vt, (int) dims[0], 0));
+ }
+ return dataBlockList;
+ } finally {
+ try {
+ if (solFile != null) {
+ solFile.close();
+ }
+ if (tempFile != null) {
+ if (!tempFile.delete()) {
+ System.err.println("couldn't delete temp file " + tempFile);
+ }
+ }
+ } catch(Exception e) {
+ // ignore
+ }
+ }
+ }
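Note: a rough caller-side sketch for the relocated metadata reader; DataSet.read feeds it a zip entry stream, but any HDF5 solution stream works. The file path is made up, and the snippet assumes it sits in a method that declares throws Exception:

    // Illustrative only; path is hypothetical.
    try (InputStream is = new FileInputStream("/tmp/solution.hdf5")) {
        List<DataBlock> blocks = ChomboFileReader.readHdf5SolutionMetaData(is);
        System.out.println("solution file describes " + blocks.size() + " data blocks");
    }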
+
+
+ /**
+ * Z = boolean
+ [B = byte
+ [S = short
+ [I = int
+ [J = long
+ [F = float
+ [D = double
+ [C = char
+ [L = any non-primitives(Object)
+ * @author schaff
+ *
+ */
+ static abstract class DataColumn {
+ private String colName;
+ public DataColumn(String name){
+ this.colName = name;
+ }
+ public abstract int getNumRows();
+ public abstract double getValue(int index);
+ }
+
+ static class IntColumn extends DataColumn {
+ int[] data;
+ public IntColumn(String name, int[] data){
+ super(name);
+ this.data = data;
+ }
+ @Override
+ public int getNumRows(){
+ return data.length;
+ }
+ @Override
+ public double getValue(int index){
+ return data[index];
+ }
+ }
+
+ static class LongColumn extends DataColumn {
+ long[] data;
+ public LongColumn(String name, long[] data){
+ super(name);
+ this.data = data;
+ }
+ @Override
+ public int getNumRows(){
+ return data.length;
+ }
+ @Override
+ public double getValue(int index){
+ return data[index];
+ }
+ }
+
+ static class DoubleColumn extends DataColumn {
+ double[] data;
+ public DoubleColumn(String name, double[] data){
+ super(name);
+ this.data = data;
+ }
+ @Override
+ public int getNumRows(){
+ return data.length;
+ }
+ @Override
+ public double getValue(int index){
+ return data[index];
+ }
+ }
+
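Note: the column classes carried over from Hdf5Reader share one read-side contract: whatever the backing primitive type, each cell is read back through getValue as a double. A small illustration (the classes are package-private, so this only compiles from org.vcell.vis.io):

    // Illustration: an int-backed column is consumed as doubles.
    ChomboFileReader.DataColumn col = new ChomboFileReader.IntColumn("index", new int[] {1, 2, 3});
    for (int r = 0; r < col.getNumRows(); r++) {
        System.out.println(col.getValue(r)); // prints 1.0, 2.0, 3.0
    }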
private static ChomboMeshData readMesh(String meshFileName, String vol0FileName) throws Exception{
ChomboMesh chomboMesh = new ChomboMesh();
@@ -52,11 +240,11 @@ private static ChomboMeshData readMesh(String meshFileName, String vol0FileName)
DefaultMutableTreeNode meshRootNode = (DefaultMutableTreeNode)meshFile.getRootNode();
Group meshRootGroup = (Group)meshRootNode.getUserObject();
- Group meshGroup = Hdf5Reader.getChildGroup(meshRootGroup,"mesh");
+ Group meshGroup = getChildGroup(meshRootGroup,"mesh");
- chomboMesh.setDimension(Hdf5Reader.getIntAttribute(meshGroup,MESH_ATTR_DIMENSION));
- chomboMesh.setExtent(Hdf5Reader.getVect3DAttribute(meshGroup,MESH_ATTR_EXTENT,1.0));
- chomboMesh.setOrigin(Hdf5Reader.getVect3DAttribute(meshGroup,MESH_ATTR_ORIGIN,0.0));
+ chomboMesh.setDimension(getIntAttribute(meshGroup,MESH_ATTR_DIMENSION));
+ chomboMesh.setExtent(getVect3DAttribute(meshGroup,MESH_ATTR_EXTENT,1.0));
+ chomboMesh.setOrigin(getVect3DAttribute(meshGroup,MESH_ATTR_ORIGIN,0.0));
// it's very wasteful here, but what can I do?
CartesianMeshChombo cartesianMeshChombo = CartesianMeshChombo.readMeshFile(new File(meshFileName));
@@ -65,10 +253,10 @@ private static ChomboMeshData readMesh(String meshFileName, String vol0FileName)
chomboMesh.addFeaturePhase(fpv.feature, fpv.iphase);
}
- //Hdf5Reader.DataColumn[] metricsColumns = Hdf5Reader.getDataTable(meshGroup,METRICS_DATASET);
+ //DataColumn[] metricsColumns = getDataTable(meshGroup,METRICS_DATASET);
if (chomboMesh.getDimension()==2){
- Hdf5Reader.DataColumn[] segmentColumns = Hdf5Reader.getDataTable(meshGroup,"segments");
- Hdf5Reader.DataColumn[] verticesColumns = Hdf5Reader.getDataTable(meshGroup,"vertices");
+ DataColumn[] segmentColumns = getDataTable(meshGroup,"segments");
+ DataColumn[] verticesColumns = getDataTable(meshGroup,"vertices");
ChomboBoundaries boundaries = chomboMesh.getBoundaries();
int numVertices = verticesColumns[0].getNumRows();
int numSegments = segmentColumns[0].getNumRows();
@@ -85,7 +273,7 @@ private static ChomboMeshData readMesh(String meshFileName, String vol0FileName)
boundaries.addSegment(new ChomboBoundaries.Segment(chomboIndex, v1, v2));
}
}else if (chomboMesh.getDimension()==3){
- Hdf5Reader.DataColumn[] surfaceTriangleColumns = Hdf5Reader.getDataTable(meshGroup,"surface triangles");
+ DataColumn[] surfaceTriangleColumns = getDataTable(meshGroup,"surface triangles");
ChomboBoundaries boundaries = chomboMesh.getBoundaries();
int numTriangles = surfaceTriangleColumns[0].getNumRows();
for (int row=0;row children = vcellGroup.getMemberList();
@@ -267,4 +455,110 @@ private static void readMembraneVarData(ChomboMeshData chomboMeshData, Group roo
}
}
}
+
+ private static Attribute getAttribute(Group group, String name) throws Exception{
+ List<Attribute> attributes = group.getMetadata();
+ for (Attribute attr : attributes){
+ if (attr.getName().equals(name)){
+ return attr;
+ }
+ }
+ throw new RuntimeException("failed to find attribute "+name);
+ }
+
+ private static double getDoubleAttribute(Group group, String name) throws Exception{
+ Attribute attr = getAttribute(group,name);
+ return ((double[])attr.getValue())[0];
+ }
+
+ private static float getFloatAttribute(Group group, String name) throws Exception{
+ Attribute attr = getAttribute(group,name);
+ return ((float[])attr.getValue())[0];
+ }
+
+ private static int getIntAttribute(Group group, String name) throws Exception{
+ Attribute attr = getAttribute(group,name);
+ return ((int[])attr.getValue())[0];
+ }
+
+ private static String getStringAttribute(Group group, String name) throws Exception{
+ Attribute attr = getAttribute(group,name);
+ return ((String[])attr.getValue())[0];
+ }
+
+ private static Vect3D getVect3DAttribute(Group group, String name, double defaultZ) throws Exception{
+ String str = getStringAttribute(group, name);
+ return parseAttrString(str,defaultZ);
+ }
+
+ private static Group getChildGroup(Group group, String name){
+ List<HObject> memberList = group.getMemberList();
+ for (HObject member : memberList) {
+ if (member.getName().equals(name)){
+ if (member instanceof Group) {
+ return (Group)member;
+ }else{
+ throw new RuntimeException("expecting type Group for group member '"+name+"'");
+ }
+ }
+ }
+ throw new RuntimeException("child group '"+name+"' not found");
+ }
+
+ private static DataColumn[] getDataTable(Group group, String name) throws Exception{
+ List<HObject> memberList = group.getMemberList();
+ for (HObject member : memberList) {
+ if (member.getName().equals(name)){
+ if (member instanceof H5CompoundDS) {
+ H5CompoundDS compoundDataSet = (H5CompoundDS) member;
+ Vector columnValueArrays = (Vector)compoundDataSet.read();
+ String[] columnNames = compoundDataSet.getMemberNames();
+ ArrayList<DataColumn> dataColumns = new ArrayList<DataColumn>();
+ for (int c=0;c valueList = new ArrayList();
+ while (st.hasMoreTokens())
+ {
+ String token = st.nextToken();
+ valueList.add(Double.parseDouble(token));
+ }
+ if (valueList.size()==2){
+ return new Vect3D(valueList.get(0),valueList.get(1),defaultZ);
+ }else if (valueList.size()==3){
+ return new Vect3D(valueList.get(0),valueList.get(1),valueList.get(2));
+ }else{
+ throw new RuntimeException("cannot parse, unexpected array size "+valueList.size());
+ }
+ }
}
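Note: parseAttrString turns the extent/origin attribute strings into a Vect3D, filling in defaultZ when only two numbers are present; its signature and StringTokenizer delimiters are not visible in this diff. A sketch of the visible behavior with an assumed delimiter set:

    // Sketch only; the "{}(), " delimiter set is an assumption.
    static Vect3D parseVect3D(String str, double defaultZ) {
        StringTokenizer st = new StringTokenizer(str, "{}(), ");
        ArrayList<Double> values = new ArrayList<>();
        while (st.hasMoreTokens()) {
            values.add(Double.parseDouble(st.nextToken()));
        }
        if (values.size() == 2) {
            return new Vect3D(values.get(0), values.get(1), defaultZ);
        } else if (values.size() == 3) {
            return new Vect3D(values.get(0), values.get(1), values.get(2));
        }
        throw new RuntimeException("cannot parse, unexpected array size " + values.size());
    }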
diff --git a/vcell-core/src/main/java/org/vcell/vis/io/DataSet.java b/vcell-core/src/main/java/org/vcell/vis/io/DataSet.java
index 7f2952e2aa..8cddfbbc3d 100644
--- a/vcell-core/src/main/java/org/vcell/vis/io/DataSet.java
+++ b/vcell-core/src/main/java/org/vcell/vis/io/DataSet.java
@@ -9,37 +9,18 @@
*/
package org.vcell.vis.io;
-import java.io.BufferedInputStream;
-import java.io.BufferedOutputStream;
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.util.List;
-import java.util.Vector;
-import java.util.zip.ZipEntry;
-
-import javax.swing.tree.DefaultMutableTreeNode;
+import cbit.vcell.math.VariableType;
+import cbit.vcell.simdata.ChomboSimDataReader;
import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
-//import java.util.zip.ZipFile;
import org.apache.commons.compress.archivers.zip.ZipFile;
-
-import cbit.vcell.math.Variable;
-import cbit.vcell.math.VariableType;
-import ncsa.hdf.object.Attribute;
-import ncsa.hdf.object.Dataset;
-import ncsa.hdf.object.FileFormat;
-import ncsa.hdf.object.Group;
-import ncsa.hdf.object.HObject;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
+import java.io.*;
+import java.util.List;
+import java.util.Vector;
+
public class DataSet implements java.io.Serializable
{
private final static Logger lg = LogManager.getLogger(DataSet.class);
@@ -213,7 +194,10 @@ public void read(File file, File zipFile) throws IOException, OutOfMemoryError {
if(is != null && zipFile!=null && isChombo(zipFile)){
try {
- readHdf5SolutionMetaData(is);
+ List<DataBlock> dataBlockList = ChomboFileReader.readHdf5SolutionMetaData(is);
+ for (DataBlock dataBlock : dataBlockList) {
+ this.dataBlockList.addElement(dataBlock);
+ }
} catch (Exception e) {
throw new IOException(e.getMessage(),e);
}
@@ -240,115 +224,8 @@ public void read(File file, File zipFile) throws IOException, OutOfMemoryError {
private boolean isChombo(File zipFile){
return zipFile.getName().endsWith(".hdf5.zip");
}
-
- private static File createTempHdf5File(InputStream is) throws IOException
- {
- OutputStream out = null;
- try{
- File tempFile = File.createTempFile("temp", "hdf5");
- out=new FileOutputStream(tempFile);
- byte buf[] = new byte[1024];
- int len;
- while((len=is.read(buf))>0) {
- out.write(buf,0,len);
- }
- return tempFile;
- }
- finally
- {
- try {
- if (out != null) {
- out.close();
- }
- } catch (Exception ex) {
- // ignore
- }
- }
- }
-
- static File createTempHdf5File(ZipFile zipFile, String fileName) throws IOException
- {
- InputStream is = null;
- try
- {
- ZipEntry dataEntry = zipFile.getEntry(fileName);
- is = zipFile.getInputStream((ZipArchiveEntry) dataEntry);
- return createTempHdf5File(is);
- }
- finally
- {
- try
- {
- if (is != null)
- {
- is.close();
- }
- }
- catch (Exception ex)
- {
- // ignore
- }
- }
- }
-
-
- private void readHdf5SolutionMetaData(InputStream is) throws Exception
- {
- File tempFile = null;
- FileFormat solFile = null;
- try{
- tempFile = createTempHdf5File(is);
-
- FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
- solFile = fileFormat.createInstance(tempFile.getAbsolutePath(), FileFormat.READ);
- solFile.open();
- DefaultMutableTreeNode rootNode = (DefaultMutableTreeNode)solFile.getRootNode();
- Group rootGroup = (Group)rootNode.getUserObject();
- Group solGroup = (Group)rootGroup.getMemberList().get(0);
-
- List memberList = solGroup.getMemberList();
- for (HObject member : memberList)
- {
- if (!(member instanceof Dataset)){
- continue;
- }
- Dataset dataset = (Dataset)member;
- String dsname = dataset.getName();
- int vt = -1;
- String domain = null;
- List solAttrList = dataset.getMetadata();
- for (Attribute attr : solAttrList)
- {
- String attrName = attr.getName();
- if(attrName.equals("variable type")){
- Object obj = attr.getValue();
- vt = ((int[])obj)[0];
- } else if (attrName.equals("domain")) {
- Object obj = attr.getValue();
- domain = ((String[])obj)[0];
- }
- }
- long[] dims = dataset.getDims();
- String varName = domain == null ? dsname : domain + Variable.COMBINED_IDENTIFIER_SEPARATOR + dsname;
- dataBlockList.addElement(DataBlock.createDataBlock(varName, vt, (int) dims[0], 0));
- }
- } finally {
- try {
- if (solFile != null) {
- solFile.close();
- }
- if (tempFile != null) {
- if (!tempFile.delete()) {
- System.err.println("couldn't delete temp file " + tempFile);
- }
- }
- } catch(Exception e) {
- // ignore
- }
- }
- }
-
-
+
+
public static void writeNew(File file, String[] varNameArr, VariableType[] varTypeArr, org.vcell.util.ISize size, double[][] dataArr) throws IOException {
FileOutputStream fos = null;
diff --git a/vcell-core/src/main/java/org/vcell/vis/io/Hdf5Reader.java b/vcell-core/src/main/java/org/vcell/vis/io/Hdf5Reader.java
deleted file mode 100644
index 619ccc4061..0000000000
--- a/vcell-core/src/main/java/org/vcell/vis/io/Hdf5Reader.java
+++ /dev/null
@@ -1,195 +0,0 @@
-package org.vcell.vis.io;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.StringTokenizer;
-import java.util.Vector;
-
-import org.vcell.vis.core.Vect3D;
-
-import ncsa.hdf.object.Attribute;
-import ncsa.hdf.object.Group;
-import ncsa.hdf.object.HObject;
-import ncsa.hdf.object.h5.H5CompoundDS;
-import ncsa.hdf.object.h5.H5ScalarDS;
-
-public class Hdf5Reader {
-
- /**
- * Z = boolean
- [B = byte
- [S = short
- [I = int
- [J = long
- [F = float
- [D = double
- [C = char
- [L = any non-primitives(Object)
- * @author schaff
- *
- */
- public static abstract class DataColumn {
- private String colName;
- public DataColumn(String name){
- this.colName = name;
- }
- public abstract int getNumRows();
- public abstract double getValue(int index);
- }
-
- public static class IntColumn extends DataColumn {
- int[] data;
- public IntColumn(String name, int[] data){
- super(name);
- this.data = data;
- }
- @Override
- public int getNumRows(){
- return data.length;
- }
- @Override
- public double getValue(int index){
- return data[index];
- }
- }
-
- public static class LongColumn extends DataColumn {
- long[] data;
- public LongColumn(String name, long[] data){
- super(name);
- this.data = data;
- }
- @Override
- public int getNumRows(){
- return data.length;
- }
- @Override
- public double getValue(int index){
- return data[index];
- }
- }
-
- public static class DoubleColumn extends DataColumn {
- double[] data;
- public DoubleColumn(String name, double[] data){
- super(name);
- this.data = data;
- }
- @Override
- public int getNumRows(){
- return data.length;
- }
- @Override
- public double getValue(int index){
- return data[index];
- }
- }
-
- public static Attribute getAttribute(Group group, String name) throws Exception{
- List attributes = group.getMetadata();
- for (Attribute attr : attributes){
- if (attr.getName().equals(name)){
- return attr;
- }
- }
- throw new RuntimeException("failed to find attribute "+name);
- }
-
- public static double getDoubleAttribute(Group group, String name) throws Exception{
- Attribute attr = getAttribute(group,name);
- return ((double[])attr.getValue())[0];
- }
-
- public static float getFloatAttribute(Group group, String name) throws Exception{
- Attribute attr = getAttribute(group,name);
- return ((float[])attr.getValue())[0];
- }
-
- public static int getIntAttribute(Group group, String name) throws Exception{
- Attribute attr = getAttribute(group,name);
- return ((int[])attr.getValue())[0];
- }
-
- public static String getStringAttribute(Group group, String name) throws Exception{
- Attribute attr = getAttribute(group,name);
- return ((String[])attr.getValue())[0];
- }
-
- public static Vect3D getVect3DAttribute(Group group, String name, double defaultZ) throws Exception{
- String str = getStringAttribute(group, name);
- return parseAttrString(str,defaultZ);
- }
-
- public static Group getChildGroup(Group group, String name){
- List memberList = group.getMemberList();
- for (HObject member : memberList) {
- if (member.getName().equals(name)){
- if (member instanceof Group) {
- return (Group)member;
- }else{
- throw new RuntimeException("expecting type Group for group member '"+name+"'");
- }
- }
- }
- throw new RuntimeException("child group '"+name+"' not found");
- }
-
- public static Hdf5Reader.DataColumn[] getDataTable(Group group, String name) throws Exception{
- List memberList = group.getMemberList();
- for (HObject member : memberList) {
- if (member.getName().equals(name)){
- if (member instanceof H5CompoundDS) {
- H5CompoundDS compoundDataSet = (H5CompoundDS) member;
- Vector columnValueArrays = (Vector)compoundDataSet.read();
- String[] columnNames = compoundDataSet.getMemberNames();
- ArrayList dataColumns = new ArrayList();
- for (int c=0;c valueList = new ArrayList();
- while (st.hasMoreTokens())
- {
- String token = st.nextToken();
- valueList.add(Double.parseDouble(token));
- }
- if (valueList.size()==2){
- return new Vect3D(valueList.get(0),valueList.get(1),defaultZ);
- }else if (valueList.size()==3){
- return new Vect3D(valueList.get(0),valueList.get(1),valueList.get(2));
- }else{
- throw new RuntimeException("cannot parse, unexpected array size "+valueList.size());
- }
- }
-
-
-}
diff --git a/vcell-core/src/test/java/cbit/vcell/solvers/mb/VH5PathTest.java b/vcell-core/src/test/java/cbit/vcell/solvers/mb/MovingBoundardyVH5PathTest.java
similarity index 71%
rename from vcell-core/src/test/java/cbit/vcell/solvers/mb/VH5PathTest.java
rename to vcell-core/src/test/java/cbit/vcell/solvers/mb/MovingBoundardyVH5PathTest.java
index 1ee0844c14..9c6a2e59a3 100644
--- a/vcell-core/src/test/java/cbit/vcell/solvers/mb/VH5PathTest.java
+++ b/vcell-core/src/test/java/cbit/vcell/solvers/mb/MovingBoundardyVH5PathTest.java
@@ -24,7 +24,7 @@
@Disabled
@Tag("Fast")
-public class VH5PathTest extends H5Client {
+public class MovingBoundardyVH5PathTest extends MovingBoundaryH5Client {
private static String fname = FILE;
private FileFormat testFile = null;
private Group root = null;
@@ -65,25 +65,25 @@ public void run() {
// create the file and add groups ans dataset into the file
try {
Group root = (Group) ((javax.swing.tree.DefaultMutableTreeNode) testFile.getRootNode()).getUserObject();
- VH5Path vpath = new VH5Path(root, "elements" ,"volume");
+ MovingBoundardyVH5Path vpath = new MovingBoundardyVH5Path(root, "elements" ,"volume");
System.out.println(vpath.foundType());
- VH5TypedPath<double[]> tpath = new VH5TypedPath<double[]>(root, double[].class,"elements" ,"volume");
+ MovingBoundardyVH5TypedPath<double[]> tpath = new MovingBoundardyVH5TypedPath<double[]>(root, double[].class,"elements" ,"volume");
double[] e = tpath.get();
System.out.println(e[0]);
- VH5Path bpPath = new VH5Path(root, "elements" ,"boundaryPosition");
+ MovingBoundardyVH5Path bpPath = new MovingBoundardyVH5Path(root, "elements" ,"boundaryPosition");
Object data = bpPath.getData();
System.out.println(data.getClass().getSimpleName());
- VH5Path vpPath = new VH5Path(root, "elements" ,"volumePoints");
+ MovingBoundardyVH5Path vpPath = new MovingBoundardyVH5Path(root, "elements" ,"volumePoints");
data = vpPath.getData();
System.out.println(data.getClass().getSimpleName());
-// VH5TypedPath<String[]> spath = new VH5TypedPath<String[]>(root, String[].class,"elements" ,"front description");
- VH5TypedPath<String> spath = new VH5TypedPath<String>(root, String.class,"elements" ,"front description");
+// MovingBoundardyVH5TypedPath<String[]> spath = new MovingBoundardyVH5TypedPath<String[]>(root, String[].class,"elements" ,"front description");
+ MovingBoundardyVH5TypedPath<String> spath = new MovingBoundardyVH5TypedPath<String>(root, String.class,"elements" ,"front description");
// String[] sdata = spath.get();
// System.out.println(sdata[0]);
System.out.println(spath.get( ));
- VH5Path xpath = new VH5Path(root, "elements" ,"front description");
+ MovingBoundardyVH5Path xpath = new MovingBoundardyVH5Path(root, "elements" ,"front description");
Object o = xpath.getData();
System.out.println(o);
dtype("elements","endX");
@@ -94,15 +94,15 @@ public void run() {
dtype("solverTimeStep");
dtype("timeStep");
dtype("timeStepTimes");
- VH5TypedPath ipath = new VH5TypedPath