From c1d75331529a156f87160c78038fc50ae3d0a175 Mon Sep 17 00:00:00 2001 From: Jim Schaff Date: Tue, 23 Apr 2024 10:33:46 -0400 Subject: [PATCH 01/16] remove unused vcell-web service --- docker/build/Dockerfile-web-dev | 85 --- docker/build/vcell-web.log4j.xml | 32 -- vcell-web/.gitignore | 1 - vcell-web/pom.xml | 105 ---- .../main/java/org/vcell/web/MainService.java | 512 ------------------ 5 files changed, 735 deletions(-) delete mode 100644 docker/build/Dockerfile-web-dev delete mode 100644 docker/build/vcell-web.log4j.xml delete mode 100644 vcell-web/.gitignore delete mode 100644 vcell-web/pom.xml delete mode 100644 vcell-web/src/main/java/org/vcell/web/MainService.java diff --git a/docker/build/Dockerfile-web-dev b/docker/build/Dockerfile-web-dev deleted file mode 100644 index a35261b062..0000000000 --- a/docker/build/Dockerfile-web-dev +++ /dev/null @@ -1,85 +0,0 @@ -FROM eclipse-temurin:17 as jre-build - -# Create a custom Java runtime -RUN $JAVA_HOME/bin/jlink \ - --add-modules ALL-MODULE-PATH \ - --strip-debug \ - --no-man-pages \ - --no-header-files \ - --compress=2 \ - --output /javaruntime - -# Define base image and copy in jlink created minimal Java 17 environment -FROM debian:buster-slim -ENV JAVA_HOME=/opt/java/openjdk -ENV PATH "${JAVA_HOME}/bin:${PATH}" -COPY --from=jre-build /javaruntime $JAVA_HOME - -RUN apt-get -y update && apt-get install -y bash nano wget apt-utils libfreetype6 fontconfig fonts-dejavu - -RUN DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends tzdata -RUN unlink /etc/localtime || true -RUN ln -s /usr/share/zoneinfo/America/New_York /etc/localtime - -RUN DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends tzdata -RUN unlink /etc/localtime || true -RUN ln -s /usr/share/zoneinfo/America/New_York /etc/localtime - -WORKDIR /usr/local/app - -COPY ./vcell-web/target/vcell-web-0.0.1-SNAPSHOT.jar \ - ./vcell-web/target/maven-jars/*.jar \ - ./lib/ - -COPY ./pythonVtk ./pythonVtk -COPY ./nativelibs/linux64 ./nativelibs/linux64 -COPY ./docker/build/vcell-web.log4j.xml . 
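# NOTE: the ENV defaults below are deliberate "...-not-set" placeholders;
# the swarm/kustomize env files seen later in this series override them at
# deploy time, and the ENTRYPOINT forwards each value to the JVM as a -D
# system property, which MainService reads via PropertyLoader.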
- -ENV softwareVersion=SOFTWARE-VERSION-NOT-SET \ - serverid=SITE \ - dburl="db-url-not-set" \ - dbdriver="db-driver-not-set" \ - dbuser="db-user-not-set" \ - export_baseurl="export-baseurl-not-set" \ - simdatadir_external=/path/to/external/simdata/ \ - simdataCacheSize="simdataCacheSize-not-set" \ - webDataPort="webDataPort-not-set" - -ENV dbpswdfile=/run/secrets/dbpswd \ - keystore=/run/secrets/keystorefile \ - keystorepswdfile=/run/secrets/keystorepswd - - -VOLUME /simdata -VOLUME /simdata_secondary -VOLUME /exportdir - -EXPOSE 8000 - -ENTRYPOINT java \ - -Xdebug -agentlib:jdwp=transport=dt_socket,address=*:8000,server=y,suspend=n \ - -XX:MaxRAMPercentage=100 \ -# -XX:+PrintFlagsFinal -XshowSettings:vm \ - -Djava.awt.headless=true \ - -Dvcell.softwareVersion="${softwareVersion}" \ - -Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager \ - -Dlog4j.configurationFile=/usr/local/app/vcell-web.log4j.xml \ - -Dvcell.server.id="${serverid}" \ - -Dvcell.server.dbConnectURL="${dburl}" \ - -Dvcell.server.dbDriverName="${dbdriver}" \ - -Dvcell.server.dbUserid="${dbuser}" \ - -Dvcell.db.pswdfile="${dbpswdfile}" \ - -Dvcell.python.executable=/usr/bin/python \ - -Dvcell.primarySimdatadir.internal=/simdata \ - -Dvcell.secondarySimdatadir.internal=/simdata_secondary \ - -Dvcell.primarySimdatadir.external="${simdatadir_external}" \ - -Dvcell.simdataCacheSize="${simdataCacheSize}" \ - -Dvcell.export.baseDir.internal=/exportdir \ - -Dvcell.export.baseURL="${export_baseurl}" \ - -Dvcell.installDir=/usr/local/app \ - -Dvcellapi.keystore.file="${keystore}" \ - -Dvcellapi.keystore.pswdfile="${keystorepswdfile}" \ - -Dvcelldata.web.server.port=${webDataPort} \ - -cp "./lib/*" org.vcell.web.MainService - - \ No newline at end of file diff --git a/docker/build/vcell-web.log4j.xml b/docker/build/vcell-web.log4j.xml deleted file mode 100644 index b9babe8b16..0000000000 --- a/docker/build/vcell-web.log4j.xml +++ /dev/null @@ -1,32 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/vcell-web/.gitignore b/vcell-web/.gitignore deleted file mode 100644 index da7560e07f..0000000000 --- a/vcell-web/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/.apt_generated_tests/ diff --git a/vcell-web/pom.xml b/vcell-web/pom.xml deleted file mode 100644 index 22b2d98f80..0000000000 --- a/vcell-web/pom.xml +++ /dev/null @@ -1,105 +0,0 @@ - - - - - 4.0.0 - - org.vcell - vcell-pom - 0.0.1-SNAPSHOT - - - vcell-web - vcell-web - VCell Data Export Web Interface - http://vcell.org/ - 1997 - - VCell - http://vcell.org - - - - MIT - https://opensource.org/licenses/MIT - - - - - frm - JFrank Morgan - - founder - lead - developer - debugger - reviewer - support - maintainer - - - - - - Various - - - - - - VCell Open Discussion Forum - https://groups.google.com/group/vcell-discuss - https://groups.google.com/group/vcell-discuss - vcell-discuss@googlegroups.com - https://groups.google.com/group/vcell-discuss - - - - - scm:git:git://github.com/virtualcell/vcell - scm:git:git@github.com:virtualcell/vcell - HEAD - https://github.com/virtualcell/vcell - - - Bugzilla - http://code3.cam.uchc.edu/bugzilla/ - - - Travis CI - https://travis-ci.org/virtualcell/vcell - - - - MIT - UConn Health - - - none - - - - - org.apache.httpcomponents - httpcore - ${httpcore.version} - - - org.apache.httpcomponents - httpclient - ${httpclient.version} - - - org.vcell - vcell-server - ${project.version} - - - org.junit.jupiter - junit-jupiter - test - - - diff --git a/vcell-web/src/main/java/org/vcell/web/MainService.java 
b/vcell-web/src/main/java/org/vcell/web/MainService.java deleted file mode 100644 index 02b3b28e93..0000000000 --- a/vcell-web/src/main/java/org/vcell/web/MainService.java +++ /dev/null @@ -1,512 +0,0 @@ -package org.vcell.web; - -import java.io.File; -import java.io.FileInputStream; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.io.InputStream; -import java.net.URL; -import java.net.URLDecoder; -import java.net.URLEncoder; -import java.nio.charset.Charset; -import java.nio.file.Files; -import java.security.KeyStore; -import java.security.SecureRandom; -import java.sql.Connection; -import java.sql.SQLException; -import java.text.DateFormat; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Base64; -import java.util.Comparator; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.StringTokenizer; -import java.util.TreeMap; -import java.util.TreeSet; - -import javax.net.ssl.KeyManager; -import javax.net.ssl.KeyManagerFactory; -import javax.net.ssl.SSLContext; -import javax.net.ssl.TrustManager; -import javax.net.ssl.TrustManagerFactory; - -import com.google.inject.Guice; -import com.google.inject.Injector; -import org.apache.commons.httpclient.URI; -import org.apache.http.Header; -import org.apache.http.HttpException; -import org.apache.http.HttpRequest; -import org.apache.http.HttpResponse; -import org.apache.http.HttpStatus; -import org.apache.http.NameValuePair; -import org.apache.http.client.utils.URLEncodedUtils; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; -import org.apache.http.impl.bootstrap.HttpServer; -import org.apache.http.impl.bootstrap.ServerBootstrap; -import org.apache.http.protocol.HttpContext; -import org.apache.http.protocol.HttpRequestHandler; -import org.apache.http.ssl.SSLContexts; -import org.vcell.db.ConnectionFactory; -import org.vcell.db.DatabaseService; -import org.vcell.db.DatabaseSyntax; -import org.vcell.db.KeyFactory; -import org.vcell.dependency.server.VCellServerModule; -import org.vcell.util.ConfigurationException; -import org.vcell.util.DataAccessException; -import org.vcell.util.ObjectNotFoundException; -import org.vcell.util.TokenMangler; -import org.vcell.util.document.BioModelChildSummary; -import org.vcell.util.document.BioModelInfo; -import org.vcell.util.document.KeyValue; -import org.vcell.util.document.User; -import org.vcell.util.document.UserLoginInfo; -import org.vcell.util.document.UserLoginInfo.DigestedPassword; -import org.vcell.util.document.VCDataIdentifier; -import org.vcell.util.document.VCInfoContainer; - -import cbit.vcell.export.server.ExportServiceImpl; -import cbit.vcell.message.messages.MessageConstants; -import cbit.vcell.modeldb.AdminDBTopLevel; -import cbit.vcell.modeldb.DatabaseServerImpl; -import cbit.vcell.modeldb.DbDriver; -import cbit.vcell.resource.PropertyLoader; -import cbit.vcell.simdata.Cachetable; -import cbit.vcell.simdata.DataServerImpl; -import cbit.vcell.simdata.DataSetControllerImpl; -import cbit.vcell.simdata.Hdf5Utils; -import cbit.vcell.solver.VCSimulationDataIdentifier; -import cbit.vcell.solver.VCSimulationIdentifier; -import cbit.vcell.solver.ode.ODESimData; -import cbit.vcell.util.ColumnDescription; -import ncsa.hdf.hdf5lib.H5; -import ncsa.hdf.hdf5lib.HDF5Constants; -import ncsa.hdf.hdf5lib.exceptions.HDF5Exception; -import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException; - -public class MainService { - - private HttpServer server; - private static HashMap 
useridMap = new HashMap(); - private static ConnectionFactory conFactory; - - private static class AuthenticationInfo { - final User user; - final DigestedPassword digestedPassword; - AuthenticationInfo(User user, DigestedPassword digestedPassword){ - this.user = user; - this.digestedPassword = digestedPassword; - } - } - - public static void main(String[] args) { - try { - Injector injector = Guice.createInjector(new VCellServerModule()); - MainService mainService = injector.getInstance(MainService.class); - mainService.start(); - } catch (Exception e) { - e.printStackTrace(); - } - } - - public MainService() throws SQLException, DataAccessException, FileNotFoundException, ConfigurationException { - - MainService.conFactory = DatabaseService.getInstance().createConnectionFactory(); - KeyFactory keyFactory = conFactory.getKeyFactory(); - DatabaseServerImpl databaseServerImpl = new DatabaseServerImpl(conFactory, keyFactory); - AdminDBTopLevel adminDbTopLevel = new AdminDBTopLevel(conFactory); - String cacheSize = PropertyLoader.getRequiredProperty(PropertyLoader.simdataCacheSizeProperty); - long maxMemSize = Long.parseLong(cacheSize); - - Cachetable cacheTable = new Cachetable(MessageConstants.MINUTE_IN_MS * 20,maxMemSize); - DataSetControllerImpl dataSetControllerImpl = new DataSetControllerImpl(cacheTable, - new File(PropertyLoader.getRequiredProperty(PropertyLoader.primarySimDataDirInternalProperty)), - new File(PropertyLoader.getProperty(PropertyLoader.secondarySimDataDirInternalProperty, PropertyLoader.getRequiredProperty(PropertyLoader.primarySimDataDirInternalProperty)))); - ExportServiceImpl exportServiceImpl = new ExportServiceImpl(); - DataServerImpl dataServerImpl = new DataServerImpl(dataSetControllerImpl, exportServiceImpl); - - String exportBaseURL = PropertyLoader.getRequiredProperty(PropertyLoader.exportBaseURLProperty); - - try (InputStream inputStream = new FileInputStream(new File(System.getProperty(PropertyLoader.vcellapiKeystoreFile)))) { - final KeyStore serverKeyStore = KeyStore.getInstance("jks"); - String pwd = Files.readAllLines(new File(System.getProperty(PropertyLoader.vcellapiKeystorePswdFile)).toPath()).get(0); - serverKeyStore.load(inputStream, pwd.toCharArray()); - KeyManagerFactory keyManagerFactory = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm()); - keyManagerFactory.init(serverKeyStore, pwd.toCharArray()); - KeyManager[] serverKeyManagers = keyManagerFactory.getKeyManagers(); - TrustManagerFactory trustManagerFactory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); - trustManagerFactory.init(serverKeyStore); - TrustManager[] serverTrustManagers = trustManagerFactory.getTrustManagers(); - final SSLContext sslContext = SSLContexts.createDefault(); - sslContext.init(serverKeyManagers, serverTrustManagers, new SecureRandom()); - int listenPort = Integer.parseInt(System.getProperty(PropertyLoader.webDataServerPort)); - server = ServerBootstrap.bootstrap().registerHandler("*", new HttpRequestHandler() { - @Override - public void handle(HttpRequest request, HttpResponse response, HttpContext context) throws HttpException, IOException { - if(request.getRequestLine().getMethod().toUpperCase().equals("GET")) { - URI uri = new URI(request.getRequestLine().getUri(),true); - final List parse = URLEncodedUtils.parse(uri.getQuery(),Charset.forName("utf-8")); - TreeMap queryMap = new TreeMap(); - for(NameValuePair nameValuePair:parse) { - String values = queryMap.get(nameValuePair.getName()); - if(values == null) { -// 
values = new ArrayList(); - queryMap.put(nameValuePair.getName(), nameValuePair.getValue()); - } -// values.add(nameValuePair.getValue()); - } - try { - User authuser = null; - //HttpRequest request = (HttpRequest)req; - //Use "WWW-Authenticate - Basic" authentication scheme - //Browser takes care of asking user for credentials and sending them - //Must be used with https connection to hide credentials - //Header authHeader = request.getHeaders().getFirst("Authorization"); - Header authHeader = request.getFirstHeader("Authorization"); - if(authHeader != null) {//caller included a user and password - String typeAndCredential = authHeader.getValue(); -// System.out.println("--"+up); - java.util.StringTokenizer st = new java.util.StringTokenizer(typeAndCredential," "); - String type=st.nextToken(); - String userAndPasswordB64 = st.nextToken(); - String s = new String(Base64.getDecoder().decode(userAndPasswordB64)); -// System.out.println("type="+type+" decoded="+s); - if(type.equals("Basic")) { - java.util.StringTokenizer st2 = new java.util.StringTokenizer(s,":"); - if(st2.countTokens() == 2) { - String usr=st2.nextToken(); - String pw = st2.nextToken(); - // System.out.println("user="+usr+" password="+pw); - UserLoginInfo.DigestedPassword dpw = new UserLoginInfo.DigestedPassword(pw); - // System.out.println(dpw); -// VCellApiApplication application = ((VCellApiApplication)getApplication()); -// authuser = application.getUserVerifier().authenticateUser(usr,dpw.getString().toCharArray()); - authuser = authenticateUser(usr,dpw.getString().toCharArray(),adminDbTopLevel); - // System.out.println(authuser); - } - } - } - if(authuser == null) { -// //If we get here either there was not user/pw or user/pw didn't authenticate -// //We need to add a response header -// //Response headers container might be null so add one if necessary -// if(((HttpResponse)response).getAllHeaders() == null) { -// ((HttpResponse)response).getAttributes(). 
-// put(HeaderConstants.ATTRIBUTE_HEADERS,new Series(Header.class)); -// } - //Tell whoever called us we want a user and password that we will check against admin vcell users - response.addHeader("WWW-Authenticate", "Basic"); - response.setStatusCode(HttpStatus.SC_UNAUTHORIZED); - return; - } - -// Form form = request.getResourceRef().getQueryAsForm(); -// if (form.getFirst("stats") != null){ -// String requestTypeString = form.getFirstValue("stats", true);//get .../rpc?stats=value 'value' -// if((authuser.getName().equals("frm") || -// authuser.getName().equals("les") || -// authuser.getName().equals("ion") || -// authuser.getName().equals("danv") || -// authuser.getName().equals("mblinov") || -// authuser.getName().equals("ACowan"))) { -// String result = restDatabaseService.getBasicStatistics(); -// response.setStatus(Status.SUCCESS_OK); -// response.setEntity(result, MediaType.TEXT_HTML); -// return; -// } -// -// }else - if(queryMap.get("route") != null && queryMap.get("type") != null && queryMap.get("simid") != null && queryMap.get("jobid") != null) { -// final Path path = Paths.get(uri.getPath()); -// final Iterator iterator = path.iterator(); - final String SIMDATADDF5 = "simhdf5"; - final String ODE = "ode"; - String simdataRoute = queryMap.get("route"); - String dataType = queryMap.get("type"); - if(simdataRoute.toLowerCase().equals(SIMDATADDF5) && dataType.toLowerCase().equals(ODE)) { - String simID = null; - TreeSet jobIDs = new TreeSet(); -// String userKey = null; -// String userid = null; - double blank = -1.0; - for(NameValuePair nvp:parse) { - if(nvp.getName().toLowerCase().equals("simid")) { - simID = nvp.getValue(); - }else if(nvp.getName().toLowerCase().equals("jobid")) { - String jobStr = URLDecoder.decode(nvp.getValue(), "UTF-8");// integer Separated by commas - StringTokenizer st = new StringTokenizer(jobStr,","); - while(st.hasMoreElements()) { - jobIDs.add(Integer.parseInt(st.nextToken())); - } - } -// else if(nvp.getName().toLowerCase().equals("userkey")) { -// userKey = nvp.getValue(); -// }else if(nvp.getName().toLowerCase().equals("userid")) { -// userid = URLDecoder.decode(nvp.getValue(), "UTF-8"); -// } - else if(nvp.getName().toLowerCase().equals("blank")) { - blank = Double.parseDouble(nvp.getValue()); - } - } -// User user = new User(userid,new KeyValue(userKey)); - VCSimulationIdentifier vcsid = new VCSimulationIdentifier(new KeyValue(simID), authuser); - File hdf5File = createOdeHdf5(vcsid, jobIDs.toArray(new Integer[0]), blank,dataServerImpl); - - URL url = new URL(exportBaseURL + hdf5File.getName()); - response.setStatusCode(HttpStatus.SC_MOVED_TEMPORARILY); - response.addHeader("Location",url.toString()); - response.setEntity(null); - return; - } - }else if(queryMap.get("modelInfos") != null) { - File hdf5TempFile = createInfosHdf5(authuser,databaseServerImpl); - URL url = new URL(exportBaseURL + hdf5TempFile.getName()); - response.setStatusCode(HttpStatus.SC_MOVED_TEMPORARILY); - response.addHeader("Location",url.toString()); - response.setEntity(null); - return; - } - - response.setStatusCode(HttpStatus.SC_NOT_FOUND); - String mesg = "req='"+uri.toString()+"'
<br>No vcell-web service path matches this query<br>
"+""; - StringEntity se = new StringEntity(mesg); - se.setContentType(ContentType.TEXT_HTML.getMimeType()); - response.setEntity(se); - - } catch (Exception e) { - String errMesg = "Error RpcRestlet.handle(...) req='"+request.toString()+"'
err='"+e.getMessage()+"'
"+""; -// getLogger().severe(errMesg); - e.printStackTrace(); - response.setStatusCode(HttpStatus.SC_INTERNAL_SERVER_ERROR); - StringEntity se = new StringEntity(errMesg); - se.setContentType(ContentType.TEXT_HTML.getMimeType()); - response.setEntity(se); - } - - } - } - }).setListenerPort(listenPort).setSslContext(sslContext).create(); - - }catch(Exception e) { - e.printStackTrace(); - } - } - - public void start() throws IOException { - this.server.start(); - } - - private static File createInfosHdf5(User authuser,DatabaseServerImpl databaseServerImpl) throws IOException, HDF5LibraryException, HDF5Exception, DataAccessException { - String exportBaseDir = PropertyLoader.getRequiredProperty(PropertyLoader.exportBaseDirInternalProperty); - File hdf5TempFile = File.createTempFile("webexport_Infos_"+TokenMangler.fixTokenStrict(authuser.getName())+"_", ".hdf", new File(exportBaseDir)); -// System.out.println("/home/vcell/Downloads/hdf5/HDFView/bin/HDFView "+hdf5TempFile.getAbsolutePath()+" &"); - int hdf5FileID = H5.H5Fcreate(hdf5TempFile.getAbsolutePath(), HDF5Constants.H5F_ACC_TRUNC,HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); - int bioModelsGroup = Hdf5Utils.createGroup(hdf5FileID, "BioModels"); - VCInfoContainer vcInfoContainer = null; - try (Connection con = conFactory.getConnection(null)) { - vcInfoContainer = DbDriver.getVCInfoContainer(authuser, con, DatabaseSyntax.ORACLE,DbDriver.EXTRAINFO_ALL); - } catch (SQLException e) { - e.printStackTrace(); - } - BioModelInfo[] bioModelInfos = vcInfoContainer.getBioModelInfos(); - Arrays.sort(bioModelInfos, new Comparator () { - @Override - public int compare(BioModelInfo o1, BioModelInfo o2) { - if(o1.getVersion().getOwner().getName().equals(o2.getVersion().getOwner().getName())) { - if(o1.getVersion().getName().equals(o2.getVersion().getName())) { - return o1.getVersion().getDate().compareTo(o2.getVersion().getDate()); - } - return o1.getVersion().getName().compareToIgnoreCase(o2.getVersion().getName()); - } - return o1.getVersion().getOwner().getName().compareToIgnoreCase(o2.getVersion().getOwner().getName()); - }}); - String lastUser = null; - String lastModel = null; - int lastUserGroupID = -1; - int lastModelGroupID = -1; - final DateFormat dateTimeInstance = DateFormat.getDateTimeInstance(); - for(BioModelInfo bioModelInfo:bioModelInfos) { - if(lastUser == null || !lastUser.equals(bioModelInfo.getVersion().getOwner().getName())) { - if(lastUserGroupID != -1) { - H5.H5Gclose(lastUserGroupID); - } - lastUser = bioModelInfo.getVersion().getOwner().getName(); - lastUserGroupID = Hdf5Utils.createGroup(bioModelsGroup, lastUser); - } -// System.out.println("'"+lastModel+"'"+" "+"'"+bioModelInfo.getVersion().getName()+"'"+" "+(bioModelInfo.getVersion().getName().equals(lastModel))); - if(lastModel == null || !lastModel.equals(bioModelInfo.getVersion().getName())) { - if(lastModelGroupID != -1) { - H5.H5Gclose(lastModelGroupID); - } - lastModel = bioModelInfo.getVersion().getName(); - lastModelGroupID = Hdf5Utils.createGroup(lastUserGroupID, lastModel.replace("/", "fwdslsh")); - - } -// int bioModelGroupID = Hdf5Utils.createGroup(lastUserGroupID, (bioModelInfo.getVersion().getName()).replace("/", "fwdslsh")); - final String format = dateTimeInstance.format(bioModelInfo.getVersion().getDate()); -// System.out.println(lastUser+" "+lastModel.replace("/", "fwdslsh")+" "+format); - int dateGroupID = Hdf5Utils.createGroup(lastModelGroupID,format); - //+"_"+dateTimeInstance.format(bioModelInfo.getVersion().getDate()) - 
Hdf5Utils.insertAttribute(dateGroupID, "versionKey", bioModelInfo.getVersion().getVersionKey().toString()); -// ArrayList ijContextInfos = new ArrayList<>(); - BioModelChildSummary bioModelChildSummary = bioModelInfo.getBioModelChildSummary(); - if(bioModelChildSummary != null && bioModelChildSummary.getSimulationContextNames() != null && bioModelInfo.getBioModelChildSummary().getSimulationContextNames().length > 0) { - for(int i = 0; i 0) { - ArrayList simNameArr = new ArrayList(); - for(String simName:bioModelInfo.getBioModelChildSummary().getSimulationNames(bioModelContextName)) { - int bmSimID = -1; - if(simName.contains("/")) {//handle "/" forbidden in object names - bmSimID = Hdf5Utils.createGroup(bmContextID, URLEncoder.encode(simName,"UTF-8")); - Hdf5Utils.insertAttribute(bmSimID,"urlencoded","true"); - }else { - bmSimID = Hdf5Utils.createGroup(bmContextID, simName); - } - Hdf5Utils.insertAttribute(bmSimID,"simid",(bioModelInfo.getSimID(simName)==null?"null":bioModelInfo.getSimID(simName).toString())); - Hdf5Utils.insertAttribute(bmSimID,"scancount",bioModelInfo.getScanCount(simName)+""); -// simNameArr.add(simName); -// simNameArr.add((bioModelInfo.getSimID(simName)==null?"null":bioModelInfo.getSimID(simName).toString())); -// simNameArr.add(bioModelInfo.getScanCount(simName)+""); - H5.H5Gclose(bmSimID); - } -// Hdf5Utils.insertStrings(bmContextID, "sims", new long[] {bioModelInfo.getBioModelChildSummary().getSimulationNames(bioModelContextName).length,3},simNameArr); - } - H5.H5Gclose(bmContextID); -// IJContextInfo ijContextInfo = new IJContextInfo(bioModelContextName,bioModelInfo.getBioModelChildSummary().getAppTypes()[i],bioModelInfo.getBioModelChildSummary().getGeometryDimensions()[i],bioModelInfo.getBioModelChildSummary().getGeometryNames()[i],ijSimInfos); -// ijContextInfos.add(ijContextInfo); - } - } - } - H5.H5Gclose(dateGroupID); - -// modelInfos.add(new IJModelInfo(bioModelInfo.getVersion().getName(), bioModelInfo.getVersion().getDate(), IJDocType.bm, openVCDocumentVersionKeys.contains(bioModelInfo.getVersion().getVersionKey()),bioModelInfo.getVersion().getOwner().getName(),bioModelInfo.getVersion().getVersionKey(), ijContextInfos)); - } - H5.H5Gclose(lastModelGroupID); - H5.H5Gclose(lastUserGroupID); - H5.H5Gclose(bioModelsGroup); - H5.H5Fclose(hdf5FileID); - return hdf5TempFile; - } - - private static File createOdeHdf5(VCSimulationIdentifier vcsid,Integer[] scanJobs,double blankCellValue/*for Histogram*/,DataServerImpl dataServerImpl) throws Exception{ - File hdf5TempFile = null; - int hdf5FileID = -1; - int jobGroupID = -1; - try { - String exportBaseDir = PropertyLoader.getRequiredProperty(PropertyLoader.exportBaseDirInternalProperty); - hdf5TempFile = File.createTempFile("webexport_Ode_"+vcsid.getSimulationKey()+"_", ".hdf", new File(exportBaseDir)); - hdf5FileID = H5.H5Fcreate(hdf5TempFile.getAbsolutePath(), HDF5Constants.H5F_ACC_TRUNC,HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); - Hdf5Utils.insertAttribute(hdf5FileID, "simID",vcsid.getSimulationKey().toString());//Hdf5Utils.writeHDF5Dataset(hdf5FileID, "simID", null,vcsid.getSimulationKey().toString() , true); - Hdf5Utils.insertAttribute(hdf5FileID,"exportUser",vcsid.getOwner().getName()) ;//Hdf5Utils.writeHDF5Dataset(hdf5FileID, "exportUser", null,vcsid.getOwner().getName() , true); - - for(int scan=0;scan orderedColumnNames = new TreeSet(new Comparator() { - @Override - public int compare(ColumnDescription o1, ColumnDescription o2) { - if(o1.getName().equals("t") && o2.getName().equals("t")) {//'t' 
as first in the list - return 0; - }else if(o1.getName().equals("t")) { - return -1; - }else if(o2.getName().equals("t")) { - return 1; - } - return o1.getName().compareToIgnoreCase(o2.getName()); - }}); - orderedColumnNames.addAll(Arrays.asList(odeSimData.getColumnDescriptions())); - jobGroupID = (int) Hdf5Utils.createGroup(hdf5FileID, "Set "+scan); - //writeHDF5Dataset(hdf5FileID, "Set "+scan, null, null, false); - Hdf5Utils.HDF5WriteHelper help0 = Hdf5Utils.createDataset(jobGroupID, "data", new long[] {allColumnsCount,allRowsCount}); - //(HDF5WriteHelper) Hdf5Utils.writeHDF5Dataset(jobGroupID, "data", new long[] {allColumnsCount,allRowsCount}, new Object[] {}, false); - double[] fromData = new double[allColumnsCount*allRowsCount]; - int index = 0; - ArrayList dataTypes = new ArrayList(); - ArrayList dataIDs = new ArrayList(); - ArrayList dataShapes = new ArrayList(); - ArrayList dataLabels = new ArrayList(); - ArrayList dataNames = new ArrayList(); - Iterator columnNamesIterator = orderedColumnNames.iterator(); - while(columnNamesIterator.hasNext()) { - - ColumnDescription colDescr = columnNamesIterator.next(); - final String columnName = colDescr.getName(); - final int columnIndex = odeSimData.findColumn(columnName); - dataTypes.add("float64"); - dataIDs.add("data_set_"+columnName); - dataShapes.add(allRowsCount+""); - dataLabels.add(columnName); - dataNames.add(columnName/*name*/); - double[] columnData = odeSimData.extractColumn(columnIndex); - for(int myrows=0;myrows Date: Tue, 23 Apr 2024 10:45:27 -0400 Subject: [PATCH 02/16] remove unused SimDataWebService which uses legacy HDF5 library --- docker/build/Dockerfile-api-dev | 2 - docker/build/Dockerfile-data-dev | 2 - docker/kustomize/config/devjim/api.env | 2 - docker/kustomize/config/devjim/data.env | 2 - docker/kustomize/config/stage/api.env | 1 - docker/kustomize/config/stage/data.env | 2 - docker/swarm/docker-compose-small.yml | 4 - docker/swarm/docker-compose.yml | 4 - .../swarm/localconfig_mockslurm-not-used.sh | 2 - docker/swarm/localconfig_realslurm_oracle.sh | 2 - .../localconfig_realslurm_oracle_zeke.sh | 3 +- .../swarm/localconfig_realslurm_postgres.sh | 2 - docker/swarm/serverconfig-uch.sh | 2 - .../cbit/vcell/resource/PropertyLoader.java | 2 - .../message/server/data/SimDataServer.java | 3 - .../server/data/SimDataServerMain.java | 3 +- .../server/data/SimDataWebService.java | 222 ------------------ 17 files changed, 2 insertions(+), 258 deletions(-) delete mode 100644 vcell-server/src/main/java/cbit/vcell/message/server/data/SimDataWebService.java diff --git a/docker/build/Dockerfile-api-dev b/docker/build/Dockerfile-api-dev index 04fe73a18e..64dfc3a867 100644 --- a/docker/build/Dockerfile-api-dev +++ b/docker/build/Dockerfile-api-dev @@ -65,7 +65,6 @@ ENV softwareVersion=SOFTWARE-VERSION-NOT-SET \ smtp_port="smtp-port-not-set" \ smtp_emailaddress="smtp-emailaddress-not-set" \ simdataCacheSize="simdataCacheSize-not-set" \ - webDataPort="webDataPort-not-set" \ ssl_ignoreHostMismatch=true \ ssl_ignoreCertProblems=false \ serverPrefixV0="server-path-prefix-v0-not-set" \ @@ -115,7 +114,6 @@ ENTRYPOINT java \ -Dvcellapi.keystore.pswdfile="${keystorepswdfile}" \ -Dvcell.smtp.hostName="${smtp_hostname}" \ -Dvcell.smtp.port="${smtp_port}" \ - -Dvcelldata.web.server.port=${webDataPort} \ -Dvcell.smtp.emailAddress="${smtp_emailaddress}" \ -Dvcell.ssl.ignoreHostMismatch="${ssl_ignoreHostMismatch}" \ -Dvcell.ssl.ignoreCertProblems="${ssl_ignoreCertProblems}" \ diff --git a/docker/build/Dockerfile-data-dev 
b/docker/build/Dockerfile-data-dev index 30d48b83ee..8d634ca4d1 100644 --- a/docker/build/Dockerfile-data-dev +++ b/docker/build/Dockerfile-data-dev @@ -58,7 +58,6 @@ ENV softwareVersion=SOFTWARE-VERSION-NOT-SET \ export_baseurl="export-baseurl-not-set" \ simdatadir_external=/path/to/external/simdata/ \ simdataCacheSize="simdataCacheSize-not-set" \ - webDataPort="webDataPort-not-set" \ servertype="servertype-not-set" \ s3ProxyPortExternal="80" \ s3export_baseURL="s3-export-baseurl-not-set" @@ -110,6 +109,5 @@ ENTRYPOINT java \ -Dvcell.mongodb.host.internal=${mongodb_host_internal} \ -Dvcell.mongodb.port.internal=${mongodb_port_internal} \ -Dvcell.mongodb.database=${mongodb_database} \ - -Dvcelldata.web.server.port=${webDataPort} \ -cp "./lib/*" cbit.vcell.message.server.data.SimDataServerMain \ "${servertype}" diff --git a/docker/kustomize/config/devjim/api.env b/docker/kustomize/config/devjim/api.env index 0d6a807a35..83397fb64e 100644 --- a/docker/kustomize/config/devjim/api.env +++ b/docker/kustomize/config/devjim/api.env @@ -1,7 +1,5 @@ simdataCacheSize=10000000 -webDataPort=55556 - smtp_emailaddress=VCell_Support@uchc.edu smtp_hostname=vdsmtp.cam.uchc.edu smtp_port=25 diff --git a/docker/kustomize/config/devjim/data.env b/docker/kustomize/config/devjim/data.env index 6c878223f3..f8fc4815a4 100644 --- a/docker/kustomize/config/devjim/data.env +++ b/docker/kustomize/config/devjim/data.env @@ -1,7 +1,5 @@ simdataCacheSize=10000000 -webDataPort=55556 - s3ProxyPortExternal=8089 s3export_baseURL=https://localhost diff --git a/docker/kustomize/config/stage/api.env b/docker/kustomize/config/stage/api.env index 185b46b07e..5ecab23579 100644 --- a/docker/kustomize/config/stage/api.env +++ b/docker/kustomize/config/stage/api.env @@ -1,6 +1,5 @@ simdataCacheSize=10000000 -webDataPort=55556 serverPrefixV0=/api/v0 smtp_emailaddress=VCell_Support@uchc.edu diff --git a/docker/kustomize/config/stage/data.env b/docker/kustomize/config/stage/data.env index 6c878223f3..f8fc4815a4 100644 --- a/docker/kustomize/config/stage/data.env +++ b/docker/kustomize/config/stage/data.env @@ -1,7 +1,5 @@ simdataCacheSize=10000000 -webDataPort=55556 - s3ProxyPortExternal=8089 s3export_baseURL=https://localhost diff --git a/docker/swarm/docker-compose-small.yml b/docker/swarm/docker-compose-small.yml index 715c1337d1..2575a0f090 100644 --- a/docker/swarm/docker-compose-small.yml +++ b/docker/swarm/docker-compose-small.yml @@ -22,8 +22,6 @@ services: - mongodb_port_internal=27017 - mongodb_database=test - - webDataPort=${VCELL_WEB_DATA_PORT} - - smtp_hostname=${VCELL_SMTP_HOSTNAME} - smtp_port=${VCELL_SMTP_PORT} - smtp_emailaddress=${VCELL_SMTP_EMAILADDRESS} @@ -118,8 +116,6 @@ services: - s3ProxyPortExternal=${VCELL_S3PROXY_PORT_EXTERNAL} - s3export_baseURL=${VCELL_S3_EXPORT_BASEURL} - - webDataPort=${VCELL_WEB_DATA_PORT} - - servertype=CombinedData ports: - "127.0.0.1:${VCELL_DEBUG_PORT_VCELL_DATA}:8000" # java remote debugging diff --git a/docker/swarm/docker-compose.yml b/docker/swarm/docker-compose.yml index 1d24c21c37..de7b3b6d54 100644 --- a/docker/swarm/docker-compose.yml +++ b/docker/swarm/docker-compose.yml @@ -24,8 +24,6 @@ services: - mongodb_port_internal=27017 - mongodb_database=test - - webDataPort=${VCELL_WEB_DATA_PORT} - - smtp_hostname=${VCELL_SMTP_HOSTNAME} - smtp_port=${VCELL_SMTP_PORT} - smtp_emailaddress=${VCELL_SMTP_EMAILADDRESS} @@ -284,8 +282,6 @@ services: - s3ProxyPortExternal=${VCELL_S3PROXY_PORT_EXTERNAL} - s3export_baseURL=${VCELL_S3_EXPORT_BASEURL} - - webDataPort=${VCELL_WEB_DATA_PORT} - - 
servertype=CombinedData ports: - "${VCELL_DEBUG_PORT_VCELL_DATA}:8000" # java remote debugging diff --git a/docker/swarm/localconfig_mockslurm-not-used.sh b/docker/swarm/localconfig_mockslurm-not-used.sh index c314b4c409..0c3c20d8bf 100755 --- a/docker/swarm/localconfig_mockslurm-not-used.sh +++ b/docker/swarm/localconfig_mockslurm-not-used.sh @@ -120,7 +120,6 @@ VCELL_EXPORTDIR_HOST=/Volumes/vcell/export/ VCELL_MAX_JOBS_PER_SCAN=100 VCELL_MAX_ODE_JOBS_PER_USER=100 VCELL_MAX_PDE_JOBS_PER_USER=40 -VCELL_WEB_DATA_PORT=55556 VCELL_SSH_CMD_TIMEOUT=10000 VCELL_SSH_CMD_RESTORE_TIMEOUT=5 @@ -193,6 +192,5 @@ VCELL_TAG=$VCELL_TAG VCELL_UPDATE_SITE=http://vcell.org/webstart/${_site_camel} VCELL_VERSION_NUMBER=$VCELL_VERSION_NUMBER VCELL_VERSION=${_site_camel}_Version_${VCELL_VERSION_NUMBER}_build_${VCELL_BUILD_NUMBER} -VCELL_WEB_DATA_PORT=$VCELL_WEB_DATA_PORT EOF diff --git a/docker/swarm/localconfig_realslurm_oracle.sh b/docker/swarm/localconfig_realslurm_oracle.sh index c72a06d02b..d54f2bc0ab 100755 --- a/docker/swarm/localconfig_realslurm_oracle.sh +++ b/docker/swarm/localconfig_realslurm_oracle.sh @@ -145,7 +145,6 @@ VCELL_EXPORTDIR_HOST=/Volumes/vcell/export/ VCELL_MAX_JOBS_PER_SCAN=100 VCELL_MAX_ODE_JOBS_PER_USER=100 VCELL_MAX_PDE_JOBS_PER_USER=40 -VCELL_WEB_DATA_PORT=55556 VCELL_SSH_CMD_TIMEOUT=10000 VCELL_SSH_CMD_RESTORE_TIMEOUT=5 @@ -235,6 +234,5 @@ VCELL_TAG=$VCELL_TAG VCELL_UPDATE_SITE=http://vcell.org/webstart/${_site_camel} VCELL_VERSION_NUMBER=$VCELL_VERSION_NUMBER VCELL_VERSION=${_site_camel}_Version_${VCELL_VERSION_NUMBER}_build_${VCELL_BUILD_NUMBER} -VCELL_WEB_DATA_PORT=$VCELL_WEB_DATA_PORT EOF diff --git a/docker/swarm/localconfig_realslurm_oracle_zeke.sh b/docker/swarm/localconfig_realslurm_oracle_zeke.sh index 2637816146..f546f4a555 100755 --- a/docker/swarm/localconfig_realslurm_oracle_zeke.sh +++ b/docker/swarm/localconfig_realslurm_oracle_zeke.sh @@ -148,7 +148,6 @@ VCELL_EXPORTDIR_HOST=/media/zeke/DiskDrive/Home/Work/CCAM/TempStorage/export VCELL_MAX_JOBS_PER_SCAN=100 VCELL_MAX_ODE_JOBS_PER_USER=100 VCELL_MAX_PDE_JOBS_PER_USER=40 -VCELL_WEB_DATA_PORT=55556 VCELL_SSH_CMD_TIMEOUT=10000 VCELL_SSH_CMD_RESTORE_TIMEOUT=5 @@ -257,6 +256,6 @@ VCELL_UPDATE_SITE=http://vcell.org/webstart/${_site_camel} VCELL_VERSION_NUMBER=$VCELL_VERSION_NUMBER VCELL_VERSION=${_site_camel}_Version_${VCELL_VERSION_NUMBER}_build_${VCELL_BUILD_NUMBER} -VCELL_WEB_DATA_PORT=$VCELL_WEB_DATA_PORT + EOF diff --git a/docker/swarm/localconfig_realslurm_postgres.sh b/docker/swarm/localconfig_realslurm_postgres.sh index 40e5802799..18b5fa1f3d 100755 --- a/docker/swarm/localconfig_realslurm_postgres.sh +++ b/docker/swarm/localconfig_realslurm_postgres.sh @@ -145,7 +145,6 @@ VCELL_EXPORTDIR_HOST=/Volumes/vcell/export/ VCELL_MAX_JOBS_PER_SCAN=100 VCELL_MAX_ODE_JOBS_PER_USER=100 VCELL_MAX_PDE_JOBS_PER_USER=40 -VCELL_WEB_DATA_PORT=55556 VCELL_SSH_CMD_TIMEOUT=10000 VCELL_SSH_CMD_RESTORE_TIMEOUT=5 @@ -235,6 +234,5 @@ VCELL_TAG=$VCELL_TAG VCELL_UPDATE_SITE=http://vcell.org/webstart/${_site_camel} VCELL_VERSION_NUMBER=$VCELL_VERSION_NUMBER VCELL_VERSION=${_site_camel}_Version_${VCELL_VERSION_NUMBER}_build_${VCELL_BUILD_NUMBER} -VCELL_WEB_DATA_PORT=$VCELL_WEB_DATA_PORT EOF diff --git a/docker/swarm/serverconfig-uch.sh b/docker/swarm/serverconfig-uch.sh index 5b852ee94a..b5a405c9d9 100755 --- a/docker/swarm/serverconfig-uch.sh +++ b/docker/swarm/serverconfig-uch.sh @@ -167,7 +167,6 @@ VCELL_EXPORTDIR_HOST=/opt/vcelldata/export/ VCELL_MAX_JOBS_PER_SCAN=100 VCELL_MAX_ODE_JOBS_PER_USER=100 VCELL_MAX_PDE_JOBS_PER_USER=40 
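# NOTE: VCELL_WEB_DATA_PORT fed the Dockerfiles' webDataPort env var and, through
# it, the vcelldata.web.server.port JVM property read by the web-data services
# removed in the first two patches; nothing consumes this value any longer.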
-VCELL_WEB_DATA_PORT=55555 VCELL_SSH_CMD_TIMEOUT=10000 VCELL_SSH_CMD_RESTORE_TIMEOUT=5 @@ -258,6 +257,5 @@ VCELL_TAG=$VCELL_TAG VCELL_UPDATE_SITE=http://vcell.org/webstart/${_site_camel} VCELL_VERSION_NUMBER=$VCELL_VERSION_NUMBER VCELL_VERSION=${_site_camel}_Version_${VCELL_VERSION_NUMBER}_build_${VCELL_BUILD_NUMBER} -VCELL_WEB_DATA_PORT=$VCELL_WEB_DATA_PORT EOF diff --git a/vcell-core/src/main/java/cbit/vcell/resource/PropertyLoader.java b/vcell-core/src/main/java/cbit/vcell/resource/PropertyLoader.java index 0d65f24b1b..ccd75d0186 100644 --- a/vcell-core/src/main/java/cbit/vcell/resource/PropertyLoader.java +++ b/vcell-core/src/main/java/cbit/vcell/resource/PropertyLoader.java @@ -272,8 +272,6 @@ public static void setConfigProvider(VCellConfigProvider configProvider) { public static final String nagiosMonitorPort = record("test.monitor.port", ValueType.GEN); public static final String imageJVcellPluginURL = record("vcell.imagej.plugin.url", ValueType.GEN); - - public static final String webDataServerPort = record("vcelldata.web.server.port", ValueType.GEN); public static final String cmdSrvcSshCmdTimeoutMS = record("vcell.ssh.cmd.cmdtimeout", ValueType.GEN); public static final String cmdSrvcSshCmdRestoreTimeoutFactor = record("vcell.ssh.cmd.restoretimeout", ValueType.GEN); diff --git a/vcell-server/src/main/java/cbit/vcell/message/server/data/SimDataServer.java b/vcell-server/src/main/java/cbit/vcell/message/server/data/SimDataServer.java index eae689f455..4f55dce9d2 100644 --- a/vcell-server/src/main/java/cbit/vcell/message/server/data/SimDataServer.java +++ b/vcell-server/src/main/java/cbit/vcell/message/server/data/SimDataServer.java @@ -74,9 +74,6 @@ public SimDataServer() throws Exception { dataSetControllerImpl.addDataJobListener(this); // add export listener exportServiceImpl.addExportListener(this); - - SimDataWebService simDataWebService = new SimDataWebService(dataServerImpl); - simDataWebService.startWebService(); } public void init(SimDataServiceType serviceType) throws Exception { diff --git a/vcell-server/src/main/java/cbit/vcell/message/server/data/SimDataServerMain.java b/vcell-server/src/main/java/cbit/vcell/message/server/data/SimDataServerMain.java index 8c8fa9fd0b..0fb80a19ef 100644 --- a/vcell-server/src/main/java/cbit/vcell/message/server/data/SimDataServerMain.java +++ b/vcell-server/src/main/java/cbit/vcell/message/server/data/SimDataServerMain.java @@ -76,8 +76,7 @@ public static void main(String[] args) { PropertyLoader.exportBaseDirInternalProperty, PropertyLoader.simdataCacheSizeProperty, PropertyLoader.vcellapiKeystoreFile, - PropertyLoader.vcellapiKeystorePswdFile, - PropertyLoader.webDataServerPort + PropertyLoader.vcellapiKeystorePswdFile }; diff --git a/vcell-server/src/main/java/cbit/vcell/message/server/data/SimDataWebService.java b/vcell-server/src/main/java/cbit/vcell/message/server/data/SimDataWebService.java deleted file mode 100644 index 8306b2b01b..0000000000 --- a/vcell-server/src/main/java/cbit/vcell/message/server/data/SimDataWebService.java +++ /dev/null @@ -1,222 +0,0 @@ -package cbit.vcell.message.server.data; - -import cbit.vcell.resource.PropertyLoader; -import cbit.vcell.simdata.DataServerImpl; -import cbit.vcell.simdata.Hdf5Utils; -import cbit.vcell.solver.VCSimulationDataIdentifier; -import cbit.vcell.solver.VCSimulationIdentifier; -import cbit.vcell.solver.ode.ODESimData; -import cbit.vcell.util.ColumnDescription; -import ncsa.hdf.hdf5lib.H5; -import ncsa.hdf.hdf5lib.HDF5Constants; -import org.apache.commons.httpclient.URI; -import 
org.apache.http.*; -import org.apache.http.client.utils.URLEncodedUtils; -import org.apache.http.entity.StringEntity; -import org.apache.http.impl.bootstrap.HttpServer; -import org.apache.http.impl.bootstrap.ServerBootstrap; -import org.apache.http.protocol.HttpContext; -import org.apache.http.protocol.HttpRequestHandler; -import org.apache.http.ssl.SSLContexts; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.vcell.util.document.KeyValue; -import org.vcell.util.document.User; -import org.vcell.util.document.VCDataIdentifier; - -import javax.net.ssl.*; -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.net.URL; -import java.net.URLDecoder; -import java.nio.charset.Charset; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.security.KeyStore; -import java.security.SecureRandom; -import java.util.*; - -public class SimDataWebService { - public static final Logger lg = LogManager.getLogger(SimDataWebService.class); - private HttpServer server; - private DataServerImpl dataServerImpl = null; - - public SimDataWebService(DataServerImpl dataServerImpl) { - this.dataServerImpl = dataServerImpl; - } - - - public void startWebService() { - try (InputStream inputStream = new FileInputStream(new File(PropertyLoader.getRequiredProperty(PropertyLoader.vcellapiKeystoreFile)))) { - final KeyStore serverKeyStore = KeyStore.getInstance("jks"); - String pwd = Files.readAllLines(new File(PropertyLoader.getRequiredProperty(PropertyLoader.vcellapiKeystorePswdFile)).toPath()).get(0); - serverKeyStore.load(inputStream, pwd.toCharArray()); - KeyManagerFactory keyManagerFactory = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm()); - keyManagerFactory.init(serverKeyStore, pwd.toCharArray()); - KeyManager[] serverKeyManagers = keyManagerFactory.getKeyManagers(); - TrustManagerFactory trustManagerFactory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); - trustManagerFactory.init(serverKeyStore); - TrustManager[] serverTrustManagers = trustManagerFactory.getTrustManagers(); - final SSLContext sslContext = SSLContexts.createDefault(); - sslContext.init(serverKeyManagers, serverTrustManagers, new SecureRandom()); - int listenPort = Integer.parseInt(PropertyLoader.getRequiredProperty(PropertyLoader.webDataServerPort)); - server = ServerBootstrap.bootstrap().registerHandler("/simhdf5/*", new HttpRequestHandler() { - @Override - public void handle(HttpRequest request, HttpResponse response, HttpContext context) throws HttpException, IOException { - try { - URI uri = new URI(request.getRequestLine().getUri(),true); - final List parse = URLEncodedUtils.parse(uri.getQuery(), Charset.forName("utf-8")); - lg.info(uri.getQuery()); - lg.info(uri.getPath()); - final Path path = Paths.get(uri.getPath()); - final Iterator iterator = path.iterator(); - final String SIMDATADDF5 = "simhdf5"; - final String ODE = "ode"; - String simdataRoute = iterator.next().toString(); - String dataType = iterator.next().toString(); - if(simdataRoute.toLowerCase().equals(SIMDATADDF5) && dataType.toLowerCase().equals(ODE)) { - String simID = null; - TreeSet jobIDs = new TreeSet(); - String userKey = null; - String userid = null; - double blank = -1.0; - for(NameValuePair nvp:parse) { - if(nvp.getName().toLowerCase().equals("simid")) { - simID = nvp.getValue(); - }else if(nvp.getName().toLowerCase().equals("jobid")) { - String jobStr = 
URLDecoder.decode(nvp.getValue(), "UTF-8");// integer Separated by commas - StringTokenizer st = new StringTokenizer(jobStr,","); - while(st.hasMoreElements()) { - jobIDs.add(Integer.parseInt(st.nextToken())); - } - }else if(nvp.getName().toLowerCase().equals("userkey")) { - userKey = nvp.getValue(); - }else if(nvp.getName().toLowerCase().equals("userid")) { - userid = URLDecoder.decode(nvp.getValue(), "UTF-8"); - }else if(nvp.getName().toLowerCase().equals("blank")) { - blank = Double.parseDouble(nvp.getValue()); - } - } - User user = new User(userid,new KeyValue(userKey)); - VCSimulationIdentifier vcsid = new VCSimulationIdentifier(new KeyValue(simID), user); - File hdf5File = createHdf5(vcsid, jobIDs.toArray(new Integer[0]), blank); - - String exportBaseURL = PropertyLoader.getRequiredProperty(PropertyLoader.exportBaseURLProperty); - URL url = new URL(exportBaseURL + hdf5File.getName()); - response.setStatusCode(HttpStatus.SC_MOVED_TEMPORARILY); - response.addHeader("Location",url.toString()); - response.setEntity(null); - return; - } - response.setStatusCode(HttpStatus.SC_NOT_FOUND); - response.setEntity(new StringEntity("Not Found")); - } catch (Exception e) { - lg.error(e.getMessage(), e); - response.setStatusCode(HttpStatus.SC_INTERNAL_SERVER_ERROR); - response.setEntity(new StringEntity(e.getMessage())); - - } - } - }).setListenerPort(listenPort).setSslContext(sslContext).create(); - server.start(); - - }catch(Exception e) { - lg.error(e.getMessage(), e); - } - } - - private File createHdf5(VCSimulationIdentifier vcsid,Integer[] scanJobs,double blankCellValue/*for Histogram*/) throws Exception{ - File hdf5TempFile = null; - int hdf5FileID = -1; - int jobGroupID = -1; - try { - String exportBaseDir = PropertyLoader.getRequiredProperty(PropertyLoader.exportBaseDirInternalProperty); - hdf5TempFile = File.createTempFile("webexport_"+vcsid.getSimulationKey()+"_", ".hdf", new File(exportBaseDir)); - hdf5FileID = H5.H5Fcreate(hdf5TempFile.getAbsolutePath(), HDF5Constants.H5F_ACC_TRUNC,HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); - Hdf5Utils.insertAttribute(hdf5FileID, "simID",vcsid.getSimulationKey().toString());//Hdf5Utils.writeHDF5Dataset(hdf5FileID, "simID", null,vcsid.getSimulationKey().toString() , true); - Hdf5Utils.insertAttribute(hdf5FileID,"exportUser",vcsid.getOwner().getName()) ;//Hdf5Utils.writeHDF5Dataset(hdf5FileID, "exportUser", null,vcsid.getOwner().getName() , true); - - for(int scan=0;scan orderedColumnNames = new TreeSet(new Comparator() { - @Override - public int compare(ColumnDescription o1, ColumnDescription o2) { - if(o1.getName().equals("t") && o2.getName().equals("t")) {//'t' as first in the list - return 0; - }else if(o1.getName().equals("t")) { - return -1; - }else if(o2.getName().equals("t")) { - return 1; - } - return o1.getName().compareToIgnoreCase(o2.getName()); - }}); - orderedColumnNames.addAll(Arrays.asList(odeSimData.getColumnDescriptions())); - jobGroupID = (int) Hdf5Utils.createGroup(hdf5FileID, "Set "+scan); - //writeHDF5Dataset(hdf5FileID, "Set "+scan, null, null, false); - Hdf5Utils.HDF5WriteHelper help0 = Hdf5Utils.createDataset(jobGroupID, "data", new long[] {allColumnsCount,allRowsCount}); - //(HDF5WriteHelper) Hdf5Utils.writeHDF5Dataset(jobGroupID, "data", new long[] {allColumnsCount,allRowsCount}, new Object[] {}, false); - double[] fromData = new double[allColumnsCount*allRowsCount]; - int index = 0; - ArrayList dataTypes = new ArrayList(); - ArrayList dataIDs = new ArrayList(); - ArrayList dataShapes = new ArrayList(); - ArrayList 
dataLabels = new ArrayList(); - ArrayList dataNames = new ArrayList(); - Iterator columnNamesIterator = orderedColumnNames.iterator(); - while(columnNamesIterator.hasNext()) { - - ColumnDescription colDescr = columnNamesIterator.next(); - final String columnName = colDescr.getName(); - final int columnIndex = odeSimData.findColumn(columnName); - dataTypes.add("float64"); - dataIDs.add("data_set_"+columnName); - dataShapes.add(allRowsCount+""); - dataLabels.add(columnName); - dataNames.add(columnName/*name*/); - double[] columnData = odeSimData.extractColumn(columnIndex); - for(int myrows=0;myrows Date: Wed, 24 Apr 2024 08:52:13 -0400 Subject: [PATCH 03/16] remove unused Hdf5DataProcessingReaderNative.java --- .../Hdf5DataProcessingReaderNative.java | 696 ------------------ 1 file changed, 696 deletions(-) delete mode 100644 vcell-core/src/main/java/cbit/vcell/simdata/Hdf5DataProcessingReaderNative.java diff --git a/vcell-core/src/main/java/cbit/vcell/simdata/Hdf5DataProcessingReaderNative.java b/vcell-core/src/main/java/cbit/vcell/simdata/Hdf5DataProcessingReaderNative.java deleted file mode 100644 index e023c0c261..0000000000 --- a/vcell-core/src/main/java/cbit/vcell/simdata/Hdf5DataProcessingReaderNative.java +++ /dev/null @@ -1,696 +0,0 @@ -package cbit.vcell.simdata; - -import cbit.vcell.math.VariableType; -import cbit.vcell.resource.NativeLib; -import cbit.vcell.solver.AnnotatedFunction; -import cbit.vcell.solver.Simulation; -import ncsa.hdf.object.*; -import ncsa.hdf.object.h5.H5ScalarDS; -import org.vcell.util.Extent; -import org.vcell.util.ISize; -import org.vcell.util.Origin; -import org.vcell.util.document.TSJobResultsNoStats; -import org.vcell.util.document.TimeSeriesJobResults; -import org.vcell.util.document.TimeSeriesJobSpec; - -import java.io.File; -import java.io.FileNotFoundException; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; - -import static cbit.vcell.simdata.SimDataConstants.*; - - -public class Hdf5DataProcessingReaderNative { - - - public DataOperationResults.DataProcessingOutputInfo getDataProcessingOutput(DataOperation.DataProcessingOutputInfoOP infoOP, File dataProcessingOutputFileHDF5) throws Exception { - var outputInfo1 = (DataOperationResults.DataProcessingOutputInfo)getDataProcessingOutput_internal(infoOP, dataProcessingOutputFileHDF5); - Hdf5DataProcessingReaderPure hdf5DataProcessingReaderPure = new Hdf5DataProcessingReaderPure(); - var outputInfo2 = hdf5DataProcessingReaderPure.getDataProcessingOutput(infoOP, dataProcessingOutputFileHDF5); - if (outputInfo1.getVariableNames().length != outputInfo2.getVariableNames().length) { - throw new Exception("Variable names length mismatch"); - } - for (int i = 0; i < outputInfo1.getVariableNames().length; i++) { - if (!outputInfo1.getVariableNames()[i].equals(outputInfo2.getVariableNames()[i])) { - throw new Exception("Variable names mismatch"); - } - } - System.out.println("hello"); - return outputInfo1; - } - - - public DataOperationResults.DataProcessingOutputDataValues getDataProcessingOutput(DataOperation.DataProcessingOutputDataValuesOP dataValuesOp, File dataProcessingOutputFileHDF5) throws Exception { - var values1 = (DataOperationResults.DataProcessingOutputDataValues)getDataProcessingOutput_internal(dataValuesOp, dataProcessingOutputFileHDF5); - Hdf5DataProcessingReaderPure hdf5DataProcessingReaderPure = new Hdf5DataProcessingReaderPure(); - var values2 = hdf5DataProcessingReaderPure.getDataProcessingOutput(dataValuesOp, 
dataProcessingOutputFileHDF5); - if (values1.getDataValues().length != values2.getDataValues().length) { - throw new Exception("Data values length mismatch"); - } - return values1; - } - - - public DataOperationResults.DataProcessingOutputTimeSeriesValues getDataProcessingOutput(DataOperation.DataProcessingOutputTimeSeriesOP timeSeriesOp, File dataProcessingOutputFileHDF5) throws Exception { - var values1 = (DataOperationResults.DataProcessingOutputTimeSeriesValues)getDataProcessingOutput_internal(timeSeriesOp, dataProcessingOutputFileHDF5); - Hdf5DataProcessingReaderPure hdf5DataProcessingReaderPure = new Hdf5DataProcessingReaderPure(); - var values2 = hdf5DataProcessingReaderPure.getDataProcessingOutput(timeSeriesOp, dataProcessingOutputFileHDF5); - return values1; - } - - private DataOperationResults getDataProcessingOutput_internal(DataOperation dataOperation, File dataProcessingOutputFileHDF5) throws Exception { - NativeLib.HDF5.load(); - DataOperationResults dataProcessingOutputResults = null; - FileFormat hdf5FileFormat = null; - try{ - if (dataProcessingOutputFileHDF5.exists()) { - // retrieve an instance of H5File - FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5); - if (fileFormat == null){ - throw new Exception("Cannot find HDF5 FileFormat."); - } - // open the file with read-only access - hdf5FileFormat = fileFormat.open(dataProcessingOutputFileHDF5.getAbsolutePath(), FileFormat.READ); - hdf5FileFormat.setMaxMembers(Simulation.MAX_LIMIT_SPATIAL_TIMEPOINTS); - // open the file and retrieve the file structure - hdf5FileFormat.open(); - Group root = (Group)((javax.swing.tree.DefaultMutableTreeNode)hdf5FileFormat.getRootNode()).getUserObject(); - if(dataOperation instanceof DataOperation.DataProcessingOutputInfoOP){ - DataSetControllerImpl.DataProcessingHelper dataProcessingHelper = new DataSetControllerImpl.DataProcessingHelper(); - iterateHDF5(root,"",dataProcessingHelper); - dataProcessingOutputResults = new DataOperationResults.DataProcessingOutputInfo(dataOperation.getVCDataIdentifier(), - dataProcessingHelper.getVarNames(), - dataProcessingHelper.getVarISizes(), - dataProcessingHelper.times, - dataProcessingHelper.getVarUnits(), - dataProcessingHelper.getPostProcessDataTypes(), - dataProcessingHelper.getVarOrigins(), - dataProcessingHelper.getVarExtents(), - dataProcessingHelper.getVarStatValues()); - //map function names to PostProcess state variable name - ArrayList postProcessImageVarNames = new ArrayList(); - for (int i = 0; i < ((DataOperationResults.DataProcessingOutputInfo)dataProcessingOutputResults).getVariableNames().length; i++) { - String variableName = ((DataOperationResults.DataProcessingOutputInfo)dataProcessingOutputResults).getVariableNames()[i]; - if(((DataOperationResults.DataProcessingOutputInfo)dataProcessingOutputResults).getPostProcessDataType(variableName).equals(DataOperationResults.DataProcessingOutputInfo.PostProcessDataType.image)){ - postProcessImageVarNames.add(variableName); - } - } - HashMap mapFunctionNameToStateVarName = null; - if(((DataOperation.DataProcessingOutputInfoOP)dataOperation).getOutputContext() != null){ - mapFunctionNameToStateVarName = new HashMap(); - for (int i = 0; i < ((DataOperation.DataProcessingOutputInfoOP)dataOperation).getOutputContext().getOutputFunctions().length; i++) { - AnnotatedFunction annotatedFunction = ((DataOperation.DataProcessingOutputInfoOP)dataOperation).getOutputContext().getOutputFunctions()[i]; - if(annotatedFunction.getFunctionType().equals(VariableType.POSTPROCESSING)){ - 
String[] symbols = annotatedFunction.getExpression().flatten().getSymbols(); - //Find any PostProcess state var that matches a symbol in the function - for (int j = 0; j < symbols.length; j++) { - if(postProcessImageVarNames.contains(symbols[j])){ - mapFunctionNameToStateVarName.put(annotatedFunction.getName(), symbols[j]); - break; - } - } - } - } - } - if(mapFunctionNameToStateVarName != null && mapFunctionNameToStateVarName.size() > 0){ - dataProcessingOutputResults = new DataOperationResults.DataProcessingOutputInfo(((DataOperationResults.DataProcessingOutputInfo)dataProcessingOutputResults),mapFunctionNameToStateVarName); - } - }else{ - OutputContext outputContext = dataOperation.getOutputContext(); - String[] variableNames = null; - DataOperation.DataProcessingOutputDataValuesOP.DataIndexHelper dataIndexHelper = null; - DataOperation.DataProcessingOutputDataValuesOP.TimePointHelper timePointHelper = null; - if(dataOperation instanceof DataOperation.DataProcessingOutputDataValuesOP){ - variableNames = new String[] {((DataOperation.DataProcessingOutputDataValuesOP)dataOperation).getVariableName()}; - dataIndexHelper = ((DataOperation.DataProcessingOutputDataValuesOP)dataOperation).getDataIndexHelper(); - timePointHelper = ((DataOperation.DataProcessingOutputDataValuesOP)dataOperation).getTimePointHelper(); - }else if(dataOperation instanceof DataOperation.DataProcessingOutputTimeSeriesOP){ - variableNames = ((DataOperation.DataProcessingOutputTimeSeriesOP)dataOperation).getTimeSeriesJobSpec().getVariableNames(); - TimeSeriesJobSpec timeSeriesJobSpec = ((DataOperation.DataProcessingOutputTimeSeriesOP)dataOperation).getTimeSeriesJobSpec(); - double[] specificTimepoints = extractTimeRange(((DataOperation.DataProcessingOutputTimeSeriesOP)dataOperation).getAllDatasetTimes(), timeSeriesJobSpec.getStartTime(), timeSeriesJobSpec.getEndTime()); - timePointHelper = DataOperation.DataProcessingOutputDataValuesOP.TimePointHelper.createSpecificTimePointHelper(specificTimepoints); - timeSeriesJobSpec.initIndices(); - dataIndexHelper = DataOperation.DataProcessingOutputDataValuesOP.DataIndexHelper.createSpecificDataIndexHelper(timeSeriesJobSpec.getIndices()[0]); - }else{ - throw new Exception("Unknown Dataoperation "+dataOperation.getClass().getName()); - } - if(variableNames.length != 1){ - throw new Exception("Only 1 variable request at a time"); - } - AnnotatedFunction[] annotatedFunctions = (outputContext==null?null:outputContext.getOutputFunctions()); - AnnotatedFunction foundFunction = null; - if(annotatedFunctions != null){ - for (int i = 0; i < annotatedFunctions.length; i++) { - if(annotatedFunctions[i].getName().equals(variableNames[0])){ - foundFunction = annotatedFunctions[i]; - break; - } - } - } - double[] alltimes = null; - if(foundFunction != null){ - DataOperationResults.DataProcessingOutputInfo dataProcessingOutputInfo = - getDataProcessingOutput(new DataOperation.DataProcessingOutputInfoOP(dataOperation.getVCDataIdentifier(),false,dataOperation.getOutputContext()), dataProcessingOutputFileHDF5); - alltimes = dataProcessingOutputInfo.getVariableTimePoints(); - DataSetControllerImpl.FunctionHelper functionHelper = DataSetControllerImpl.getPostProcessStateVariables(foundFunction, dataProcessingOutputInfo); - DataSetControllerImpl.DataProcessingHelper dataProcessingHelper = new DataSetControllerImpl.DataProcessingHelper(functionHelper.postProcessStateVars,timePointHelper,dataIndexHelper); - iterateHDF5(root,"",dataProcessingHelper); - dataProcessingOutputResults = - 
DataSetControllerImpl.evaluatePostProcessFunction(dataProcessingOutputInfo, functionHelper.postProcessStateVars, dataProcessingHelper.specificDataValues, - dataIndexHelper, timePointHelper, functionHelper.flattenedBoundExpression,variableNames[0]); - }else{ - DataSetControllerImpl.DataProcessingHelper dataProcessingHelper = - new DataSetControllerImpl.DataProcessingHelper(new String[] {variableNames[0]},timePointHelper,dataIndexHelper); - iterateHDF5(root,"",dataProcessingHelper); - alltimes = dataProcessingHelper.times; - if(dataProcessingHelper.specificDataValues == null){ - throw new Exception("Couldn't find postprocess data as specified for var="+variableNames[0]); - } - dataProcessingOutputResults = new DataOperationResults.DataProcessingOutputDataValues(dataOperation.getVCDataIdentifier(), - variableNames[0],timePointHelper,dataIndexHelper, dataProcessingHelper.specificDataValues[0]); - } - if(dataOperation instanceof DataOperation.DataProcessingOutputTimeSeriesOP){ - TimeSeriesJobResults timeSeriesJobResults = null; - DataOperation.DataProcessingOutputTimeSeriesOP dataProcessingOutputTimeSeriesOP = (DataOperation.DataProcessingOutputTimeSeriesOP)dataOperation; - double[][] dataValues = ((DataOperationResults.DataProcessingOutputDataValues)dataProcessingOutputResults).getDataValues();//[time][data] - double[] desiredTimes = (timePointHelper.isAllTimePoints()?alltimes:timePointHelper.getTimePoints()); - double[][][] timeSeriesFormatedValuesArr = new double[variableNames.length][dataIndexHelper.getDataIndexes().length+1][desiredTimes.length]; - for (int i = 0; i < timeSeriesFormatedValuesArr.length; i++) {//var - for (int j = 0; j < timeSeriesFormatedValuesArr[i].length; j++) {//index - if(j==0){ - timeSeriesFormatedValuesArr[i][j] = desiredTimes; - continue; - } - for (int k = 0; k < timeSeriesFormatedValuesArr[i][j].length; k++) {//time - //assume 1 variable for now - timeSeriesFormatedValuesArr[i][j][k] = dataValues[k][j-1]; - } - } - } - - if(dataProcessingOutputTimeSeriesOP.getTimeSeriesJobSpec().isCalcSpaceStats()){ - DataSetControllerImpl.SpatialStatsInfo spatialStatsInfo = new DataSetControllerImpl.SpatialStatsInfo(); - spatialStatsInfo.bWeightsValid = false; - timeSeriesJobResults = - DataSetControllerImpl.calculateStatisticsFromWhole(dataProcessingOutputTimeSeriesOP.getTimeSeriesJobSpec(), timeSeriesFormatedValuesArr, timePointHelper.getTimePoints(), spatialStatsInfo); - }else{ - timeSeriesJobResults = - new TSJobResultsNoStats( - variableNames, - new int[][] {dataIndexHelper.getDataIndexes()}, - timePointHelper.getTimePoints(), - timeSeriesFormatedValuesArr); - } - dataProcessingOutputResults = new DataOperationResults.DataProcessingOutputTimeSeriesValues(dataOperation.getVCDataIdentifier(), timeSeriesJobResults); - } - } - }else{ - throw new FileNotFoundException("Data Processing Output file '"+dataProcessingOutputFileHDF5.getPath()+"' not found"); - } - }catch(Exception e){ - DataSetControllerImpl.lg.error(e.getMessage(), e); - }finally{ - if(hdf5FileFormat != null){try{hdf5FileFormat.close();}catch(Exception e){ - DataSetControllerImpl.lg.error(e.getMessage(), e);}} - } - - return dataProcessingOutputResults; - } - - private static double[] extractTimeRange(double[] alltimes, double startTime, double stoptime){ - ArrayList selectedtimePointsList = new ArrayList(); - for (int i = 0; i < alltimes.length; i++) { - if(alltimes[i] >= startTime && alltimes[i] <= stoptime){ - selectedtimePointsList.add(alltimes[i]); - } - } - double[] selectedTimePoints = new 
double[selectedtimePointsList.size()]; - for (int j = 0; j < selectedtimePointsList.size(); j++) { - selectedTimePoints[j] = selectedtimePointsList.get(j); - } - return selectedTimePoints; - } - - - private static void iterateHDF5(HObject hObject, String indent, DataSetControllerImpl.DataProcessingHelper dataProcessingHelper) throws Exception{ - if(hObject instanceof Group){ - Group group = ((Group)hObject); - printInfo(group,indent); - if(group.getName().equals("/") || group.getName().equals(SimDataConstants.DATA_PROCESSING_OUTPUT_EXTENSION_POSTPROCESSING)){ - List postProcessMembers = ((Group)hObject).getMemberList(); - for(HObject nextHObject:postProcessMembers){ - iterateHDF5(nextHObject, indent+" ", dataProcessingHelper); - } - }else if(group.getName().equals(SimDataConstants.DATA_PROCESSING_OUTPUT_EXTENSION_VARIABLESTATISTICS) && dataProcessingHelper.isInfoOnly()){ - populateStatNamesAndUnits(hObject, dataProcessingHelper); - List statDataAtEachTime = group.getMemberList(); - dataProcessingHelper.statValues = new double[dataProcessingHelper.statVarNames.length][statDataAtEachTime.size()]; - for(HObject nextStatData:statDataAtEachTime){ - printInfo(nextStatData,indent+" "); - processDims(nextStatData, dataProcessingHelper,false);//always get stats data when ask for info - double[] stats = (double[])dataProcessingHelper.tempData; - int timeIndex = Integer.parseInt(nextStatData.getName().substring("time".length())); - for (int j = 0; j < stats.length; j++) { - dataProcessingHelper.statValues[j][timeIndex] = stats[j]; - } - } - }else{//must be image data - if(dataProcessingHelper.isInfoOnly()){ - dataProcessingHelper.imageNames = new ArrayList(); - dataProcessingHelper.imageISize = new ArrayList(); - dataProcessingHelper.imageOrigin = new ArrayList(); - dataProcessingHelper.imageExtent = new ArrayList(); - Origin imgDataOrigin; - Extent imgDataExtent; - HashMap attrHashMap = getHDF5Attributes(group); - if(attrHashMap.size() == 2){ - imgDataOrigin = new Origin(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINX)), 0, 0); - imgDataExtent = new Extent(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTX)), 1, 1);//this is 1D, however the extentY, Z cannot take 0 - } - else if(attrHashMap.size() == 4){ - imgDataOrigin = new Origin(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINX)), Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINY)), 0); - imgDataExtent = new Extent(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTX)), Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTY)), 1);//this is 2D, however the extentZ cannot take 0 - } - else if(attrHashMap.size() == 6){ - imgDataOrigin = new Origin(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINX)), Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINY)), Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINZ))); - imgDataExtent = new Extent(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTX)), Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTY)), Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTZ))); - }else{ - throw new Exception("Unexpected number of origin/extent values"); - } - dataProcessingHelper.imageNames.add(hObject.getName()); - dataProcessingHelper.imageOrigin.add(imgDataOrigin); - dataProcessingHelper.imageExtent.add(imgDataExtent); - //get ISize - processDims((H5ScalarDS)(((Group)hObject).getMemberList()).get(0), dataProcessingHelper,true); - long[] dims = dataProcessingHelper.tempDims; - ISize isize = new 
ISize((int)dims[0], (int)(dims.length>1?dims[1]:1), (int)(dims.length>2?dims[2]:1)); - dataProcessingHelper.imageISize.add(isize); - }else{ - int currentVarNameIndex = -1; - for (int i = 0; i < dataProcessingHelper.specificVarNames.length; i++) { - if(group.getName().equals(dataProcessingHelper.specificVarNames[i])){ - currentVarNameIndex = i; - break; - } - } - if(currentVarNameIndex == -1){ - return;//skip this group - } - dataProcessingHelper.specificDataValues[currentVarNameIndex] = new double[(dataProcessingHelper.specificTimePointHelper.isAllTimePoints()?dataProcessingHelper.times.length:dataProcessingHelper.specificTimePointHelper.getTimePoints().length)][]; - List imageDataAtEachTime = ((Group)hObject).getMemberList(); - int foundTimePointIndex = 0; - for(HObject nextImageData:imageDataAtEachTime){ -// if(dataProcessingHelper.isInfoOnly()){ -// printInfo(nextImageData,indent+" "); -// processDims(nextImageData, dataProcessingHelper,true); -// long[] dims = dataProcessingHelper.tempDims; -// ISize isize = new ISize((int)dims[0], (int)(dims.length>1?dims[1]:1), (int)(dims.length>2?dims[2]:1)); -// dataProcessingHelper.imageISize.add(isize); -// break;//only need 1st one for info -// }else{ - int hdf5GroupTimeIndex = Integer.parseInt(nextImageData.getName().substring(SimDataConstants.DATA_PROCESSING_OUTPUT_EXTENSION_TIMEPREFIX.length())); - if(dataProcessingHelper.specificTimePointHelper.isAllTimePoints() || dataProcessingHelper.specificTimePointHelper.getTimePoints()[foundTimePointIndex] == dataProcessingHelper.times[hdf5GroupTimeIndex]){ - - int timeIndex = (dataProcessingHelper.specificTimePointHelper.isAllTimePoints()?hdf5GroupTimeIndex:foundTimePointIndex); - processDims(nextImageData, dataProcessingHelper,false); - long[] dims = dataProcessingHelper.tempDims; - ISize isize = new ISize((int)dims[0], (int)(dims.length>1?dims[1]:1), (int)(dims.length>2?dims[2]:1)); - if(dataProcessingHelper.specificDataIndexHelper.isAllDataIndexes()){ - dataProcessingHelper.specificDataValues[currentVarNameIndex][timeIndex] = (double[])dataProcessingHelper.tempData; - }else if(dataProcessingHelper.specificDataIndexHelper.isSingleSlice()){ - dataProcessingHelper.specificDataValues[currentVarNameIndex][timeIndex] = new double[isize.getX()*isize.getY()]; - System.arraycopy( - (double[])dataProcessingHelper.tempData,dataProcessingHelper.specificDataIndexHelper.getSliceIndex()*(isize.getX()*isize.getY()), - dataProcessingHelper.specificDataValues[currentVarNameIndex][timeIndex], 0, isize.getX()*isize.getY()); - }else{ - dataProcessingHelper.specificDataValues[currentVarNameIndex][timeIndex] = new double[dataProcessingHelper.specificDataIndexHelper.getDataIndexes().length]; - for (int i = 0; i < dataProcessingHelper.specificDataIndexHelper.getDataIndexes().length; i++) { - dataProcessingHelper.specificDataValues[currentVarNameIndex][timeIndex][i] = ((double[])dataProcessingHelper.tempData)[dataProcessingHelper.specificDataIndexHelper.getDataIndexes()[i]]; - } - } - foundTimePointIndex++; - if(!dataProcessingHelper.specificTimePointHelper.isAllTimePoints() && foundTimePointIndex == dataProcessingHelper.specificTimePointHelper.getTimePoints().length){ - //break out after we get our data - break; - } - } - -// } - } - } - } - }else if(hObject instanceof Dataset){ - Dataset dataset = (Dataset)hObject; - printInfo(dataset,indent); - if(dataset.getName().equals(SimDataConstants.DATA_PROCESSING_OUTPUT_EXTENSION_TIMES)){ - processDims(hObject, dataProcessingHelper,false); - dataProcessingHelper.times = 
(double[])dataProcessingHelper.tempData; - } - }else if(hObject instanceof Datatype){ - printInfo(hObject, indent); - }else{ - printInfo(hObject, indent); - } - } - private static HashMap getHDF5Attributes(HObject hObject) throws Exception{ - HashMap attrHashMap = new HashMap(); - List metaDataL = hObject.getMetadata(); - if(metaDataL != null){ - for (int j = 0; j < metaDataL.size(); j++) { - Attribute attr = (Attribute)metaDataL.get(j); - String attrValue = attr.toString(","); - //System.out.print(" "+attr.getName()+"='"+attrValue+"'"); - attrHashMap.put(attr.getName(),attr.toString(",")); - } - } - return attrHashMap; - } - - private static void printInfo(HObject hObject,String indent) throws Exception{ - if(true){return;} - System.out.println(indent+hObject.getName()+":"+hObject.getClass().getName()); - List metaDatas = hObject.getMetadata(); - for(Object metaData:metaDatas){ - if(metaData instanceof Attribute){ - Attribute attribute = (Attribute)metaData; - System.out.println(indent+"metadata="+attribute.getName()+" "+attribute.getType().getDatatypeDescription()); - }else{ - System.out.println(indent+"metadata="+metaData.getClass().getName()); - } - } - } - private static void processDims(HObject hObject, DataSetControllerImpl.DataProcessingHelper dataProcessingHelper, boolean bInfoOnly) throws Exception{ - H5ScalarDS h5ScalarDS = (H5ScalarDS)hObject; - h5ScalarDS.init(); - dataProcessingHelper.tempDims = h5ScalarDS.getDims(); - - //make sure all dimensions are selected for loading if 3D - //note: for 3D, only 1st slice selected by default - long[] selectedDims = h5ScalarDS.getSelectedDims(); - if(selectedDims != null && selectedDims.length > 2){ - //changes internal class variable used during read - selectedDims[2] = dataProcessingHelper.tempDims[2]; - } - if(!bInfoOnly){ - //load all data - dataProcessingHelper.tempData = h5ScalarDS.read(); - } - - if(dataProcessingHelper.tempDims != null){ - if(dataProcessingHelper.tempDims.length > 1){ - //For HDF5View (x stored in index 1) and (y stored in index 0) so must switch back to normal assumption - long dimsY = dataProcessingHelper.tempDims[0]; - dataProcessingHelper.tempDims[0] = dataProcessingHelper.tempDims[1]; - dataProcessingHelper.tempDims[1] = dimsY; - } -// //uncomment for Debug -// System.out.print(" dims=("); -// for (int j = 0; j < dataProcessingHelper.tempDims.length; j++) { -// System.out.print((j>0?"x":"")+dataProcessingHelper.tempDims[j]); -// } -// System.out.print(")"); - } - } - private static void populateStatNamesAndUnits(HObject hObject, DataSetControllerImpl.DataProcessingHelper dataProcessingHelper) throws Exception{ - if(!hObject.getName().equals(SimDataConstants.DATA_PROCESSING_OUTPUT_EXTENSION_VARIABLESTATISTICS)){ - throw new Exception("expecting object name "+SimDataConstants.DATA_PROCESSING_OUTPUT_EXTENSION_VARIABLESTATISTICS); - } - final String NAME_ATTR = "_name"; - final String UNIT_ATTR = "_unit"; - final String STAT_PREFIX = "comp_"; - - List metaDataL = hObject.getMetadata(); - if(metaDataL != null){ - HashMap attrHashMap = getHDF5Attributes(hObject);//map contains the same number of names and attributes - String[] variableStatNames = null; - String[] variableUnits = null; - Iterator attrIterTemp = attrHashMap.keySet().iterator(); - boolean bHasUnit = false; - for (int j = 0; j < attrHashMap.size(); j++) { - String compVal = attrIterTemp.next(); - if(compVal.contains(NAME_ATTR) || compVal.contains(UNIT_ATTR)){ - bHasUnit = true; - break; - } - } - if(bHasUnit){ - variableStatNames = new
String[attrHashMap.size()/2]; - variableUnits = new String[attrHashMap.size()/2]; - }else{ - variableStatNames = new String[attrHashMap.size()]; // old way - } - Iterator attrIter = attrHashMap.keySet().iterator(); - for (int j = 0; j < attrHashMap.size(); j++) { - String compVal = attrIter.next(); - if(compVal.contains(NAME_ATTR)){ - int compVarIdx = Integer.parseInt(compVal.substring(STAT_PREFIX.length(), compVal.indexOf('_', STAT_PREFIX.length()))); - variableStatNames[compVarIdx] = attrHashMap.get(compVal); - }else if(compVal.contains(UNIT_ATTR)){ - int compVarIdx = Integer.parseInt(compVal.substring(STAT_PREFIX.length(), compVal.indexOf('_', STAT_PREFIX.length()))); - variableUnits[compVarIdx] = attrHashMap.get(compVal); - }else{//old way for var names(e.g. comp_0 = abc) with no "_name" or "_unit" - int compVarIdx = Integer.parseInt(compVal.substring(STAT_PREFIX.length())); - variableStatNames[compVarIdx] = attrHashMap.get(compVal); - } - } - dataProcessingHelper.statVarNames = variableStatNames; - dataProcessingHelper.statVarUnits = variableUnits; - } - } - - //uncomment it for Debug -//private static String DATASETNAME = "/"; -//enum H5O_type { -// H5O_TYPE_UNKNOWN(-1), // Unknown object type -// H5O_TYPE_GROUP(0), // Object is a group -// H5O_TYPE_DATASET(1), // Object is a dataset -// H5O_TYPE_NAMED_DATATYPE(2), // Object is a named data type -// H5O_TYPE_NTYPES(3); // Number of different object types -// private static final Map lookup = new HashMap(); -// -// static { -// for (H5O_type s : EnumSet.allOf(H5O_type.class)) -// lookup.put(s.getCode(), s); -// } -// -// private int code; -// -// H5O_type(int layout_type) { -// this.code = layout_type; -// } -// -// public int getCode() { -// return this.code; -// } -// -// public static H5O_type get(int code) { -// return lookup.get(code); -// } -//} -// -//public static void do_iterate(File hdfFile) { -// int file_id = -1; -// -// // Open a file using default properties. -// try { -// file_id = H5.H5Fopen(hdfFile.getAbsolutePath(), HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); -// } -// catch (Exception e) { -// lg.error(e); -// } -// -// // Begin iteration. -// System.out.println("Objects in root group:"); -// try { -// if (file_id >= 0) { -// int count = (int)H5.H5Gn_members(file_id, DATASETNAME); -// String[] oname = new String[count]; -// int[] otype = new int[count]; -// int[] ltype = new int[count]; -// long[] orefs = new long[count]; -// H5.H5Gget_obj_info_all(file_id, DATASETNAME, oname, otype, ltype, orefs, HDF5Constants.H5_INDEX_NAME); -// -// // Get type of the object and display its name and type. -// for (int indx = 0; indx < otype.length; indx++) { -// switch (H5O_type.get(otype[indx])) { -// case H5O_TYPE_GROUP: -// System.out.println(" Group: " + oname[indx]); -// break; -// case H5O_TYPE_DATASET: -// System.out.println(" Dataset: " + oname[indx]); -// break; -// case H5O_TYPE_NAMED_DATATYPE: -// System.out.println(" Datatype: " + oname[indx]); -// break; -// default: -// System.out.println(" Unknown: " + oname[indx]); -// } -// } -// } -// } -// catch (Exception e) { -// lg.error(e); -// } -// -// // Close the file. 
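
The attribute convention handled by populateStatNamesAndUnits above: newer post-processing files store paired attributes like comp_0_name / comp_0_unit, while older files store only comp_0 with the variable name as its value. A standalone sketch of that parse (hypothetical helper, not the VCell implementation):

    // Hedged sketch of the comp_N / comp_N_name / comp_N_unit parsing;
    // caller supplies name/unit arrays sized to the variable count.
    import java.util.Map;

    class StatAttributeParser {
        static void parse(Map<String, String> attrs, String[] names, String[] units) {
            for (Map.Entry<String, String> entry : attrs.entrySet()) {
                String key = entry.getKey();
                if (!key.startsWith("comp_")) continue;
                int sep = key.indexOf('_', "comp_".length());
                if (sep < 0) {
                    // old style: comp_0 = variable name, no unit attribute
                    names[Integer.parseInt(key.substring("comp_".length()))] = entry.getValue();
                } else {
                    int idx = Integer.parseInt(key.substring("comp_".length(), sep));
                    if (key.endsWith("_name")) names[idx] = entry.getValue();
                    else if (key.endsWith("_unit")) units[idx] = entry.getValue();
                }
            }
        }
    }
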
-// try { -// if (file_id >= 0) -// H5.H5Fclose(file_id); -// } -// catch (Exception e) { -// lg.error(e); -// } -//} - -//public static void populateHDF5(Group g, String indent,DataProcessingOutput0 dataProcessingOutput,boolean bVarStatistics,String imgDataName,Origin imgDataOrigin,Extent imgDataExtent) throws Exception -//{ -// if (g == null) -// return; -// -// List members = g.getMemberList(); -// -// int n = members.size(); -// indent += " "; -// HObject obj = null; -// -// String nameAtt = "_name"; -// String unitAtt = "_unit"; -// for (int i=0; i 2){ -// //changes internal class variable used during read -// selectedDims[2] = dims[2]; -// } -// -// //load all data -// Object data = h5ScalarDS.read(); -// -// if(dims != null){ -// if(dims.length > 1){ -// //For HDF5View (x stored in index 1) and (y stored in index 0) so must switch back to normal assumption -// long dimsY = dims[0]; -// dims[0] = dims[1]; -// dims[1] = dimsY; -// } -// //uncomment for Debug -// /*System.out.print(" dims=("); -// for (int j = 0; j < dims.length; j++) { -// System.out.print((j>0?"x":"")+dims[j]); -// } -// System.out.print(")");*/ -// } -// -//// System.out.print(" len="+times.length); -// if(obj.getName().equals(SimDataConstants.DATA_PROCESSING_OUTPUT_EXTENSION_TIMES)){ -// double[] times = (double[])data; -// dataProcessingOutput.setTimes(times); -// }else if(bVarStatistics){ -// double[] stats = (double[])data; -// int timeIndex = Integer.parseInt(obj.getName().substring("time".length())); -// for (int j = 0; j < stats.length; j++) { -// dataProcessingOutput.getVariableStatValues()[j][timeIndex] = stats[j]; -// } -// }else{ -// double min = ((double[])data)[0]; -// double max = min; -// for (int j = 0; j < ((double[])data).length; j++) { -// min = Math.min(min, ((double[])data)[j]); -// max = Math.max(max, ((double[])data)[j]); -// } -// int xSize = (int)dims[0]; -// int ySize = (int)(dims.length>1?dims[1]:1); -// int zSize = (int)(dims.length>2?dims[2]:1); -// SourceDataInfo sourceDataInfo = -// new SourceDataInfo(SourceDataInfo.RAW_VALUE_TYPE, (double[])data, (imgDataExtent==null?new Extent(1,1,1):imgDataExtent), (imgDataOrigin==null?null:imgDataOrigin), new Range(min, max), 0, xSize, 1, ySize, xSize, zSize, xSize*ySize); -// Vector otherData = dataProcessingOutput.getDataGenerators().get(imgDataName); -// int timeIndex = Integer.parseInt(obj.getName().substring(SimDataConstants.DATA_PROCESSING_OUTPUT_EXTENSION_TIMEPREFIX.length())); -// otherData.add(sourceDataInfo); -// if(otherData.size()-1 != timeIndex){ -// throw new Exception("Error HDF5 parse: added data index does not match timeIndex"); -// } -// } -// }else if (obj instanceof H5Group && !obj.getName().equals(SimDataConstants.DATA_PROCESSING_OUTPUT_EXTENSION_POSTPROCESSING)){ -// bVarStatistics = false; -// imgDataName = obj.getName(); -// dataProcessingOutput.getDataGenerators().put(imgDataName, new Vector()); -// -// List metaDataL = obj.getMetadata(); -// if(metaDataL != null){//assume 6 attributes defining origin and extent -// HashMap attrHashMap = getHDF5Attributes(obj); -// if(attrHashMap.size() == 2){ -// imgDataOrigin = new Origin(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINX)), 0, 0); -// imgDataExtent = new Extent(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTX)), 1, 1);//this is 1D, however the extentY, Z cannot take 0 -// } -// else if(attrHashMap.size() == 4){ -// imgDataOrigin = new Origin(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINX)), 
Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINY)), 0); -// imgDataExtent = new Extent(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTX)), Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTY)), 1);//this is 2D, however the extentZ cannot take 0 -// } -// else if(attrHashMap.size() == 6){ -// imgDataOrigin = new Origin(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINX)), Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINY)), Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINZ))); -// imgDataExtent = new Extent(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTX)), Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTY)), Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTZ))); -// } -// } -// -// } -// System.out.println(); -// -// if (obj instanceof Group) -// { -// populateHDF5((Group)obj, indent,dataProcessingOutput,bVarStatistics,imgDataName,imgDataOrigin,imgDataExtent); -// } -// } -//} - -} From 4abe0fefe6940462743f60dff2fa45b5de03f03e Mon Sep 17 00:00:00 2001 From: Jim Schaff Date: Wed, 24 Apr 2024 10:13:09 -0400 Subject: [PATCH 04/16] remove unused ChomboSimpleDataViewer --- .../client/data/ChomboSimpleDataViewer.java | 1046 ----------------- 1 file changed, 1046 deletions(-) delete mode 100644 vcell-client/src/main/java/cbit/vcell/client/data/ChomboSimpleDataViewer.java diff --git a/vcell-client/src/main/java/cbit/vcell/client/data/ChomboSimpleDataViewer.java b/vcell-client/src/main/java/cbit/vcell/client/data/ChomboSimpleDataViewer.java deleted file mode 100644 index 8dc567225b..0000000000 --- a/vcell-client/src/main/java/cbit/vcell/client/data/ChomboSimpleDataViewer.java +++ /dev/null @@ -1,1046 +0,0 @@ -package cbit.vcell.client.data; - -import java.awt.BorderLayout; -import java.awt.Color; -import java.awt.Component; -import java.awt.FlowLayout; -import java.awt.GridBagConstraints; -import java.awt.GridBagLayout; -import java.awt.Insets; -import java.awt.event.ActionEvent; -import java.awt.event.ActionListener; -import java.awt.event.MouseAdapter; -import java.awt.event.MouseEvent; -import java.io.File; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.Hashtable; -import java.util.List; -import java.util.Set; -import java.util.StringTokenizer; - -import javax.swing.BorderFactory; -import javax.swing.DefaultComboBoxModel; -import javax.swing.DefaultListCellRenderer; -import javax.swing.DefaultListModel; -import javax.swing.JButton; -import javax.swing.JComboBox; -import javax.swing.JFileChooser; -import javax.swing.JFrame; -import javax.swing.JLabel; -import javax.swing.JList; -import javax.swing.JOptionPane; -import javax.swing.JPanel; -import javax.swing.JPasswordField; -import javax.swing.JScrollPane; -import javax.swing.JTabbedPane; -import javax.swing.JTable; -import javax.swing.JTextField; -import javax.swing.ListSelectionModel; -import javax.swing.event.ListSelectionEvent; -import javax.swing.event.ListSelectionListener; -import javax.swing.table.AbstractTableModel; -import javax.swing.table.DefaultTableCellRenderer; - -import org.vcell.util.gui.GeneralGuiUtils; -import org.vcell.util.UserCancelException; -import org.vcell.util.document.KeyValue; -import org.vcell.util.document.User; -import org.vcell.util.gui.DialogUtils; - -import com.lowagie.text.Font; - -import cbit.gui.TextFieldAutoCompletion; -import cbit.vcell.client.VCellLookAndFeel; -import cbit.vcell.client.constants.GuiConstants; -import cbit.vcell.client.task.AsynchClientTask; -import 
cbit.vcell.client.task.ClientTaskDispatcher; -import cbit.vcell.simdata.DataSetIdentifier; -import cbit.vcell.simdata.SimDataConstants; -import cbit.vcell.simdata.SimulationDataSpatialHdf5; -import cbit.vcell.simdata.SimulationDataSpatialHdf5.SimDataSet; -import cbit.vcell.solver.VCSimulationDataIdentifier; -import cbit.vcell.solver.VCSimulationIdentifier; - -public class ChomboSimpleDataViewer extends JFrame { - - private static class MeshMetricsTableModel extends AbstractTableModel - { - private String[] cols = new String[0]; - private List values = new ArrayList(); - - @Override - public int getRowCount() { - return values.size(); - } - - @Override - public int getColumnCount() { - return cols.length; - } - -// private boolean isIndexColumn(int columnIndex) -// { -// String col = cols.get(columnIndex); -// return col.equalsIgnoreCase("i") || col.equalsIgnoreCase("j") -// || col.equalsIgnoreCase("k") || col.equalsIgnoreCase("index"); -// } - @Override - public Object getValueAt(int rowIndex, int columnIndex) { - Number d = values.get(rowIndex)[columnIndex]; - return d; - } - - @Override - public String getColumnName(int column) { - return cols[column]; - } - - public void setData(String[] cols, List values) - { - this.cols = cols; - this.values = values; - fireTableDataChanged(); - } - - public void refreshTable() - { - fireTableStructureChanged(); - fireTableDataChanged(); - } - - public void clear() - { - cols = new String[0]; - values.clear(); - refreshTable(); - } - } - - private static class SolTableCellRenderer extends DefaultTableCellRenderer - { - - @Override - public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, - int column) { - // TODO Auto-generated method stub - Component label = super.getTableCellRendererComponent(table, value, isSelected, hasFocus, - row, column); - setForeground(Color.black); - if (!isSelected) - { - if (value instanceof Number) - { - if (((Number) value).doubleValue() == SimDataConstants.BASEFAB_REAL_SETVAL) - { - setForeground(Color.gray); - } - } - } - return label; - } - - } - - private static class SolTableModel extends AbstractTableModel - { - private static final int COL_INDEX = 0; - private static final int COL_VALUE = 1; - private final static String[] cols = {"index", "value"}; - private double[] values = null; - - @Override - public int getRowCount() { - return values == null ? 0 : values.length; - } - - @Override - public int getColumnCount() { - return cols.length; - } - - @Override - public Class getColumnClass(int columnIndex) { - return Number.class; - } - - @Override - public Object getValueAt(int rowIndex, int columnIndex) { - if (columnIndex == COL_INDEX) - { - return rowIndex; - } - return values[rowIndex]; - } - - @Override - public String getColumnName(int column) { - return cols[column]; - } - - public void setValues(double[] v) - { - values = v; - fireTableDataChanged(); - } - - public void clear() - { - setValues(new double[0]); - } - } - - private static class TimePlotTableModel extends AbstractTableModel - { - private static final int COL_TIME = 0; - private static final int COL_VALUE = 1; - private final static String[] cols = {"time", "value"}; - private double[] values = null; - private double[] times = null; - - @Override - public int getRowCount() { - return values == null ? 
0 : values.length; - } - - @Override - public int getColumnCount() { - return cols.length; - } - - @Override - public Object getValueAt(int rowIndex, int columnIndex) { - return columnIndex == COL_TIME ? times[rowIndex] : values[rowIndex]; - } - - @Override - public String getColumnName(int column) { - return cols[column]; - } - - public void setTimesAndValues(double[] t, double[] v) - { - times = t; - values = v; - fireTableDataChanged(); - } - public void clear() - { - setTimesAndValues(new double[0], new double[0]); - } - } - private class EventListener implements ActionListener, ListSelectionListener - { - @Override - public void actionPerformed(ActionEvent e) { - - if (e.getSource() == resetButton) - { - reset(); - } - else if (e.getSource() == okButton) - { - retrieveVariablesAndTimes(); - } - else if (e.getSource() == timeComboBox) - { - retrieveData(); - } - else if (e.getSource() == timePlotButton) - { - retrieveTimePlot(); - } - else if (e.getSource() == exitButton) - { - System.exit(0); - } - } - - @Override - public void valueChanged(ListSelectionEvent e) { - if (e.getValueIsAdjusting()) - { - return; - } - if (e.getSource() == varList) - { - retrieveData(); - } - else if (e.getSource() == solTable.getSelectionModel()) - { - timePlotButton.setEnabled(solTable.getSelectedRowCount() == 1); - } - } - } - - private JPanel mainPanel = new JPanel(); - private JList varList = new JList(); - private JButton okButton = new JButton("Go"); - private JButton resetButton = new JButton("Reset"); - private JButton timePlotButton = new JButton("Time Plot"); - private JButton exitButton = new JButton("Exit"); - private JTable solTable= new JTable(); - private SolTableModel solTableModel = new SolTableModel(); - private JTable timePlotTable= new JTable(); - private TimePlotTableModel timePlotTableModel = new TimePlotTableModel(); - private TextFieldAutoCompletion dataDirTextField = new TextFieldAutoCompletion(); - private TextFieldAutoCompletion userNameTextField = new TextFieldAutoCompletion(); - private TextFieldAutoCompletion simIdField = new TextFieldAutoCompletion(); -// private JPasswordField remotePasswordField = new JPasswordField(); - private JComboBox timeComboBox = new JComboBox(); - private JLabel solLabel = new JLabel("Solution"); - private JLabel timePlotLabel = new JLabel("Time Plot"); - private SimulationDataSpatialHdf5 simData = null; - private EventListener listener = new EventListener(); - private Set simIds = new HashSet(); - private Set usernames = new HashSet(); - private Set datadirs = new HashSet(); - private JTabbedPane dataTabbedPane = new JTabbedPane(); - private JTable meshMetricsTable = new JTable(); - private MeshMetricsTableModel meshMetricsTableModel = new MeshMetricsTableModel(); - private static boolean debug = false; - private JPanel timePlotPanel; - private JTextField maxErrorTextField = new JTextField(); - private JTextField l2ErrorTextField = new JTextField(); - private JTextField meanTextField = new JTextField(); - private JTextField sumVolFracTextField = new JTextField(); - private JPanel errorPanel = null; - - private ChomboSimpleDataViewer() - { - setTitle("Chombo Simple Data Viewer"); - setDefaultCloseOperation(EXIT_ON_CLOSE); - initialize(); - } - - private JPanel getErrorPanel() - { - if (errorPanel == null) - { - errorPanel = new JPanel(); - errorPanel.setLayout(new GridBagLayout()); - errorPanel.setBorder(GuiConstants.TAB_PANEL_BORDER); - - meanTextField.setEditable(false); - sumVolFracTextField.setEditable(false); - 
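
The panel builders in this class repeat near-identical GridBagConstraints blocks. A possible consolidation is a small factory method; this is a sketch only, not part of the deleted code, and the Insets(2, 2, 2, 2) spacing mirrors what every block here uses.

    // Hypothetical constraints factory that would collapse the repeated setup.
    import java.awt.GridBagConstraints;
    import java.awt.Insets;

    final class Gbc {
        static GridBagConstraints at(int gridx, int gridy, int fill, double weightx, double weighty) {
            GridBagConstraints gbc = new GridBagConstraints();
            gbc.gridx = gridx;
            gbc.gridy = gridy;
            gbc.fill = fill;
            gbc.weightx = weightx;
            gbc.weighty = weighty;
            gbc.insets = new Insets(2, 2, 2, 2); // spacing used throughout these panels
            return gbc;
        }
    }
    // usage: errorPanel.add(meanTextField, Gbc.at(1, 0, GridBagConstraints.HORIZONTAL, 1.0, 0));
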
maxErrorTextField.setEditable(false); - l2ErrorTextField.setEditable(false); - - GridBagConstraints gbc = new GridBagConstraints(); - gbc.gridx = 0; - gbc.gridy = 0; - gbc.insets = new Insets(2, 2, 2, 2); - JLabel label = new JLabel("Mean"); - label.setFont(label.getFont().deriveFont(Font.BOLD)); - errorPanel.add(label, gbc); - - gbc = new GridBagConstraints(); - gbc.gridx = 1; - gbc.gridy = 0; - gbc.fill = GridBagConstraints.HORIZONTAL; - gbc.weightx = 1.0; - gbc.insets = new Insets(2, 2, 2, 2); - errorPanel.add(meanTextField, gbc); - - gbc = new GridBagConstraints(); - gbc.gridx = 2; - gbc.gridy = 0; - gbc.insets = new Insets(2, 2, 2, 2); - label = new JLabel("Vol Frac Sum"); - label.setFont(label.getFont().deriveFont(Font.BOLD)); - errorPanel.add(label, gbc); - - gbc = new GridBagConstraints(); - gbc.gridx = 3; - gbc.gridy = 0; - gbc.fill = GridBagConstraints.HORIZONTAL; - gbc.weightx = 1.0; - gbc.insets = new Insets(2, 2, 2, 2); - errorPanel.add(sumVolFracTextField, gbc); - - gbc = new GridBagConstraints(); - gbc.gridx = 0; - gbc.gridy = 1; - gbc.insets = new Insets(2, 2, 2, 2); - label = new JLabel("Max Error"); - label.setFont(label.getFont().deriveFont(Font.BOLD)); - errorPanel.add(label, gbc); - - gbc = new GridBagConstraints(); - gbc.gridx = 1; - gbc.gridy = 1; - gbc.fill = GridBagConstraints.HORIZONTAL; - gbc.weightx = 1.0; - gbc.insets = new Insets(2, 2, 2, 2); - errorPanel.add(maxErrorTextField, gbc); - - gbc = new GridBagConstraints(); - gbc.gridx = 2; - gbc.gridy = 1; - gbc.insets = new Insets(2, 2, 2, 2); - label = new JLabel("Relative L2 Error"); - label.setFont(label.getFont().deriveFont(Font.BOLD)); - errorPanel.add(label, gbc); - - gbc = new GridBagConstraints(); - gbc.gridx = 3; - gbc.gridy = 1; - gbc.fill = GridBagConstraints.HORIZONTAL; - gbc.weightx = 1.0; - gbc.insets = new Insets(2, 2, 2, 2); - errorPanel.add(l2ErrorTextField, gbc); - } - - return errorPanel; - } - - private JPanel createSolPanel() - { - JPanel solPanel = new JPanel(new GridBagLayout()); - - int gridy = 0; - GridBagConstraints gbc = new GridBagConstraints(); - gbc.gridx = 0; - gbc.gridy = gridy; - gbc.weightx = 1; - gbc.fill = GridBagConstraints.HORIZONTAL; - gbc.insets = new Insets(2, 2, 2, 2); - gbc.anchor = GridBagConstraints.LINE_START; - solLabel.setFont(solLabel.getFont().deriveFont(Font.BOLD)); - solPanel.add(solLabel, gbc); - - gbc = new GridBagConstraints(); - gbc.gridx = 1; - gbc.gridy = gridy; - gbc.insets = new Insets(2, 2, 2, 10); - gbc.anchor = GridBagConstraints.LINE_END; - solPanel.add(timePlotButton, gbc); - - ++ gridy; - gbc = new GridBagConstraints(); - gbc.gridx = 0; - gbc.gridy = gridy; - gbc.gridwidth = GridBagConstraints.REMAINDER; - gbc.weightx = 1.0; - gbc.weighty = 1.0; - gbc.insets = new Insets(2, 2, 2, 2); - gbc.fill = GridBagConstraints.BOTH; - solPanel.add(new JScrollPane(solTable), gbc); - - ++ gridy; - gbc = new GridBagConstraints(); - gbc.gridx = 0; - gbc.gridy = gridy; - gbc.weightx = 1.0; - gbc.gridwidth = GridBagConstraints.REMAINDER; - gbc.fill = GridBagConstraints.HORIZONTAL; - gbc.insets = new Insets(2, 2, 2, 2); - solPanel.add(getErrorPanel(), gbc); - - return solPanel; - } - - JFileChooser jFileChooser; - private ActionListener dataBrowseActionListener = new ActionListener() { - @Override - public void actionPerformed(ActionEvent e) { - if(jFileChooser == null){ - jFileChooser = new JFileChooser(); -// jFileChooser.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY); - } - int returnVal = jFileChooser.showOpenDialog(ChomboSimpleDataViewer.this); - - if 
(returnVal == JFileChooser.APPROVE_OPTION){ - File selectedFile = jFileChooser.getSelectedFile(); - String formattedName = null; - String userName = null; - if(selectedFile.isFile() && selectedFile.getName().startsWith("SimID_")){ - formattedName = selectedFile.getName(); - userName = selectedFile.getParentFile().getName(); - dataDirTextField.setText(jFileChooser.getSelectedFile().getParentFile().getParentFile().getAbsolutePath()); - }else{ - dataDirTextField.setText(jFileChooser.getSelectedFile().getAbsolutePath()); - } - if(formattedName != null){ - StringTokenizer st = new StringTokenizer(formattedName,"_"); - st.nextToken(); - simIdField.setText(st.nextToken()); - if(userName != null){ - userNameTextField.setText(userName); - } - } - } else { - return; - } - } - }; - private JPanel createInputPanel() - { - JPanel inputPanel = new JPanel(new GridBagLayout()); - inputPanel.setBorder(BorderFactory.createTitledBorder(GuiConstants.TAB_PANEL_BORDER, "Input")); - int gridy = 0; - GridBagConstraints gbc = new GridBagConstraints(); - gbc.gridx = 0; - gbc.gridy = gridy; - gbc.insets = new Insets(2, 2, 2, 2); - gbc.anchor = GridBagConstraints.LINE_END; - JLabel label = new JLabel("User"); - label.setFont(label.getFont().deriveFont(Font.BOLD)); - inputPanel.add(label, gbc); - - gbc = new GridBagConstraints(); - gbc.gridx = 1; - gbc.gridy = gridy; - gbc.weightx = 1; - gbc.insets = new Insets(2, 2, 2, 2); - gbc.fill = GridBagConstraints.HORIZONTAL; - inputPanel.add(userNameTextField, gbc); - - gridy ++; - gbc = new GridBagConstraints(); - gbc.gridx = 0; - gbc.gridy = gridy; - gbc.insets = new Insets(2, 2, 2, 2); - gbc.anchor = GridBagConstraints.LINE_END; - JButton dataBrowseButton = new JButton("Data Dir"); - dataBrowseButton.setFont(label.getFont().deriveFont(Font.BOLD)); - inputPanel.add(dataBrowseButton, gbc); - dataBrowseButton.addActionListener(dataBrowseActionListener); - - gbc = new GridBagConstraints(); - gbc.gridx = 1; - gbc.gridy = gridy; - gbc.insets = new Insets(2, 2, 2, 2); - gbc.fill = GridBagConstraints.HORIZONTAL; - inputPanel.add(dataDirTextField, gbc); - - gridy ++; - gbc = new GridBagConstraints(); - gbc.gridx = 0; - gbc.gridy = gridy; - gbc.insets = new Insets(2, 2, 2, 2); - gbc.anchor = GridBagConstraints.LINE_END; - label = new JLabel("Sim ID"); - label.setFont(label.getFont().deriveFont(Font.BOLD)); - inputPanel.add(label, gbc); - - gbc = new GridBagConstraints(); - gbc.gridx = 1; - gbc.gridy = gridy; - gbc.insets = new Insets(2, 2, 2, 2); - gbc.fill = GridBagConstraints.HORIZONTAL; - inputPanel.add(simIdField, gbc); - -// gridy ++; -// gbc = new GridBagConstraints(); -// gbc.gridx = 0; -// gbc.gridy = gridy; -// gbc.insets = new Insets(2, 2, 2, 2); -// gbc.anchor = GridBagConstraints.LINE_END; -// label = new JLabel("RmtPW"); -// label.setFont(label.getFont().deriveFont(Font.BOLD)); -// inputPanel.add(label, gbc); -// -// gbc = new GridBagConstraints(); -// gbc.gridx = 1; -// gbc.gridy = gridy; -// gbc.insets = new Insets(2, 2, 2, 2); -// gbc.fill = GridBagConstraints.HORIZONTAL; -// inputPanel.add(remotePasswordField, gbc); - - gridy ++; - JPanel panel1 = new JPanel(new FlowLayout(FlowLayout.RIGHT)); - panel1.add(resetButton); - panel1.add(okButton); - gbc = new GridBagConstraints(); - gbc.gridx = 0; - gbc.gridy = gridy; - gbc.gridwidth = 2; - gbc.weightx = 0.2; - gbc.fill = GridBagConstraints.HORIZONTAL; - gbc.insets = new Insets(2, 2, 2, 2); - inputPanel.add(panel1, gbc); - return inputPanel; - } - - private JPanel createSelectionPanel() - { - JPanel selectionPanel = new 
JPanel(new GridBagLayout()); - selectionPanel.setBorder(GuiConstants.TAB_PANEL_BORDER); - int gridy = 0; - GridBagConstraints gbc = new GridBagConstraints(); - gbc.gridx = 0; - gbc.gridy = gridy; - gbc.insets = new Insets(2, 2, 2, 2); - gbc.anchor = GridBagConstraints.LINE_END; - JLabel label = new JLabel("Time"); - label.setFont(label.getFont().deriveFont(Font.BOLD)); - selectionPanel.add(label, gbc); - - gbc = new GridBagConstraints(); - gbc.gridx = 1; - gbc.gridy = gridy; - gbc.weightx = 1.0; - gbc.anchor = GridBagConstraints.LINE_START; - gbc.fill = GridBagConstraints.HORIZONTAL; - gbc.insets = new Insets(2, 2, 2, 2); - selectionPanel.add(timeComboBox, gbc); - - gridy ++; - gbc = new GridBagConstraints(); - gbc.gridx = 0; - gbc.gridy = gridy; - gbc.insets = new Insets(2, 2, 2, 2); - gbc.anchor = GridBagConstraints.FIRST_LINE_END; - label = new JLabel("Variable"); - label.setFont(label.getFont().deriveFont(Font.BOLD)); - selectionPanel.add(label, gbc); - - gbc = new GridBagConstraints(); - gbc.gridx = 1; - gbc.gridy = gridy; - gbc.weighty = 1.0; - gbc.insets = new Insets(2, 2, 10, 2); - gbc.fill = GridBagConstraints.BOTH; - selectionPanel.add(new JScrollPane(varList), gbc); - return selectionPanel; - } - - private JPanel createTimePlotPanel() - { - timePlotPanel = new JPanel(new GridBagLayout()); - - int gridy = 0; - GridBagConstraints gbc = new GridBagConstraints(); - gbc.gridx = 0; - gbc.gridy = gridy; - gbc.insets = new Insets(2, 2, 2, 2); - gbc.anchor = GridBagConstraints.LINE_START; - timePlotLabel.setFont(timePlotLabel.getFont().deriveFont(Font.BOLD)); - timePlotPanel.add(timePlotLabel, gbc); - - ++ gridy; - gbc = new GridBagConstraints(); - gbc.gridx = 0; - gbc.gridy = gridy; - gbc.gridwidth = GridBagConstraints.REMAINDER; - gbc.gridheight = GridBagConstraints.REMAINDER; - gbc.weightx = 1.0; - gbc.weighty = 1.0; - gbc.insets = new Insets(2, 2, 2, 2); - gbc.fill = GridBagConstraints.BOTH; - timePlotPanel.add(new JScrollPane(timePlotTable), gbc); - - return timePlotPanel; - } - - private void initialize() { - setSize(1000, 500); - GeneralGuiUtils.centerOnScreen(this); - - solTable.setModel(solTableModel); - solTable.setDefaultRenderer(Number.class, new SolTableCellRenderer()); - meshMetricsTable.setModel(meshMetricsTableModel); - timePlotTable.setModel(timePlotTableModel); - varList.setSelectionMode(ListSelectionModel.SINGLE_SELECTION); - - mainPanel.setLayout(new GridBagLayout()); - - JPanel meshMetricsPanel = new JPanel(new BorderLayout()); - meshMetricsPanel.add(new JScrollPane(meshMetricsTable), BorderLayout.CENTER); - - dataTabbedPane.addTab("Solution", createSolPanel()); - dataTabbedPane.addTab("Mesh Metrics", meshMetricsPanel); - dataTabbedPane.addTab("Time Plot", createTimePlotPanel()); - - int gridy = 0; - GridBagConstraints gbc = new GridBagConstraints(); - gbc.gridx = 0; - gbc.gridy = gridy; - gbc.weightx = 0.4; - gbc.fill = GridBagConstraints.HORIZONTAL; - gbc.insets = new Insets(2, 2, 2, 2); - mainPanel.add(createInputPanel(), gbc); - - gbc = new GridBagConstraints(); - gbc.gridx = 1; - gbc.gridy = gridy; - gbc.gridwidth = GridBagConstraints.REMAINDER; - gbc.gridheight = GridBagConstraints.REMAINDER; - gbc.weightx = 1.0; - gbc.weighty = 1.0; - gbc.insets = new Insets(2, 2, 2, 2); - gbc.fill = GridBagConstraints.BOTH; - mainPanel.add(dataTabbedPane, gbc); - - gridy ++; - gbc = new GridBagConstraints(); - gbc.gridx = 0; - gbc.gridy = gridy; - gbc.insets = new Insets(2, 2, 2, 2); - gbc.fill = GridBagConstraints.BOTH; - gbc.weighty = 1.0; - gbc.weightx = 0.2; - 
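
The Data Dir browse handler above derives the user name and simulation id from a selected data file, relying on the SimID_<key>_... naming convention of VCell data files under <dataDir>/<userName>/. A self-contained sketch of that extraction (hypothetical helper, shown for illustration only):

    // Assumes data files are named "SimID_<simKey>_<jobIndex>...."; not the viewer code.
    class SimIdFileNameParser {
        static String simKey(String fileName) {
            String[] parts = fileName.split("_");
            if (parts.length < 2 || !"SimID".equals(parts[0])) {
                throw new IllegalArgumentException("not a SimID_ data file: " + fileName);
            }
            return parts[1]; // the simulation key, e.g. "77764707"
        }
    }
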
mainPanel.add(createSelectionPanel(), gbc); - - gridy ++; - gbc = new GridBagConstraints(); - gbc.gridx = 0; - gbc.gridy = gridy; - gbc.insets = new Insets(2, 2, 20, 20); - gbc.anchor = GridBagConstraints.LINE_END; - mainPanel.add(exitButton, gbc); - - add(mainPanel); - reset(); - - resetButton.addActionListener(listener); - okButton.addActionListener(listener); - timeComboBox.addActionListener(listener); - exitButton.addActionListener(listener); - varList.addListSelectionListener(listener); - timePlotButton.setEnabled(false); - timePlotButton.addActionListener(listener); - solTable.getSelectionModel().addListSelectionListener(listener); - - dataDirTextField.addMouseListener(new MouseAdapter() { - @Override - public void mouseEntered(MouseEvent e) { - dataDirTextField.setToolTipText(dataDirTextField.getText()); - } - }); - varList.setCellRenderer(new DefaultListCellRenderer(){ - - @Override - public Component getListCellRendererComponent(JList list, Object value, int index, boolean isSelected, boolean cellHasFocus) { - super.getListCellRendererComponent(list, value, index, isSelected, cellHasFocus); - if (value instanceof DataSetIdentifier) - { - setText(((DataSetIdentifier) value).getName()); - } - return this; - } - - }); - } - - private void reset() - { - userNameTextField.setText("boris"); - dataDirTextField.setText("\\\\cfs01.cam.uchc.edu\\ifs\\RAID\\Vcell\\users\\"); - - if (debug) - { - userNameTextField.setText("fgao1"); - dataDirTextField.setText("C:\\chombo\\data\\users\\"); -// simIdField.setText("77396269"); - simIdField.setText("77764707"); - } - } - - private static class SimDataInfoHolder{ - public SimulationDataSpatialHdf5 simData; - public File userDir; - public SimDataInfoHolder(SimulationDataSpatialHdf5 simData, File userDir) { - this.simData = simData; - this.userDir = userDir; - } - } - private SimDataInfoHolder createSimulationDataFromDir(File dataDir,String userid,VCSimulationDataIdentifier vcDataId) throws Exception{ - File userDir = new File(dataDir, userid); - return new SimDataInfoHolder(new SimulationDataSpatialHdf5(vcDataId, userDir, null),userDir); - } - private JPasswordField jPasswordField = new JPasswordField(); - private SimDataInfoHolder createSimulationDataFromRemote(String userid,VCSimulationDataIdentifier vcDataId) throws Exception{ - SimDataInfoHolder simDataInfoHolder = null; - SimulationDataSpatialHdf5 simData = null; - try{ - //Try well known primary data dir from windows -// if(true){throw new Exception();} - File userDir = new File("\\\\cfs01.cam.uchc.edu\\ifs\\raid\\vcell\\users",userid); - simData = new SimulationDataSpatialHdf5(vcDataId, userDir, null); - simDataInfoHolder = new SimDataInfoHolder(simData,userDir); - }catch(Exception e){ - try{ - //Try well known secondary data dir from windows -// if(true){throw new Exception();} - File userDir = new File("\\\\cfs02.cam.uchc.edu\\raid\\vcell\\users",userid); - simData = new SimulationDataSpatialHdf5(vcDataId,userDir, null); - simDataInfoHolder = new SimDataInfoHolder(simData,userDir); - }catch(Exception e2){ - //try ssh download from linux server - if(DialogUtils.showComponentOKCancelDialog(ChomboSimpleDataViewer.this, jPasswordField, "Enter cluster password for 'vcell'") != JOptionPane.OK_OPTION){ - throw UserCancelException.CANCEL_GENERIC; - } - - File tempSimDir = File.createTempFile("VCellUsersDir", ".dir"); - tempSimDir.delete(); - File tmpdir = new File(tempSimDir.getParentFile(),"VCellUsersDir"); - if(!tmpdir.exists() && !tmpdir.mkdir()){ - throw new Exception("Couldn't make local dir 
"+tmpdir); - } - File downloadDir = SimDataConnection.downloadSimData(tmpdir, new String(jPasswordField.getPassword()), userid, vcDataId.getSimulationKey(), 0, false); - simData = new SimulationDataSpatialHdf5(vcDataId,downloadDir, null); - simDataInfoHolder = new SimDataInfoHolder(simData,downloadDir); - } - } - return simDataInfoHolder; - } - private void retrieveVariablesAndTimes() - { - AsynchClientTask task0 = new AsynchClientTask("clear", AsynchClientTask.TASKTYPE_SWING_BLOCKING) { - - @Override - public void run(Hashtable hashTable) throws Exception { - DefaultComboBoxModel dcm = new DefaultComboBoxModel(); - timeComboBox.setModel(dcm); - DefaultListModel dlm = new DefaultListModel(); - varList.setModel(dlm); - solTableModel.clear(); - meshMetricsTableModel.clear(); - meanTextField.setText(null); - maxErrorTextField.setText(null); - sumVolFracTextField.setText(null); - l2ErrorTextField.setText(null); - } - }; - - AsynchClientTask task1 = new AsynchClientTask("retrieve data", AsynchClientTask.TASKTYPE_NONSWING_BLOCKING) { - - @Override - public void run(Hashtable hashTable) throws Exception { - String simId = simIdField.getText().trim(); - if (simId == null || simId.length() == 0) - { - throw new RuntimeException("Please provide a simulation id."); - } - String username = userNameTextField.getText().trim(); - if (username == null || username.length() == 0) - { - throw new RuntimeException("Please provide a user name."); - } - VCSimulationDataIdentifier vcDataId = new VCSimulationDataIdentifier(new VCSimulationIdentifier(new KeyValue(simId), new User(username, null)), 0); - SimDataInfoHolder simDataInfoHolder = null; - String datadir = dataDirTextField.getText(); - if (datadir == null || datadir.length() == 0){ - simDataInfoHolder = createSimulationDataFromRemote(username, vcDataId); - datadir = simDataInfoHolder.userDir.getParent(); - dataDirTextField.setText(datadir); - }else{ - simDataInfoHolder = createSimulationDataFromDir(new File(datadir), username, vcDataId); - } - simData = simDataInfoHolder.simData; - simData.readVarAndFunctionDataIdentifiers(); - usernames.add(username); - userNameTextField.setAutoCompletionWords(usernames); - datadirs.add(datadir); - dataDirTextField.setAutoCompletionWords(datadirs); - simIds.add(simId); - simIdField.setAutoCompletionWords(simIds); - } - }; - - AsynchClientTask task2 = new AsynchClientTask("show data", AsynchClientTask.TASKTYPE_SWING_BLOCKING) { - - @Override - public void run(Hashtable hashTable) throws Exception { - double[] times = simData.getDataTimes(); - DefaultComboBoxModel dcm = new DefaultComboBoxModel(); - for(double t : times) - { - dcm.addElement(t); - } - timeComboBox.setModel(dcm); - meshMetricsTableModel.setData(simData.getChomboMesh().getMetricsColumnNames(), simData.getChomboMesh().getMetricsNumbers()); - List dsiList = simData.getDataSetIdentifiers(); - DefaultListModel dlm = new DefaultListModel(); - for (DataSetIdentifier dsi : dsiList) - { - dlm.addElement(dsi); - } - varList.setModel(dlm); - if (times.length > 0) - { - timeComboBox.setSelectedIndex(0); - } - if (dsiList.size() > 0) - { - varList.setSelectedIndex(0); - } - meshMetricsTableModel.refreshTable(); - } - }; - ClientTaskDispatcher.dispatch(this, new Hashtable(), new AsynchClientTask[] {task0, task1, task2}, false); - } - - private void retrieveTimePlot() - { - if (solTable.getSelectedRow() < 0 || varList.getSelectedIndex() < 0) - { - return; - } - final int index = (Integer) solTable.getValueAt(solTable.getSelectedRow(), SolTableModel.COL_INDEX); - 
DataSetIdentifier selectedVar = (DataSetIdentifier)varList.getSelectedValue(); - final String varName = selectedVar.getName(); - - AsynchClientTask task0 = new AsynchClientTask("clear", AsynchClientTask.TASKTYPE_SWING_BLOCKING) { - - @Override - public void run(Hashtable hashTable) throws Exception { - timePlotTableModel.setTimesAndValues(new double[0], new double[0]); - } - }; - - AsynchClientTask task1 = new AsynchClientTask("retrieve data", AsynchClientTask.TASKTYPE_NONSWING_BLOCKING) { - - @Override - public void run(Hashtable hashTable) throws Exception { - double[] times = simData.getDataTimes(); - double[] values = new double[times.length]; - for (int i = 0; i < times.length; ++ i) - { - SimDataSet simDataBlock = simData.retrieveSimDataSet(times[i], varName); - values[i] = simDataBlock.solValues[index]; - } - hashTable.put("values", values); - } - }; - - AsynchClientTask task2 = new AsynchClientTask("show data", AsynchClientTask.TASKTYPE_SWING_BLOCKING) { - - @Override - public void run(Hashtable hashTable) throws Exception { - timePlotLabel.setText("Variable " + varName + " @ Index " + index); - double[] times = simData.getDataTimes(); - double[] values = (double[]) hashTable.get("values"); - timePlotTableModel.setTimesAndValues(times, values); - dataTabbedPane.setSelectedComponent(timePlotPanel); - } - }; - ClientTaskDispatcher.dispatch(this, new Hashtable(), new AsynchClientTask[] {task0, task1, task2}, false); - } - -// private void readMeshMetricsFile(File userDir, VCSimulationDataIdentifier vcDataId, String simId) throws IOException -// { -// File meshMetricsFile = new File(userDir, vcDataId.getID() + ".chombo.memmetrics"); -// if (!meshMetricsFile.exists()) -// { -// return; -// } -// BufferedReader br = null; -// try -// { -// br = new BufferedReader(new FileReader(meshMetricsFile)); -// List cols = new ArrayList(); -// List values = new ArrayList(); -// String line = br.readLine(); -// if (line != null) -// { -// StringTokenizer st = new StringTokenizer(line, ","); -// while (st.hasMoreTokens()) -// { -// String token = st.nextToken(); -// cols.add(token); -// } -// } -// while (true) -// { -// line = br.readLine(); -// if (line == null) -// { -// break; -// } -// double[] dvalues = new double[cols.size()]; -// StringTokenizer st = new StringTokenizer(line, ","); -// int cnt = 0; -// while (st.hasMoreTokens()) -// { -// String token = st.nextToken(); -// dvalues[cnt] = Double.parseDouble(token); -// ++ cnt; -// } -// assert cnt == cols.size(); -// values.add(dvalues); -// } -// meshMetricsTableModel.setData(cols, values); -// } -// finally -// { -// if (br != null) -// { -// br.close(); -// } -// } -// } - - private void retrieveData() - { - final Double time = (Double)timeComboBox.getSelectedItem(); - if (time == null) - { - return; - } - DataSetIdentifier selectedVar = (DataSetIdentifier)varList.getSelectedValue(); - if (selectedVar == null) - { - return; - } - final String varName = selectedVar.getName(); - AsynchClientTask task0 = new AsynchClientTask("clear", AsynchClientTask.TASKTYPE_SWING_BLOCKING) { - - @Override - public void run(Hashtable hashTable) throws Exception { - solTableModel.clear(); - timePlotTableModel.clear(); - solLabel.setText("Solution"); - timePlotLabel.setText("Time Plot"); - meanTextField.setText(null); - maxErrorTextField.setText(null); - sumVolFracTextField.setText(null); - l2ErrorTextField.setText(null); - } - }; - - AsynchClientTask task1 = new AsynchClientTask("retrieve data", AsynchClientTask.TASKTYPE_NONSWING_BLOCKING) { - - @Override -
public void run(Hashtable hashTable) throws Exception { - if (timeComboBox.getSelectedIndex() < 0 || varList.getSelectedIndex() < 0) - { - return; - } - SimDataSet simDataSet = simData.retrieveSimDataSet(time, varName); - hashTable.put("simDataSet", simDataSet); - } - }; - - AsynchClientTask task2 = new AsynchClientTask("show data", AsynchClientTask.TASKTYPE_SWING_BLOCKING) { - - @Override - public void run(Hashtable hashTable) throws Exception { - SimDataSet simDataSet = (SimDataSet) hashTable.get("simDataSet"); - if (simDataSet == null) - { - return; - } - solLabel.setText("Variable " + varName + " @ Time " + time); - solTableModel.setValues(simDataSet.solValues); - meanTextField.setText(simDataSet.mean == null ? "" : simDataSet.mean.toString()); - sumVolFracTextField.setText(simDataSet.sumVolFrac == null ? "" : simDataSet.sumVolFrac.toString()); - maxErrorTextField.setText(simDataSet.maxError == null ? "" : simDataSet.maxError.toString()); - l2ErrorTextField.setText(simDataSet.l2Error == null ? "" : simDataSet.l2Error + "".toString()); - } - }; - ClientTaskDispatcher.dispatch(this, new Hashtable(), new AsynchClientTask[] {task0, task1, task2}, false); - } - - // TODO: Make as a test - public static void main(String[] args) { - if (args.length > 0 && Boolean.parseBoolean(args[0])){ - ChomboSimpleDataViewer.debug = true; - } - VCellLookAndFeel.setVCellLookAndFeel(); - ChomboSimpleDataViewer chomboSimpleDataViewer = new ChomboSimpleDataViewer(); - chomboSimpleDataViewer.setVisible(true); - } -} From adf6fb6d2b562e2985a46b34df12e6b039a1c58b Mon Sep 17 00:00:00 2001 From: Jim Schaff Date: Wed, 24 Apr 2024 11:39:50 -0400 Subject: [PATCH 05/16] factor exportTableToHDF5 out of Plot2DDataPanel --- .../java/cbit/plot/gui/Plot2DDataPanel.java | 522 ++---------------- .../java/cbit/vcell/simdata/Hdf5Utils.java | 183 +++++- 2 files changed, 235 insertions(+), 470 deletions(-) diff --git a/vcell-client/src/main/java/cbit/plot/gui/Plot2DDataPanel.java b/vcell-client/src/main/java/cbit/plot/gui/Plot2DDataPanel.java index a9b00ec6a2..d4aa587244 100644 --- a/vcell-client/src/main/java/cbit/plot/gui/Plot2DDataPanel.java +++ b/vcell-client/src/main/java/cbit/plot/gui/Plot2DDataPanel.java @@ -9,53 +9,31 @@ */ package cbit.plot.gui; -import java.awt.Component; -import java.awt.event.ActionEvent; -import java.awt.event.ActionListener; -import java.awt.event.MouseEvent; -import java.io.File; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.ListIterator; -import java.util.Vector; - -import javax.swing.ButtonGroup; -import javax.swing.JFileChooser; -import javax.swing.JFrame; -import javax.swing.JMenuItem; -import javax.swing.JOptionPane; -import javax.swing.JPanel; -import javax.swing.JPopupMenu; -import javax.swing.JRadioButton; -import javax.swing.KeyStroke; -import javax.swing.table.DefaultTableModel; -import org.apache.commons.lang3.StringUtils; +import cbit.plot.Plot2D; +import cbit.vcell.client.UserMessage; +import cbit.vcell.desktop.VCellTransferable; +import cbit.vcell.math.ReservedVariable; +import cbit.vcell.parser.Expression; +import cbit.vcell.parser.SymbolTableEntry; +import cbit.vcell.simdata.Hdf5Utils; +import cbit.vcell.solver.Simulation; +import com.google.common.io.Files; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.vcell.util.UtilCancelException; import org.vcell.util.gui.DialogUtils; import org.vcell.util.gui.NonEditableDefaultTableModel; import org.vcell.util.gui.ScrollTable; -import 
org.vcell.util.gui.SimpleUserMessage; import org.vcell.util.gui.SpecialtyTableRenderer; -import com.google.common.io.Files; +import javax.swing.*; +import java.awt.*; +import java.awt.event.ActionEvent; +import java.awt.event.ActionListener; +import java.awt.event.MouseEvent; +import java.io.File; -import cbit.plot.Plot2D; -import cbit.vcell.client.UserMessage; -import cbit.vcell.desktop.VCellTransferable; -import cbit.vcell.math.ReservedVariable; -import cbit.vcell.parser.Expression; -import cbit.vcell.parser.SimpleSymbolTable; -import cbit.vcell.parser.SymbolTableEntry; -import cbit.vcell.simdata.Hdf5Utils; -import cbit.vcell.simdata.Hdf5Utils.HDF5WriteHelper; -import cbit.vcell.solver.Simulation; -import ncsa.hdf.hdf5lib.H5; -import ncsa.hdf.hdf5lib.HDF5Constants; -import javax.swing.JLabel; -import java.awt.BorderLayout; /** * Insert the type's description here. * Creation date: (4/19/2001 12:33:58 PM) @@ -305,12 +283,7 @@ public void setSimulation(Simulation simulation) { private synchronized void copyCells(CopyAction copyAction) { copyCells0(copyAction,false); } -/** - * Insert the method's description here. - * Creation date: (4/20/2001 4:52:52 PM) - * @param actionCommand java.lang.String - * @return java.lang.String - */ + private synchronized void copyCells0(CopyAction copyAction,boolean isHDF5) { try{ int r = 0; @@ -375,438 +348,49 @@ else if (copyAction == CopyAction.copyrow) { return; } } - int hdf5FileID = -1;//Used if HDF5 format - File hdf5TempFile = null; -// Hdf5Utils.HDF5WriteHelper help0 = null; - try { - hdf5TempFile = File.createTempFile("plot2D", ".hdf"); - //System.out.println("/home/vcell/Downloads/hdf5/HDFView/bin/HDFView "+hdf5TempFile.getAbsolutePath()); - hdf5FileID = H5.H5Fcreate(hdf5TempFile.getAbsolutePath(), HDF5Constants.H5F_ACC_TRUNC,HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); - ArrayList> paramScanJobs = new ArrayList>(); - if(!bHistogram && !getScrollPaneTable().getColumnName(0).equals((xVarColumnName==null?ReservedVariable.TIME.getName():xVarColumnName))) { - throw new Exception("Expecting first column in table to have name '"+xVarColumnName+"'"); - } - //Add arraylist for the parameter scan job, add the index of the xval column - for(int i=0;i tempAL = new ArrayList(); - paramScanJobs.add(tempAL); - break; - } else if(getScrollPaneTable().getColumnName(i).equals((xVarColumnName==null?ReservedVariable.TIME.getName():xVarColumnName))){ - if(i==0) { - ArrayList tempAL = new ArrayList(); - tempAL.add(i); - paramScanJobs.add(tempAL); - }else { - String str1 = getScrollPaneTable().getColumnName(i-1); - int str1Index = str1.lastIndexOf("Set "); - String str2 = getScrollPaneTable().getColumnName(i+1); - int str2Index = str2.lastIndexOf("Set "); - if(!str1.substring(str1Index).equals(str2.substring(str2Index))) { - ArrayList tempAL = new ArrayList(); - tempAL.add(i); - paramScanJobs.add(tempAL); - }else { - continue; - } - } - } - } - //Add selected columns to the proper paramscan arraylist - for(int j=0;j= paramScanJobs.get(k).get(0) && ((k+1) == paramScanJobs.size() || columns[j] < paramScanJobs.get(k+1).get(0))) { - paramScanJobs.get(k).add(columns[j]); -// System.out.println("HDF5frm"+getScrollPaneTable().getColumnName(columns[j])); - } - } - } + int columnCount = getScrollPaneTable().getColumnCount(); + int rowCount = getScrollPaneTable().getRowCount(); + String[] columnNames = new String[columnCount]; + for (int i=0; i listIterator = paramScanJobs.get(k).listIterator(); - if(paramScanJobs.get(k).size() > 1) {// keep x val is there more 
selections for this set - listIterator.next(); - } - while(listIterator.hasNext()) { - final Integer columIndex = listIterator.next(); - boolean bFound = false; - for(int j=0;j> listIterator = paramScanJobs.listIterator(); -// while(listIterator.hasNext()) { -// final ArrayList next = listIterator.next(); -// if(next.size() == 0) { -// listIterator.remove(); -// } -//// selectedColCount+= next.size(); -// } - //Write out the data to HDF5 file - for(int k=0;k dataTypes = new ArrayList(); - ArrayList dataIDs = new ArrayList(); - ArrayList dataShapes = new ArrayList(); - ArrayList dataLabels = new ArrayList(); - ArrayList dataNames = new ArrayList(); - ArrayList paramNames = new ArrayList(); - ArrayList paramValues = new ArrayList(); - boolean bParamsDone = false; - for(int cols=0;cols paramScanJobCols = null; -// for(int i=0;i(); -// }else { -// continue; -// } -// for(int j=0;j= lastXCol) { -// paramScanJobCols.add(columns[j]); -// } -// } -// if(paramScanJobCols.size() > 0) { -// paramScanJobs.add(paramScanJobCols); -// } -// } -// -// for(int i=0;i xColumns = new ArrayList(); -// //Check if multiple columns with time (happens when viewing 'Time Plot with Multiple Parameter Value-sets') -//// ArrayList nonTColumns = new ArrayList(); -// for(int i=0;i 0) { -//// hdfValues = new double[rows.length*nonTColumns.size()]; -//// int cnt=0; -//// for(int j=0;j> paramScanJobs = new ArrayList<>(); + if(!bHistogram && !columnNames[0].equals((xVarColumnName==null? ReservedVariable.TIME.getName():xVarColumnName))) { + throw new Exception("Expecting first column in table to have name '"+xVarColumnName+"'"); + } + //Add arraylist for the parameter scan job, add the index of the xval column + for(int i=0;i tempAL = new ArrayList(); + paramScanJobs.add(tempAL); + break; + } else if(columnNames[i].equals((xVarColumnName==null?ReservedVariable.TIME.getName():xVarColumnName))){ + if(i==0) { + ArrayList tempAL = new ArrayList(); + tempAL.add(i); + paramScanJobs.add(tempAL); + }else { + String str1 = columnNames[i-1]; + int str1Index = str1.lastIndexOf("Set "); + String str2 = columnNames[i+1]; + int str2Index = str2.lastIndexOf("Set "); + if(!str1.substring(str1Index).equals(str2.substring(str2Index))) { + ArrayList tempAL = new ArrayList(); + tempAL.add(i); + paramScanJobs.add(tempAL); + } + } + } + } + //Add selected columns to the proper paramscan arraylist + for(int j = 0; j< columns.length; j++) { + if(bHistogram) { + paramScanJobs.get(0).add(columns[j]); + }else { + if(columnNames[columns[j]].equals((xVarColumnName==null?ReservedVariable.TIME.getName():xVarColumnName))){ + continue;//skip xcolumns + } + for(int k=0;k= paramScanJobs.get(k).get(0) && ((k+1) == paramScanJobs.size() || columns[j] < paramScanJobs.get(k+1).get(0))) { + paramScanJobs.get(k).add(columns[j]); +// System.out.println("HDF5frm"+columnNames[columns[j])); + } + } + } + } + //Remove unselected indexes from set lists + for(int k=0;k listIterator = paramScanJobs.get(k).listIterator(); + if(paramScanJobs.get(k).size() > 1) {// keep x val is there more selections for this set + listIterator.next(); + } + while(listIterator.hasNext()) { + final Integer columIndex = listIterator.next(); + boolean bFound = false; + for(int j = 0; j< columns.length; j++) { + if(columIndex == columns[j]) { + bFound = true; + break; + } + } + if(!bFound) { + listIterator.remove(); + } + } + } + //Write out the data to HDF5 file + for(int k=0;k dataTypes = new ArrayList(); + ArrayList dataIDs = new ArrayList(); + ArrayList dataShapes = new ArrayList(); + 
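The grouping logic above is easiest to see on a concrete table. A simplified, self-contained sketch: each occurrence of the x-variable column (time by default) opens a new parameter-scan group; the patch additionally compares the "Set " suffixes of the neighboring column names before opening a group, which this sketch omits:

    import java.util.ArrayList;
    import java.util.List;

    public class GroupingDemo {
        public static void main(String[] args) {
            // Illustrative names, as seen in 'Time Plot with Multiple Parameter Value-sets'
            String[] columnNames = {"t", "A", "B", "t Set 1", "A Set 1"};
            List<List<Integer>> paramScanJobs = new ArrayList<>();
            for (int i = 0; i < columnNames.length; i++) {
                if (columnNames[i].equals("t")) {      // x column starts a new group
                    List<Integer> job = new ArrayList<>();
                    job.add(i);
                    paramScanJobs.add(job);
                }
            }
            System.out.println(paramScanJobs);         // prints [[0], [3]]
        }
    }

Selected data columns are then appended to whichever group's x-column index most recently precedes them, exactly as in the loop above.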
ArrayList dataLabels = new ArrayList(); + ArrayList dataNames = new ArrayList(); + ArrayList paramNames = new ArrayList(); + ArrayList paramValues = new ArrayList(); + boolean bParamsDone = false; + for(int cols=0;cols Date: Wed, 24 Apr 2024 11:53:10 -0400 Subject: [PATCH 06/16] extract vcell-client HDF5 MultiTrialStatistics reading to core --- .../vcell/client/data/ODEDataInterface.java | 12 +- .../client/data/ODEDataInterfaceImpl.java | 141 ++---------------- .../vcell/client/data/SimResultsViewer.java | 112 ++------------ .../gui/ODESolverPlotSpecificationPanel.java | 50 ++----- ...ultiTrialNonspatialStochSimDataReader.java | 127 ++++++++++++++++ .../vcell/simdata/SummaryStatisticType.java | 8 + 6 files changed, 176 insertions(+), 274 deletions(-) create mode 100644 vcell-core/src/main/java/cbit/vcell/simdata/MultiTrialNonspatialStochSimDataReader.java create mode 100644 vcell-core/src/main/java/cbit/vcell/simdata/SummaryStatisticType.java diff --git a/vcell-client/src/main/java/cbit/vcell/client/data/ODEDataInterface.java b/vcell-client/src/main/java/cbit/vcell/client/data/ODEDataInterface.java index 5507c124a9..4c2166b09b 100644 --- a/vcell-client/src/main/java/cbit/vcell/client/data/ODEDataInterface.java +++ b/vcell-client/src/main/java/cbit/vcell/client/data/ODEDataInterface.java @@ -1,8 +1,8 @@ package cbit.vcell.client.data; import java.beans.PropertyChangeListener; -import java.util.LinkedHashMap; +import cbit.vcell.simdata.SummaryStatisticType; import org.vcell.util.ObjectNotFoundException; import cbit.vcell.math.FunctionColumnDescription; @@ -12,13 +12,6 @@ import cbit.vcell.util.ColumnDescription; public interface ODEDataInterface { - - public enum PlotType { - Min, - Max, - Mean, - Std - } void removePropertyChangeListener(PropertyChangeListener propertyChangeListener); @@ -35,9 +28,8 @@ public enum PlotType { FunctionColumnDescription[] getFunctionColumnDescriptions(); - LinkedHashMap parseHDF5File() throws ExpressionException,ObjectNotFoundException; double[] extractColumn(String columnName) throws ExpressionException,ObjectNotFoundException; - double[] extractColumn(String columnName, PlotType plotType) throws ExpressionException,ObjectNotFoundException; + double[] extractColumn(String columnName, SummaryStatisticType summaryStatisticType) throws ExpressionException,ObjectNotFoundException; // double[] extractColumnMin(String columnName) throws ExpressionException,ObjectNotFoundException; // double[] extractColumnStd(String columnName) throws ExpressionException,ObjectNotFoundException; diff --git a/vcell-client/src/main/java/cbit/vcell/client/data/ODEDataInterfaceImpl.java b/vcell-client/src/main/java/cbit/vcell/client/data/ODEDataInterfaceImpl.java index 112ac42a43..7342b94417 100644 --- a/vcell-client/src/main/java/cbit/vcell/client/data/ODEDataInterfaceImpl.java +++ b/vcell-client/src/main/java/cbit/vcell/client/data/ODEDataInterfaceImpl.java @@ -1,20 +1,9 @@ package cbit.vcell.client.data; -import java.beans.PropertyChangeListener; -import java.beans.PropertyChangeSupport; -import java.io.File; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.LinkedHashMap; -import java.util.List; - -import org.vcell.util.ObjectNotFoundException; -import org.vcell.util.document.VCDataIdentifier; - -import com.google.common.io.Files; - import cbit.vcell.math.FunctionColumnDescription; import cbit.vcell.parser.ExpressionException; +import cbit.vcell.simdata.MultiTrialNonspatialStochSimDataReader; +import cbit.vcell.simdata.SummaryStatisticType; import 
cbit.vcell.solver.DataSymbolMetadata; import cbit.vcell.solver.SimulationModelInfo; import cbit.vcell.solver.SimulationModelInfo.DataSymbolMetadataResolver; @@ -22,10 +11,12 @@ import cbit.vcell.solver.ode.ODESimData; import cbit.vcell.solver.ode.ODESolverResultSet; import cbit.vcell.util.ColumnDescription; -import ncsa.hdf.object.FileFormat; -import ncsa.hdf.object.Group; -import ncsa.hdf.object.HObject; -import ncsa.hdf.object.h5.H5ScalarDS; +import org.vcell.util.ObjectNotFoundException; +import org.vcell.util.document.VCDataIdentifier; + +import java.beans.PropertyChangeListener; +import java.beans.PropertyChangeSupport; +import java.util.ArrayList; class ODEDataInterfaceImpl implements ODEDataInterface { @@ -107,117 +98,13 @@ public double[] extractColumn(String columnName) throws ExpressionException,Obje } @Override - public LinkedHashMap parseHDF5File() { - FileFormat hdf5FileFormat = null; - File to = null; - LinkedHashMap valueToIndexMap = new LinkedHashMap<>(); - try { - ODESolverResultSet osrs = getOdeSolverResultSet(); - if(osrs instanceof ODESimData) { - byte[] hdf5FileBytes = ((ODESimData)getOdeSolverResultSet()).getHdf5FileBytes(); - if(hdf5FileBytes != null) { - to = File.createTempFile("odeStats_"+simulationModelInfo.getSimulationName(), ".hdf5"); - Files.write(hdf5FileBytes, to); - FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5); - if (fileFormat == null){ - throw new Exception("Cannot find HDF5 FileFormat."); - } - // open the file with read-only access - hdf5FileFormat = fileFormat.createInstance(to.getAbsolutePath(), FileFormat.READ); - // open the file and retrieve the file structure - hdf5FileFormat.open(); - Group root = (Group)((javax.swing.tree.DefaultMutableTreeNode)hdf5FileFormat.getRootNode()).getUserObject(); - List postProcessMembers = ((Group)root).getMemberList(); - - HObject varNames = null; - for(HObject nextHObject : postProcessMembers) { - if(nextHObject.getName().equals("VarNames")) { - varNames = nextHObject; - break; - // SimTimes - // StatMax - // StatMean - // StatMin - // StatStdDev - // VarNames - } - } - H5ScalarDS h5ScalarDS = (H5ScalarDS)varNames; - h5ScalarDS.init(); - try { - long[] dims = h5ScalarDS.getDims(); - System.out.println("---"+varNames.getName()+" "+varNames.getClass().getName()+" Dimensions="+Arrays.toString(dims)); - Object obj = h5ScalarDS.read(); - String[] values = (String[])obj; - for(int i=0; i postProcessMembers = ((Group)root).getMemberList(); - for(HObject nextHObject : postProcessMembers) { - System.out.println(nextHObject.getName()+" "+nextHObject.getClass().getName()); - H5ScalarDS h5ScalarDS = (H5ScalarDS)nextHObject; - h5ScalarDS.init(); - try { - long[] dims = h5ScalarDS.getDims(); - System.out.println("---"+nextHObject.getName()+" "+nextHObject.getClass().getName()+" Dimensions="+Arrays.toString(dims)); - Object obj = h5ScalarDS.read(); - if(dims.length == 2) { - double[] columns = new double[(int)dims[1]]; - for(int row=0;row postProcessMembers = ((Group)root).getMemberList(); - for(HObject nextHObject:postProcessMembers){ - //System.out.println(nextHObject.getName()+"\n"+nextHObject.getClass().getName()); - H5ScalarDS h5ScalarDS = (H5ScalarDS)nextHObject; - h5ScalarDS.init(); - try { - long[] dims = h5ScalarDS.getDims(); - System.out.println("---"+nextHObject.getName()+" "+nextHObject.getClass().getName()+" Dimensions="+Arrays.toString(dims)); - Object obj = h5ScalarDS.read(); - if(dims.length == 2) { - //dims[0]=numTimes (will be the same as 'SimTimes' data length) - 
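The panel-local PlotType enum (Min, Max, Mean, Std) removed above is replaced by the new eight-line core file SummaryStatisticType, whose body this excerpt does not show. A minimal sketch, assuming it carries the same four constants:

    package cbit.vcell.simdata;

    // Assumed contents of the new file; the constants mirror the removed PlotType.
    public enum SummaryStatisticType {
        Min, Max, Mean, Std
    }

Call sites then read, for example, odeDataInterface.extractColumn("s0", SummaryStatisticType.Mean), matching the revised interface method above ("s0" is an illustrative variable name).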
//dims[1]=numVars (will be the same as 'VarNames' data length) - //if name='StatMean' this is the same as the default data saved in the odeSolverresultSet - double[] columns = new double[(int)dims[1]]; - for(int row=0;row postProcessMembers = ((Group)root).getMemberList(); + for(HObject nextHObject : postProcessMembers) { + System.out.println(nextHObject.getName()+" "+nextHObject.getClass().getName()); + H5ScalarDS h5ScalarDS = (H5ScalarDS)nextHObject; + h5ScalarDS.init(); + try { + long[] dims = h5ScalarDS.getDims(); + System.out.println("---"+nextHObject.getName()+" "+nextHObject.getClass().getName()+" Dimensions="+Arrays.toString(dims)); + Object obj = h5ScalarDS.read(); + if(dims.length == 2) { + double[] columns = new double[(int)dims[1]]; + for(int row=0;row postProcessMembers = ((Group)root).getMemberList(); + for(HObject nextHObject:postProcessMembers){ + //System.out.println(nextHObject.getName()+"\n"+nextHObject.getClass().getName()); + H5ScalarDS h5ScalarDS = (H5ScalarDS)nextHObject; + h5ScalarDS.init(); + try { + long[] dims = h5ScalarDS.getDims(); + System.out.println("---"+nextHObject.getName()+" "+nextHObject.getClass().getName()+" Dimensions="+ Arrays.toString(dims)); + Object obj = h5ScalarDS.read(); + if(dims.length == 2) { + //dims[0]=numTimes (will be the same as 'SimTimes' data length) + //dims[1]=numVars (will be the same as 'VarNames' data length) + //if name='StatMean' this is the same as the default data saved in the odeSolverresultSet + double[] columns = new double[(int)dims[1]]; + for(int row=0;row Date: Wed, 24 Apr 2024 11:55:26 -0400 Subject: [PATCH 07/16] separate HDF5 code for MovingBoundary and Chombo solvers. --- .../vcell/simdata/ChomboSimDataReader.java | 310 +++++++++++ ...=> ChomboSimpleSimDataReader_NotUsed.java} | 67 +-- .../main/java/cbit/vcell/simdata/DataSet.java | 490 +----------------- .../simdata/MovingBoundarySimDataReader.java | 247 +++++++++ .../cbit/vcell/simdata/SimDataReader.java | 110 +--- .../cbit/vcell/simdata/SimulationData.java | 5 +- 6 files changed, 611 insertions(+), 618 deletions(-) create mode 100644 vcell-core/src/main/java/cbit/vcell/simdata/ChomboSimDataReader.java rename vcell-core/src/main/java/cbit/vcell/simdata/{SimulationDataSpatialHdf5.java => ChomboSimpleSimDataReader_NotUsed.java} (87%) create mode 100644 vcell-core/src/main/java/cbit/vcell/simdata/MovingBoundarySimDataReader.java diff --git a/vcell-core/src/main/java/cbit/vcell/simdata/ChomboSimDataReader.java b/vcell-core/src/main/java/cbit/vcell/simdata/ChomboSimDataReader.java new file mode 100644 index 0000000000..44fb73623b --- /dev/null +++ b/vcell-core/src/main/java/cbit/vcell/simdata/ChomboSimDataReader.java @@ -0,0 +1,310 @@ +package cbit.vcell.simdata; + +import cbit.vcell.math.InsideVariable; +import cbit.vcell.math.OutsideVariable; +import cbit.vcell.math.Variable; +import ncsa.hdf.object.*; +import org.apache.commons.compress.archivers.zip.ZipArchiveEntry; +import org.apache.commons.compress.archivers.zip.ZipFile; +import org.vcell.util.DataAccessException; + +import javax.swing.tree.DefaultMutableTreeNode; +import java.io.*; +import java.util.List; +import java.util.Vector; +import java.util.zip.ZipEntry; + +public class ChomboSimDataReader { + public static void getNextDataAtCurrentTimeChombo(double[][] returnValues, ZipFile currentZipFile, String[] varNames, int[][] varIndexes, String[] simDataFileNames, int masterTimeIndex) throws Exception { + File tempFile = null; + FileFormat solFile = null; + try { + tempFile = createTempHdf5File(currentZipFile, 
simDataFileNames[masterTimeIndex]); + + FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5); + solFile = fileFormat.createInstance(tempFile.getAbsolutePath(), FileFormat.READ); + solFile.open(); + + for(int k = 0; k < varNames.length; ++ k) { + try { + boolean bExtrapolatedValue = false; + String varName = varNames[k]; + if (varName.endsWith(InsideVariable.INSIDE_VARIABLE_SUFFIX)) + { + bExtrapolatedValue = true; + varName = varName.substring(0, varName.lastIndexOf(InsideVariable.INSIDE_VARIABLE_SUFFIX)); + } + else if (varName.endsWith(OutsideVariable.OUTSIDE_VARIABLE_SUFFIX)) + { + bExtrapolatedValue = true; + varName = varName.substring(0, varName.lastIndexOf(OutsideVariable.OUTSIDE_VARIABLE_SUFFIX)); + } + double[] sol = null; + if (bExtrapolatedValue) + { + sol = readChomboExtrapolatedValues(varName, solFile); + } + else + { + String varPath = Hdf5Utils.getVarSolutionPath(varNames[k]); + HObject solObj = FileFormat.findObject(solFile, varPath); + if (solObj instanceof Dataset) { + Dataset dataset = (Dataset)solObj; + sol = (double[]) dataset.read(); + } + } + if (sol != null) + { + for(int l = 0;l < varIndexes[k].length; ++ l) { + int idx = varIndexes[k][l]; + double val = sol[idx]; + returnValues[k][l] = val; + } + } + } catch (Exception e) { + throw new DataAccessException(e.getMessage(), e); + } + } + } finally { + try { + if (solFile != null) { + solFile.close(); + } + if (tempFile != null) { + if (!tempFile.delete()) { + System.err.println("couldn't delete temp file " + tempFile.getAbsolutePath()); + } + } + } catch(Exception e) { + // ignore + } + } + } + + public static void readHdf5SolutionMetaData(InputStream is, Vector dataBlockList) throws Exception + { + File tempFile = null; + FileFormat solFile = null; + try{ + tempFile = createTempHdf5File(is); + + FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5); + solFile = fileFormat.createInstance(tempFile.getAbsolutePath(), FileFormat.READ); + solFile.open(); + DefaultMutableTreeNode rootNode = (DefaultMutableTreeNode)solFile.getRootNode(); + Group rootGroup = (Group)rootNode.getUserObject(); + List solGroups = rootGroup.getMemberList(); + + for (HObject memberGroup : solGroups) + { + if (memberGroup instanceof Group && memberGroup.getName().equals("solution")) + { + Group solGroup = (Group) memberGroup; + List memberList = solGroup.getMemberList(); + for (HObject member : memberList) + { + if (!(member instanceof Dataset)){ + continue; + } + Dataset dataset = (Dataset)member; + String dsname = dataset.getName(); + int vt = -1; + String domain = null; + List solAttrList = dataset.getMetadata(); + for (Attribute attr : solAttrList) + { + String attrName = attr.getName(); + if(attrName.equals("variable type")){ + Object obj = attr.getValue(); + vt = ((int[])obj)[0]; + } else if (attrName.equals("domain")) { + Object obj = attr.getValue(); + domain = ((String[])obj)[0]; + } + } + long[] dims = dataset.getDims(); + String varName = domain == null ? 
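getNextDataAtCurrentTimeChombo above routes inside/outside variable names to the extrapolated-values reader after stripping their suffix. The rule in isolation (the "_INSIDE" literal below is illustrative; the real constants come from InsideVariable and OutsideVariable):

    // Sketch of the suffix handling above, e.g. "Ca_cyt_INSIDE" -> "Ca_cyt".
    static String stripExtrapolationSuffix(String varName, String suffix) {
        if (varName.endsWith(suffix)) {
            return varName.substring(0, varName.lastIndexOf(suffix));
        }
        return varName;   // plain variable: read the regular solution dataset instead
    }
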
dsname : domain + Variable.COMBINED_IDENTIFIER_SEPARATOR + dsname; + dataBlockList.addElement(cbit.vcell.simdata.DataBlock.createDataBlock(varName, vt, (int) dims[0], 0)); + } + break; + } + } + } finally { + try { + if (solFile != null) { + solFile.close(); + } + if (tempFile != null) { + if (!tempFile.delete()) { + System.err.println("couldn't delete temp file " + tempFile); + } + } + } catch(Exception e) { + // ignore + } + } + } + + public static double[] readHdf5VariableSolution(File zipfile, String fileName, String varName) throws Exception{ + + File tempFile = null; + FileFormat solFile = null; + try{ + tempFile = createTempHdf5File(zipfile, fileName); + + FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5); + solFile = fileFormat.createInstance(tempFile.getAbsolutePath(), FileFormat.READ); + solFile.open(); + if (varName != null) + { + String varPath = Hdf5Utils.getVarSolutionPath(varName); + HObject solObj = FileFormat.findObject(solFile, varPath); + if (solObj instanceof Dataset) + { + Dataset dataset = (Dataset)solObj; + return (double[]) dataset.read(); + } + } + } finally { + try { + if (solFile != null) { + solFile.close(); + } + if (tempFile != null) { + if (!tempFile.delete()) { + System.err.println("couldn't delete temp file " + tempFile.getAbsolutePath()); + } + } + } catch(Exception e) { + // ignore + } + } + return null; + } + + public static double[] readChomboExtrapolatedValues(String varName, File pdeFile, File zipFile) throws IOException { + double[] data = null; + if (zipFile != null && DataSet.isChombo(zipFile)) { + File tempFile = null; + FileFormat solFile = null; + try{ + tempFile = createTempHdf5File(zipFile, pdeFile.getName()); + + FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5); + solFile = fileFormat.createInstance(tempFile.getAbsolutePath(), FileFormat.READ); + solFile.open(); + data = readChomboExtrapolatedValues(varName, solFile); + } catch(Exception e) { + throw new IOException(e.getMessage(), e); + } finally { + try { + if (solFile != null) { + solFile.close(); + } + if (tempFile != null) { + if (!tempFile.delete()) { + System.err.println("couldn't delete temp file " + tempFile.getAbsolutePath()); + } + } + } catch(Exception e) { + // ignore + } + } + } + return data; + } + + private static double[] readChomboExtrapolatedValues(String varName, FileFormat solFile) throws Exception { + double data[] = null; + if (varName != null) + { + String varPath = Hdf5Utils.getVolVarExtrapolatedValuesPath(varName); + HObject solObj = FileFormat.findObject(solFile, varPath); + if (solObj == null) + { + throw new IOException("Extrapolated values for variable '" + varName + "' does not exist in the results."); + } + if (solObj instanceof Dataset) + { + Dataset dataset = (Dataset)solObj; + return (double[]) dataset.read(); + } + } + return data; + } + + private static File createTempHdf5File(File zipFile, String fileName) throws IOException + { + ZipFile zipZipFile = null; + try + { + zipZipFile = DataSet.openZipFile(zipFile); + return createTempHdf5File(zipZipFile, fileName); + } + finally + { + try + { + if (zipZipFile != null) + { + zipZipFile.close(); + } + } + catch (Exception ex) + { + // ignore + } + } + } + + private static File createTempHdf5File(ZipFile zipFile, String fileName) throws IOException + { + InputStream is = null; + try + { + ZipEntry dataEntry = zipFile.getEntry(fileName); + is = zipFile.getInputStream((ZipArchiveEntry) dataEntry); + return createTempHdf5File(is); + } + finally + { + try + { + if 
(is != null) + { + is.close(); + } + } + catch (Exception ex) + { + // ignore + } + } + } + + private static File createTempHdf5File(InputStream is) throws IOException + { + OutputStream out = null; + try{ + File tempFile = File.createTempFile("temp", "hdf5"); + out=new FileOutputStream(tempFile); + byte buf[] = new byte[1024]; + int len; + while((len=is.read(buf))>0) { + out.write(buf,0,len); + } + return tempFile; + } + finally + { + try { + if (out != null) { + out.close(); + } + } catch (Exception ex) { + // ignore + } + } + } + +} diff --git a/vcell-core/src/main/java/cbit/vcell/simdata/SimulationDataSpatialHdf5.java b/vcell-core/src/main/java/cbit/vcell/simdata/ChomboSimpleSimDataReader_NotUsed.java similarity index 87% rename from vcell-core/src/main/java/cbit/vcell/simdata/SimulationDataSpatialHdf5.java rename to vcell-core/src/main/java/cbit/vcell/simdata/ChomboSimpleSimDataReader_NotUsed.java index 31272bc783..f3d83973c1 100644 --- a/vcell-core/src/main/java/cbit/vcell/simdata/SimulationDataSpatialHdf5.java +++ b/vcell-core/src/main/java/cbit/vcell/simdata/ChomboSimpleSimDataReader_NotUsed.java @@ -1,23 +1,13 @@ package cbit.vcell.simdata; -import java.io.BufferedInputStream; -import java.io.File; -import java.io.FileNotFoundException; -import java.io.FileOutputStream; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.StringTokenizer; -import java.util.Vector; -import java.util.zip.ZipEntry; - -import javax.swing.tree.DefaultMutableTreeNode; - +import cbit.vcell.math.Variable; +import cbit.vcell.math.Variable.Domain; +import cbit.vcell.math.VariableType; +import cbit.vcell.mongodb.VCMongoMessage; +import ncsa.hdf.hdf5lib.H5; +import ncsa.hdf.object.*; +import ncsa.hdf.object.h5.H5CompoundDS; import org.apache.commons.compress.archivers.zip.ZipArchiveEntry; -//import java.util.zip.ZipFile; import org.apache.commons.compress.archivers.zip.ZipFile; import org.vcell.chombo.ChomboBox; import org.vcell.util.DataAccessException; @@ -26,19 +16,12 @@ import org.vcell.util.Origin; import org.vcell.util.document.VCDataIdentifier; -import cbit.vcell.math.Variable; -import cbit.vcell.math.Variable.Domain; -import cbit.vcell.math.VariableType; -import cbit.vcell.mongodb.VCMongoMessage; -import ncsa.hdf.hdf5lib.H5; -import ncsa.hdf.object.Attribute; -import ncsa.hdf.object.Dataset; -import ncsa.hdf.object.FileFormat; -import ncsa.hdf.object.Group; -import ncsa.hdf.object.HObject; -import ncsa.hdf.object.h5.H5CompoundDS; +import javax.swing.tree.DefaultMutableTreeNode; +import java.io.*; +import java.util.*; +import java.util.zip.ZipEntry; -public class SimulationDataSpatialHdf5 +public class ChomboSimpleSimDataReader_NotUsed { public static class SimLogFileEntry { @@ -168,7 +151,7 @@ public static class SimDataSet private long logFileLength = 0; private ChomboMesh chomboMesh; - public SimulationDataSpatialHdf5(VCDataIdentifier argVCDataID, File primaryUserDir, File secondaryUserDir) + public ChomboSimpleSimDataReader_NotUsed(VCDataIdentifier argVCDataID, File primaryUserDir, File secondaryUserDir) throws IOException, DataAccessException { this.vcDataId = argVCDataID; @@ -184,11 +167,11 @@ public SimulationDataSpatialHdf5(VCDataIdentifier argVCDataID, File primaryUserD "does not exist in primary [" + primaryUserDir + "] or secondary [" + secondaryUserDir + "] user directory ."); } } - VCMongoMessage.sendTrace("SimulationDataSpatialHdf5.SimulationDataSpatialHdf5() 
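createTempHdf5File(InputStream) above copies the stream with a manual 1 KB buffer and nested try/finally blocks. Since Java 7 the same behavior can be written more compactly with try-with-resources and Files.copy; a possible simplification, not what the patch itself does:

    import java.io.File;
    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.StandardCopyOption;

    static File createTempHdf5File(InputStream is) throws IOException {
        File tempFile = File.createTempFile("temp", "hdf5");
        try (InputStream in = is) {   // note: closes the stream for the caller
            // REPLACE_EXISTING is required because createTempFile already made the file.
            Files.copy(in, tempFile.toPath(), StandardCopyOption.REPLACE_EXISTING);
        }
        return tempFile;
    }
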
<>"); + VCMongoMessage.sendTrace("ChomboSimpleSimDataReader_NotUsed.ChomboSimpleSimDataReader_NotUsed() <>"); } public synchronized void readVarAndFunctionDataIdentifiers() throws Exception { - VCMongoMessage.sendTrace("SimulationDataSpatialHdf5.readVarAndFunctionDataIdentifiers Entry"); + VCMongoMessage.sendTrace("ChomboSimpleSimDataReader_NotUsed.readVarAndFunctionDataIdentifiers Entry"); readLogFile(); if(chomboMesh == null){ chomboMesh = readMeshFile(new File(userDirectory, getMeshFileName())); @@ -371,15 +354,15 @@ private File findLogFile() { if (logFile == null) { logFile = new File(userDirectory, getLogFileName()); - VCMongoMessage.sendTrace("SimulationDataSpatialHdf5.getLogFile() <> calling logile.exists()"); + VCMongoMessage.sendTrace("ChomboSimpleSimDataReader_NotUsed.getLogFile() <> calling logFile.exists()"); if (logFile.exists()) { - VCMongoMessage.sendTrace("SimulationDataSpatialHdf5.getLogFile() <> file found"); + VCMongoMessage.sendTrace("ChomboSimpleSimDataReader_NotUsed.getLogFile() <> file found"); } else { logFile = null; - VCMongoMessage.sendTrace("SimulationDataSpatialHdf5.getLogFile() <> file found"); + VCMongoMessage.sendTrace("ChomboSimpleSimDataReader_NotUsed.getLogFile() <> file not found"); } } @@ -391,16 +374,16 @@ private File findLogFile() { * @throws IOException */ private synchronized void readLogFile() throws DataAccessException, IOException { - VCMongoMessage.sendTrace("SimulationDataSpatialHdf5.readLog() <>"); + VCMongoMessage.sendTrace("ChomboSimpleSimDataReader_NotUsed.readLog() <>"); if (logFile == null){ - VCMongoMessage.sendTrace("SimulationDataSpatialHdf5.readLog() log file not found <>"); + VCMongoMessage.sendTrace("ChomboSimpleSimDataReader_NotUsed.readLog() log file not found <>"); throw new DataAccessException("log file not found for " + vcDataId); } - VCMongoMessage.sendTrace("SimulationDataSpatialHdf5.readLog() logFile exists"); + VCMongoMessage.sendTrace("ChomboSimpleSimDataReader_NotUsed.readLog() logFile exists"); long length = logFile.length(); long lastModified = logFile.lastModified(); if (lastModified == logFileLastModified && logFileLength == length) { - VCMongoMessage.sendTrace("SimulationDataSpatialHdf5.readLog() hasn't been modified ... <>"); + VCMongoMessage.sendTrace("ChomboSimpleSimDataReader_NotUsed.readLog() hasn't been modified ... 
<>"); return; } @@ -414,13 +397,13 @@ private synchronized void readLogFile() throws DataAccessException, IOException // String logfileContent = FileUtils.readFileToString(logFile); if (logfileContent.length() != logFileLength){ - System.out.println("SimulationDataSpatialHdf5.readLog(), read "+logfileContent.length()+" of "+logFileLength+" bytes of log file"); + System.out.println("ChomboSimpleSimDataReader_NotUsed.readLog(), read "+logfileContent.length()+" of "+logFileLength+" bytes of log file"); } StringTokenizer st = new StringTokenizer(logfileContent); // so parse into 'dataFilenames' and 'dataTimes' arrays if (st.countTokens() % 4 != 0) { - throw new DataAccessException("SimulationDataSpatialHdf5.readLog(), tokens in each line should be factor of 4"); + throw new DataAccessException("ChomboSimpleSimDataReader_NotUsed.readLog(), tokens in each line should be factor of 4"); } while (st.hasMoreTokens()){ @@ -430,7 +413,7 @@ private synchronized void readLogFile() throws DataAccessException, IOException double time = Double.parseDouble(st.nextToken()); logfileEntryList.add(new SimLogFileEntry(iteration, simFileName, zipFileName, time)); } - VCMongoMessage.sendTrace("SimulationDataSpatialHdf5.readLog() <>"); + VCMongoMessage.sendTrace("ChomboSimpleSimDataReader_NotUsed.readLog() <>"); } public double[] getDataTimes() { diff --git a/vcell-core/src/main/java/cbit/vcell/simdata/DataSet.java b/vcell-core/src/main/java/cbit/vcell/simdata/DataSet.java index 7927f3a58e..472e1ac1ce 100644 --- a/vcell-core/src/main/java/cbit/vcell/simdata/DataSet.java +++ b/vcell-core/src/main/java/cbit/vcell/simdata/DataSet.java @@ -19,31 +19,19 @@ import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; -import java.io.OutputStream; -import java.util.List; import java.util.Vector; //import java.util.zip.ZipEntry; -import java.util.zip.ZipEntry; //import java.util.zip.ZipFile; -import javax.swing.tree.DefaultMutableTreeNode; - +import cbit.vcell.solvers.mb.MovingBoundaryReader; import org.apache.commons.compress.archivers.zip.ZipArchiveEntry; import org.apache.commons.compress.archivers.zip.ZipFile; -import cbit.vcell.math.Variable; import cbit.vcell.math.VariableType; import cbit.vcell.simdata.SimulationData.SolverDataType; -import cbit.vcell.solvers.CartesianMeshMovingBoundary.MBSDataGroup; -import cbit.vcell.solvers.CartesianMeshMovingBoundary.MSBDataAttribute; -import cbit.vcell.solvers.CartesianMeshMovingBoundary.MSBDataAttributeValue; -import ncsa.hdf.object.Attribute; -import ncsa.hdf.object.Dataset; -import ncsa.hdf.object.FileFormat; -import ncsa.hdf.object.Group; -import ncsa.hdf.object.HObject; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.vcell.vis.io.ChomboFileReader; public class DataSet implements java.io.Serializable { @@ -85,7 +73,7 @@ public static double[] fetchSimData(String varName, File file) throws IOExceptio if (solverDataType == SolverDataType.MBSData) { try { - data = readMBSData(varName, time); + data = MovingBoundarySimDataReader.readMBSData(fileName, dataBlockList, varName, time); } catch(Exception e) { throw new IOException(e.getMessage(), e); } @@ -94,7 +82,7 @@ public static double[] fetchSimData(String varName, File file) throws IOExceptio { if (zipFile != null && isChombo(zipFile)) { try { - data = readHdf5VariableSolution(zipFile, new File(fileName).getName(), varName); + data = ChomboSimDataReader.readHdf5VariableSolution(zipFile, new File(fileName).getName(), varName); } catch(Exception e) { 
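For reference, the layout that readLogFile above enforces with its multiple-of-4 token check is one entry of iteration, sim file name, zip file name, and time. The hunk elides the first three nextToken() calls, so their order below is inferred from the SimLogFileEntry constructor arguments:

    import java.util.StringTokenizer;

    public class LogEntryDemo {
        public static void main(String[] args) {
            String logfileContent = "4 SimID_1_0_0000.sim SimID_1_0_00.zip 0.05";  // illustrative entry
            StringTokenizer st = new StringTokenizer(logfileContent);
            if (st.countTokens() % 4 != 0) {
                throw new IllegalArgumentException("token count should be a multiple of 4");
            }
            while (st.hasMoreTokens()) {
                int iteration = Integer.parseInt(st.nextToken());
                String simFileName = st.nextToken();
                String zipFileName = st.nextToken();
                double time = Double.parseDouble(st.nextToken());
                System.out.println(iteration + ": " + simFileName + ", " + zipFileName + ", t=" + time);
            }
        }
    }
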
throw new IOException(e.getMessage(), e); } @@ -250,7 +238,7 @@ int[] getVariableTypeIntegers() { * Creation date: (6/23/2004 9:37:26 AM) * @return java.util.zip.ZipFile */ -protected static ZipFile openZipFile(File zipFile) throws IOException { +public static ZipFile openZipFile(File zipFile) throws IOException { for (int i = 0; i < 20; i ++) { try { return new org.apache.commons.compress.archivers.zip.ZipFile(zipFile); @@ -288,7 +276,7 @@ void read(File file, File zipFile, SolverDataType solverDataType) throws IOExcep if (solverDataType == SolverDataType.MBSData) { try { - readMBSDataMetadata(); + MovingBoundarySimDataReader.readMBSDataMetadata(fileName, dataBlockList); } catch (Exception e) { throw new IOException(e.getMessage(),e); } @@ -324,7 +312,7 @@ void read(File file, File zipFile, SolverDataType solverDataType) throws IOExcep if(is != null && zipFile!=null && isChombo(zipFile)){ try { - readHdf5SolutionMetaData(is); + ChomboSimDataReader.readHdf5SolutionMetaData(is, dataBlockList); } catch (Exception e) { throw new IOException(e.getMessage(),e); } @@ -349,187 +337,12 @@ void read(File file, File zipFile, SolverDataType solverDataType) throws IOExcep } } -private static boolean isChombo(File zipFile){ +public static boolean isChombo(File zipFile){ return zipFile.getName().endsWith(".hdf5.zip"); } -private static File createTempHdf5File(InputStream is) throws IOException -{ - OutputStream out = null; - try{ - File tempFile = File.createTempFile("temp", "hdf5"); - out=new FileOutputStream(tempFile); - byte buf[] = new byte[1024]; - int len; - while((len=is.read(buf))>0) { - out.write(buf,0,len); - } - return tempFile; - } - finally - { - try { - if (out != null) { - out.close(); - } - } catch (Exception ex) { - // ignore - } - } -} - -static File createTempHdf5File(ZipFile zipFile, String fileName) throws IOException -{ - InputStream is = null; - try - { - ZipEntry dataEntry = zipFile.getEntry(fileName); - is = zipFile.getInputStream((ZipArchiveEntry) dataEntry); - return createTempHdf5File(is); - } - finally - { - try - { - if (is != null) - { - is.close(); - } - } - catch (Exception ex) - { - // ignore - } - } -} - - -private static File createTempHdf5File(File zipFile, String fileName) throws IOException -{ - ZipFile zipZipFile = null; - try - { - zipZipFile = openZipFile(zipFile); - return createTempHdf5File(zipZipFile, fileName); - } - finally - { - try - { - if (zipZipFile != null) - { - zipZipFile.close(); - } - } - catch (Exception ex) - { - // ignore - } - } -} -private void readHdf5SolutionMetaData(InputStream is) throws Exception -{ - File tempFile = null; - FileFormat solFile = null; - try{ - tempFile = createTempHdf5File(is); - - FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5); - solFile = fileFormat.createInstance(tempFile.getAbsolutePath(), FileFormat.READ); - solFile.open(); - DefaultMutableTreeNode rootNode = (DefaultMutableTreeNode)solFile.getRootNode(); - Group rootGroup = (Group)rootNode.getUserObject(); - List solGroups = rootGroup.getMemberList(); - - for (HObject memberGroup : solGroups) - { - if (memberGroup instanceof Group && memberGroup.getName().equals("solution")) - { - Group solGroup = (Group) memberGroup; - List memberList = solGroup.getMemberList(); - for (HObject member : memberList) - { - if (!(member instanceof Dataset)){ - continue; - } - Dataset dataset = (Dataset)member; - String dsname = dataset.getName(); - int vt = -1; - String domain = null; - List solAttrList = dataset.getMetadata(); - for (Attribute attr : 
solAttrList) - { - String attrName = attr.getName(); - if(attrName.equals("variable type")){ - Object obj = attr.getValue(); - vt = ((int[])obj)[0]; - } else if (attrName.equals("domain")) { - Object obj = attr.getValue(); - domain = ((String[])obj)[0]; - } - } - long[] dims = dataset.getDims(); - String varName = domain == null ? dsname : domain + Variable.COMBINED_IDENTIFIER_SEPARATOR + dsname; - dataBlockList.addElement(DataBlock.createDataBlock(varName, vt, (int) dims[0], 0)); - } - break; - } - } - } finally { - try { - if (solFile != null) { - solFile.close(); - } - if (tempFile != null) { - if (!tempFile.delete()) { - System.err.println("couldn't delete temp file " + tempFile); - } - } - } catch(Exception e) { - // ignore - } - } -} - - -static double[] readHdf5VariableSolution(File zipfile, String fileName, String varName) throws Exception{ - - File tempFile = null; - FileFormat solFile = null; - try{ - tempFile = createTempHdf5File(zipfile, fileName); - - FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5); - solFile = fileFormat.createInstance(tempFile.getAbsolutePath(), FileFormat.READ); - solFile.open(); - if (varName != null) - { - String varPath = Hdf5Utils.getVarSolutionPath(varName); - HObject solObj = FileFormat.findObject(solFile, varPath); - if (solObj instanceof Dataset) - { - Dataset dataset = (Dataset)solObj; - return (double[]) dataset.read(); - } - } - } finally { - try { - if (solFile != null) { - solFile.close(); - } - if (tempFile != null) { - if (!tempFile.delete()) { - System.err.println("couldn't delete temp file " + tempFile.getAbsolutePath()); - } - } - } catch(Exception e) { - // ignore - } - } - return null; -} -public static void writeNew(File file, String[] varNameArr, VariableType[] varTypeArr, org.vcell.util.ISize size, double[][] dataArr) throws IOException { + public static void writeNew(File file, String[] varNameArr, VariableType[] varTypeArr, org.vcell.util.ISize size, double[][] dataArr) throws IOException { FileOutputStream fos = null; BufferedOutputStream bos = null; @@ -580,289 +393,4 @@ public static void writeNew(File file, String[] varNameArr, VariableType[] varTy } } - static double[] readChomboExtrapolatedValues(String varName, File pdeFile, File zipFile) throws IOException { - double[] data = null; - if (zipFile != null && isChombo(zipFile)) { - File tempFile = null; - FileFormat solFile = null; - try{ - tempFile = createTempHdf5File(zipFile, pdeFile.getName()); - - FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5); - solFile = fileFormat.createInstance(tempFile.getAbsolutePath(), FileFormat.READ); - solFile.open(); - data = readChomboExtrapolatedValues(varName, solFile); - } catch(Exception e) { - throw new IOException(e.getMessage(), e); - } finally { - try { - if (solFile != null) { - solFile.close(); - } - if (tempFile != null) { - if (!tempFile.delete()) { - System.err.println("couldn't delete temp file " + tempFile.getAbsolutePath()); - } - } - } catch(Exception e) { - // ignore - } - } - } - return data; - } - - static double[] readChomboExtrapolatedValues(String varName, FileFormat solFile) throws Exception { - double data[] = null; - if (varName != null) - { - String varPath = Hdf5Utils.getVolVarExtrapolatedValuesPath(varName); - HObject solObj = FileFormat.findObject(solFile, varPath); - if (solObj == null) - { - throw new IOException("Extrapolated values for variable '" + varName + "' does not exist in the results."); - } - if (solObj instanceof Dataset) - { - Dataset dataset = 
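With the Chombo HDF5 reading relocated, callers such as DataSet.fetchSimData in the earlier hunk go through ChomboSimDataReader instead of private DataSet helpers. A usage sketch with illustrative file and variable names:

    import java.io.File;
    import cbit.vcell.simdata.ChomboSimDataReader;

    public class ReadOneVariable {
        public static void main(String[] args) throws Exception {
            File zip = new File("SimID_123_0_.hdf5.zip");   // illustrative zip of HDF5 outputs
            String entryName = "SimID_123_0_0000.hdf5";     // illustrative entry inside the zip
            double[] sol = ChomboSimDataReader.readHdf5VariableSolution(zip, entryName, "Ca_cyt");
            System.out.println(sol == null ? "variable not found" : sol.length + " values read");
        }
    }
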
(Dataset)solObj; - return (double[]) dataset.read(); - } - } - return data; - } - - private void readMBSDataMetadata() throws Exception - { - FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5); - FileFormat solFile = null; - try { - solFile = fileFormat.createInstance(fileName, FileFormat.READ); - solFile.open(); - DefaultMutableTreeNode rootNode = (DefaultMutableTreeNode)solFile.getRootNode(); - Group rootGroup = (Group)rootNode.getUserObject(); - Group solutionGroup = null; - for (Object member : rootGroup.getMemberList()) - { - String memberName = ((HObject)member).getName(); - if (member instanceof Group) - { - MBSDataGroup group = MBSDataGroup.valueOf(memberName); - if (group == MBSDataGroup.Solution) - { - solutionGroup = (Group) member; - break; - } - } - } - if (solutionGroup == null) - { - throw new Exception("Group " + MBSDataGroup.Solution + " not found"); - } - - // find any timeGroup - Group timeGroup = null; - for (Object member : solutionGroup.getMemberList()) - { - String memberName = ((HObject)member).getName(); - if (member instanceof Group && memberName.startsWith("time")) - { - timeGroup = (Group) member; - break; - } - } - - if (timeGroup == null) - { - throw new Exception("No time group found"); - } - - // find all the datasets in that time group - for (Object member : timeGroup.getMemberList()) - { - if (member instanceof Dataset) - { - List solAttrList = ((Dataset)member).getMetadata(); - int size = 0; - String varName = null; - VariableType varType = null; - for (Attribute attr : solAttrList) - { - String attrName = attr.getName(); - Object attrValue = attr.getValue(); - if(attrName.equals(MSBDataAttribute.name.name())) - { - varName = ((String[]) attrValue)[0]; - } - else if (attrName.equals(MSBDataAttribute.size.name())) - { - size = ((int[]) attrValue)[0]; - } - else if (attrName.equals(MSBDataAttribute.type.name())) - { - String vt = ((String[]) attrValue)[0]; - if (vt.equals(MSBDataAttributeValue.Point.name())) - { - varType = VariableType.POINT_VARIABLE; - } - else if (vt.equals(MSBDataAttributeValue.Volume.name())) - { - varType = VariableType.VOLUME; - } - else if (vt.equals(MSBDataAttributeValue.PointSubDomain.name())) - { - // Position for PointSubdomain - } - } - } - if (varType == VariableType.VOLUME) - { - // only display volume - dataBlockList.addElement(DataBlock.createDataBlock(varName, varType.getType(), size, 0)); - } - if (varType == VariableType.POINT_VARIABLE) - { - // only display volume - dataBlockList.addElement(DataBlock.createDataBlock(varName, varType.getType(), size, 0)); - } - - } - } - } - finally - { - if (solFile != null) - { - try { - solFile.close(); - } catch (Exception e) { - // ignore - } - } - } - } - - private double[] readMBSData(String varName, Double time) throws Exception { - FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5); - FileFormat solFile = null; - double[] data = null; - try { - solFile = fileFormat.createInstance(fileName, FileFormat.READ); - solFile.open(); - DefaultMutableTreeNode rootNode = (DefaultMutableTreeNode)solFile.getRootNode(); - Group rootGroup = (Group)rootNode.getUserObject(); - Group solutionGroup = null; - for (Object member : rootGroup.getMemberList()) - { - String memberName = ((HObject)member).getName(); - if (member instanceof Group) - { - MBSDataGroup group = MBSDataGroup.valueOf(memberName); - if (group == MBSDataGroup.Solution) - { - solutionGroup = (Group) member; - break; - } - } - } - if (solutionGroup == null) - { - throw new 
Exception("Group " + MBSDataGroup.Solution + " not found"); - } - - int varIndex = -1; - int size = 0; - for (int i = 0; i < dataBlockList.size(); ++ i) - { - DataBlock dataBlock = dataBlockList.get(i); - if (dataBlock.getVarName().equals(varName)) - { - varIndex = i; - size = dataBlock.getSize(); - break; - } - } - - if (varIndex == -1) - { - throw new Exception("Variable " + varName + " not found"); - } - - // find time group for that time - Group timeGroup = null; - for (Object member : solutionGroup.getMemberList()) - { - if (member instanceof Group) - { - Group group = (Group)member; - List dsAttrList = group.getMetadata(); - Attribute timeAttribute = null; - for (Attribute attr : dsAttrList) - { - if (attr.getName().equals(MSBDataAttribute.time.name())) - { - timeAttribute = attr; - break; - } - } - if (timeAttribute != null) - { - double t = ((double[]) timeAttribute.getValue())[0]; - if (Math.abs(t - time) < 1e-8) - { - timeGroup = group; - break; - } - } - } - } - - if (timeGroup == null) - { - throw new Exception("No time group found for time=" + time); - } - - // find variable dataset - Dataset varDataset = null; - for (Object member : timeGroup.getMemberList()) - { - if (member instanceof Dataset) - { - List dsAttrList = ((Dataset)member).getMetadata(); - String var = null; - for (Attribute attr : dsAttrList) - { - if (attr.getName().equals(MSBDataAttribute.name.name())) - { - var = ((String[]) attr.getValue())[0]; - break; - } - } - if (var != null && var.equals(varName)) - { - varDataset = (Dataset) member; - break; - } - } - } - if (varDataset == null) - { - throw new Exception("Data for Variable " + varName + " at time " + time + " not found"); - } - - data = new double[size]; - System.arraycopy((double[])varDataset.getData(), 0, data, 0, size); - return data; - } - finally - { - if (solFile != null) - { - try { - solFile.close(); - } catch (Exception e) { - // ignore - } - } - } - } } diff --git a/vcell-core/src/main/java/cbit/vcell/simdata/MovingBoundarySimDataReader.java b/vcell-core/src/main/java/cbit/vcell/simdata/MovingBoundarySimDataReader.java new file mode 100644 index 0000000000..651a6617cb --- /dev/null +++ b/vcell-core/src/main/java/cbit/vcell/simdata/MovingBoundarySimDataReader.java @@ -0,0 +1,247 @@ +package cbit.vcell.simdata; + +import cbit.vcell.math.VariableType; +import cbit.vcell.solvers.CartesianMeshMovingBoundary; +import ncsa.hdf.object.*; + +import javax.swing.tree.DefaultMutableTreeNode; +import java.util.List; +import java.util.Vector; + +public class MovingBoundarySimDataReader { + public static void readMBSDataMetadata(String fileName, Vector dataBlockList) throws Exception + { + FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5); + FileFormat solFile = null; + try { + solFile = fileFormat.createInstance(fileName, FileFormat.READ); + solFile.open(); + DefaultMutableTreeNode rootNode = (DefaultMutableTreeNode)solFile.getRootNode(); + Group rootGroup = (Group)rootNode.getUserObject(); + Group solutionGroup = null; + for (Object member : rootGroup.getMemberList()) + { + String memberName = ((HObject)member).getName(); + if (member instanceof Group) + { + CartesianMeshMovingBoundary.MBSDataGroup group = CartesianMeshMovingBoundary.MBSDataGroup.valueOf(memberName); + if (group == CartesianMeshMovingBoundary.MBSDataGroup.Solution) + { + solutionGroup = (Group) member; + break; + } + } + } + if (solutionGroup == null) + { + throw new Exception("Group " + CartesianMeshMovingBoundary.MBSDataGroup.Solution + " not found"); + } + + 
// find any timeGroup + Group timeGroup = null; + for (Object member : solutionGroup.getMemberList()) + { + String memberName = ((HObject)member).getName(); + if (member instanceof Group && memberName.startsWith("time")) + { + timeGroup = (Group) member; + break; + } + } + + if (timeGroup == null) + { + throw new Exception("No time group found"); + } + + // find all the datasets in that time group + for (Object member : timeGroup.getMemberList()) + { + if (member instanceof Dataset) + { + List solAttrList = ((Dataset)member).getMetadata(); + int size = 0; + String varName = null; + VariableType varType = null; + for (Attribute attr : solAttrList) + { + String attrName = attr.getName(); + Object attrValue = attr.getValue(); + if(attrName.equals(CartesianMeshMovingBoundary.MSBDataAttribute.name.name())) + { + varName = ((String[]) attrValue)[0]; + } + else if (attrName.equals(CartesianMeshMovingBoundary.MSBDataAttribute.size.name())) + { + size = ((int[]) attrValue)[0]; + } + else if (attrName.equals(CartesianMeshMovingBoundary.MSBDataAttribute.type.name())) + { + String vt = ((String[]) attrValue)[0]; + if (vt.equals(CartesianMeshMovingBoundary.MSBDataAttributeValue.Point.name())) + { + varType = VariableType.POINT_VARIABLE; + } + else if (vt.equals(CartesianMeshMovingBoundary.MSBDataAttributeValue.Volume.name())) + { + varType = VariableType.VOLUME; + } + else if (vt.equals(CartesianMeshMovingBoundary.MSBDataAttributeValue.PointSubDomain.name())) + { + // Position for PointSubdomain + } + } + } + if (varType == VariableType.VOLUME) + { + // only display volume + dataBlockList.addElement(DataBlock.createDataBlock(varName, varType.getType(), size, 0)); + } + if (varType == VariableType.POINT_VARIABLE) + { + // only display volume + dataBlockList.addElement(DataBlock.createDataBlock(varName, varType.getType(), size, 0)); + } + + } + } + } + finally + { + if (solFile != null) + { + try { + solFile.close(); + } catch (Exception e) { + // ignore + } + } + } + } + + public static double[] readMBSData(String fileName, Vector dataBlockList, String varName, Double time) throws Exception { + FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5); + FileFormat solFile = null; + double[] data = null; + try { + solFile = fileFormat.createInstance(fileName, FileFormat.READ); + solFile.open(); + DefaultMutableTreeNode rootNode = (DefaultMutableTreeNode)solFile.getRootNode(); + Group rootGroup = (Group)rootNode.getUserObject(); + Group solutionGroup = null; + for (Object member : rootGroup.getMemberList()) + { + String memberName = ((HObject)member).getName(); + if (member instanceof Group) + { + CartesianMeshMovingBoundary.MBSDataGroup group = CartesianMeshMovingBoundary.MBSDataGroup.valueOf(memberName); + if (group == CartesianMeshMovingBoundary.MBSDataGroup.Solution) + { + solutionGroup = (Group) member; + break; + } + } + } + if (solutionGroup == null) + { + throw new Exception("Group " + CartesianMeshMovingBoundary.MBSDataGroup.Solution + " not found"); + } + + int varIndex = -1; + int size = 0; + for (int i = 0; i < dataBlockList.size(); ++ i) + { + DataBlock dataBlock = dataBlockList.get(i); + if (dataBlock.getVarName().equals(varName)) + { + varIndex = i; + size = dataBlock.getSize(); + break; + } + } + + if (varIndex == -1) + { + throw new Exception("Variable " + varName + " not found"); + } + + // find time group for that time + Group timeGroup = null; + for (Object member : solutionGroup.getMemberList()) + { + if (member instanceof Group) + { + Group group = (Group)member; + 
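Both methods of the new reader walk HDF5 metadata the same way: fetch a node's attribute list and compare attribute names. The pattern in isolation, against the legacy ncsa.hdf.object API used here (getMetadata() returns a raw List in that API, hence the unchecked cast):

    import java.util.List;
    import ncsa.hdf.object.Attribute;
    import ncsa.hdf.object.Group;

    public class AttributeScan {
        // Returns the named attribute, or null if the group does not carry it.
        static Attribute findAttribute(Group group, String name) throws Exception {
            @SuppressWarnings("unchecked")
            List<Attribute> attrs = (List<Attribute>) group.getMetadata();
            for (Attribute attr : attrs) {
                if (attr.getName().equals(name)) {
                    return attr;
                }
            }
            return null;
        }
    }
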
List dsAttrList = group.getMetadata(); + Attribute timeAttribute = null; + for (Attribute attr : dsAttrList) + { + if (attr.getName().equals(CartesianMeshMovingBoundary.MSBDataAttribute.time.name())) + { + timeAttribute = attr; + break; + } + } + if (timeAttribute != null) + { + double t = ((double[]) timeAttribute.getValue())[0]; + if (Math.abs(t - time) < 1e-8) + { + timeGroup = group; + break; + } + } + } + } + + if (timeGroup == null) + { + throw new Exception("No time group found for time=" + time); + } + + // find variable dataset + Dataset varDataset = null; + for (Object member : timeGroup.getMemberList()) + { + if (member instanceof Dataset) + { + List dsAttrList = ((Dataset)member).getMetadata(); + String var = null; + for (Attribute attr : dsAttrList) + { + if (attr.getName().equals(CartesianMeshMovingBoundary.MSBDataAttribute.name.name())) + { + var = ((String[]) attr.getValue())[0]; + break; + } + } + if (var != null && var.equals(varName)) + { + varDataset = (Dataset) member; + break; + } + } + } + if (varDataset == null) + { + throw new Exception("Data for Variable " + varName + " at time " + time + " not found"); + } + + data = new double[size]; + System.arraycopy((double[])varDataset.getData(), 0, data, 0, size); + return data; + } + finally + { + if (solFile != null) + { + try { + solFile.close(); + } catch (Exception e) { + // ignore + } + } + } + } + +} diff --git a/vcell-core/src/main/java/cbit/vcell/simdata/SimDataReader.java b/vcell-core/src/main/java/cbit/vcell/simdata/SimDataReader.java index 7f4f94341a..667f8acb11 100644 --- a/vcell-core/src/main/java/cbit/vcell/simdata/SimDataReader.java +++ b/vcell-core/src/main/java/cbit/vcell/simdata/SimDataReader.java @@ -9,23 +9,14 @@ */ package cbit.vcell.simdata; -import java.io.DataInputStream; -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.util.Arrays; -import java.util.zip.ZipEntry; import org.apache.commons.compress.archivers.zip.ZipArchiveEntry; import org.apache.commons.compress.archivers.zip.ZipFile; import org.vcell.util.DataAccessException; -import cbit.vcell.math.InsideVariable; -import cbit.vcell.math.OutsideVariable; -import ncsa.hdf.object.Dataset; -import ncsa.hdf.object.FileFormat; -import ncsa.hdf.object.HObject; +import java.io.*; +import java.util.Arrays; +import java.util.zip.ZipEntry; /** @@ -131,93 +122,26 @@ public void close(){ } } -private void getNextDataAtCurrentTimeChombo(double[][] returnValues) throws Exception { - if (zipFilenNames == null || zipFilenNames[masterTimeIndex] == null) { - return; - } - if (currentZipFile == null || !currentZipFileName.equals(zipFilenNames[masterTimeIndex])) { - close(); - currentZipFile = new ZipFile(zipFilenNames[masterTimeIndex]); - currentZipFileName=zipFilenNames[masterTimeIndex]; - } - File tempFile = null; - FileFormat solFile = null; - try { - tempFile = DataSet.createTempHdf5File(currentZipFile, simDataFileNames[masterTimeIndex]); - - FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5); - solFile = fileFormat.createInstance(tempFile.getAbsolutePath(), FileFormat.READ); - solFile.open(); - - for(int k = 0; k < varNames.length; ++ k) { - try { - boolean bExtrapolatedValue = false; - String varName = varNames[k]; - if (varName.endsWith(InsideVariable.INSIDE_VARIABLE_SUFFIX)) - { - bExtrapolatedValue = true; - varName = varName.substring(0, varName.lastIndexOf(InsideVariable.INSIDE_VARIABLE_SUFFIX)); - } - else if 
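readMBSData above matches the requested time against each time group's time attribute with an absolute tolerance of 1e-8, then copies the variable's dataset out. A usage sketch; the file name is illustrative, and the Vector's element type is inferred from the DataBlock entries the metadata pass creates:

    import java.util.Vector;
    import cbit.vcell.simdata.DataBlock;
    import cbit.vcell.simdata.MovingBoundarySimDataReader;

    public class ReadMovingBoundaryVar {
        public static void main(String[] args) throws Exception {
            String h5File = "SimID_456_0_.mbs.h5";   // illustrative MovingBoundary output
            Vector<DataBlock> dataBlockList = new Vector<>();
            MovingBoundarySimDataReader.readMBSDataMetadata(h5File, dataBlockList);
            double[] u = MovingBoundarySimDataReader.readMBSData(h5File, dataBlockList, "u", 0.5);
            System.out.println(u.length + " values for 'u' at t=0.5");
        }
    }
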
(varName.endsWith(OutsideVariable.OUTSIDE_VARIABLE_SUFFIX)) - { - bExtrapolatedValue = true; - varName = varName.substring(0, varName.lastIndexOf(OutsideVariable.OUTSIDE_VARIABLE_SUFFIX)); - } - double[] sol = null; - if (bExtrapolatedValue) - { - sol = DataSet.readChomboExtrapolatedValues(varName, solFile); - } - else - { - String varPath = Hdf5Utils.getVarSolutionPath(varNames[k]); - HObject solObj = FileFormat.findObject(solFile, varPath); - if (solObj instanceof Dataset) { - Dataset dataset = (Dataset)solObj; - sol = (double[]) dataset.read(); - } - } - if (sol != null) - { - for(int l = 0;l < varIndexes[k].length; ++ l) { - int idx = varIndexes[k][l]; - double val = sol[idx]; - returnValues[k][l] = val; - } - } - } catch (Exception e) { - throw new DataAccessException(e.getMessage(), e); - } - } - } finally { - try { - if (solFile != null) { - solFile.close(); - } - if (tempFile != null) { - if (!tempFile.delete()) { - System.err.println("couldn't delete temp file " + tempFile.getAbsolutePath()); - } - } - } catch(Exception e) { - // ignore - } - } - - ++ masterTimeIndex; - if (masterTimeIndex >= times.length) { - close(); - } -} - -/** + /** * Insert the method's description here. * Creation date: (10/26/2004 10:18:50 AM) */ public void getNextDataAtCurrentTime(double[][] returnValues) throws IOException, DataAccessException { if (isChombo) { try { - getNextDataAtCurrentTimeChombo(returnValues); + if (zipFilenNames == null || zipFilenNames[masterTimeIndex] == null) { + return; + } + if (currentZipFile == null || !currentZipFileName.equals(zipFilenNames[masterTimeIndex])) { + close(); + currentZipFile = new ZipFile(zipFilenNames[masterTimeIndex]); + currentZipFileName=zipFilenNames[masterTimeIndex]; + } + ChomboSimDataReader.getNextDataAtCurrentTimeChombo(returnValues, currentZipFile, varNames, varIndexes, simDataFileNames, masterTimeIndex); + ++ masterTimeIndex; + if (masterTimeIndex >= times.length) { + close(); + } } catch (Exception e) { throw new DataAccessException(e.getMessage(), e); } diff --git a/vcell-core/src/main/java/cbit/vcell/simdata/SimulationData.java b/vcell-core/src/main/java/cbit/vcell/simdata/SimulationData.java index be8890f214..f1f9b19208 100644 --- a/vcell-core/src/main/java/cbit/vcell/simdata/SimulationData.java +++ b/vcell-core/src/main/java/cbit/vcell/simdata/SimulationData.java @@ -792,6 +792,7 @@ else if (odeIdentifier.equals(NFSIM_DATA_IDENTIFIER)) ODEDataInfo odeDataInfo = new ODEDataInfo(vcDataId.getOwner(), vcDataId.getID(), lastModified); VCAssert.assertFalse(odeSimData == null, "should have returned null already"); byte[] hdf5FileBytes = null; + // try to open non-spatial stochastic MultiTrialStats HDF5 file File hdf5File = new File(file.getParent(),file.getName()+"_hdf5"); if(hdf5File.exists()) { hdf5FileBytes = Files.readAllBytes(hdf5File.toPath()); @@ -1611,7 +1612,7 @@ private synchronized void readMesh(File meshFile,File membraneMeshMetricsFile) t // read meshFile,MembraneMeshMetrics and parse into 'mesh' object // if(isChombo()){ -// SimulationDataSpatialHdf5 simulationDataSpatialHdf5 = new SimulationDataSpatialHdf5(vcDataId,userDirectory,null); +// ChomboSimpleSimDataReader_NotUsed simulationDataSpatialHdf5 = new ChomboSimpleSimDataReader_NotUsed(vcDataId,userDirectory,null); // simulationDataSpatialHdf5.readVarAndFunctionDataIdentifiers(); mesh = CartesianMeshChombo.readMeshFile(meshFile); // test serialization @@ -2016,7 +2017,7 @@ public synchronized SimDataBlock getChomboExtrapolatedValues(String varName, dou throw new 
DataAccessException("data not found for variable " + varName); } final String varNameInDataSet = dsi.getQualifiedName(); - double data[] = DataSet.readChomboExtrapolatedValues(varNameInDataSet, pdeFile, zipFile); + double data[] = ChomboSimDataReader.readChomboExtrapolatedValues(varNameInDataSet, pdeFile, zipFile); VariableType variableType = VariableType.MEMBRANE; PDEDataInfo pdeDataInfo = new PDEDataInfo(vcDataId.getOwner(),vcDataId.getID(),varName,time,lastModified); return data == null ? null : new SimDataBlock(pdeDataInfo,data,variableType); From b3e5e34f6264bfbe38ad94150b5b19a98f212570 Mon Sep 17 00:00:00 2001 From: Jim Schaff Date: Wed, 24 Apr 2024 12:04:49 -0400 Subject: [PATCH 08/16] remove vcell-web from maven to fix build --- pom.xml | 1 - 1 file changed, 1 deletion(-) diff --git a/pom.xml b/pom.xml index 385869fb5a..8fd9f5b665 100644 --- a/pom.xml +++ b/pom.xml @@ -64,7 +64,6 @@ vcell-util vcell-vmicro vcell-cli - vcell-web vcell-rest vcell-restclient From d43a513517e4eb4d2a6b1ddd8d5866fdb9328948 Mon Sep 17 00:00:00 2001 From: Jim Schaff Date: Wed, 24 Apr 2024 12:53:00 -0400 Subject: [PATCH 09/16] combine Hdf5Reader into ChomboFileReader --- .../org/vcell/vis/io/ChomboFileReader.java | 244 +++++++++++++++--- .../java/org/vcell/vis/io/Hdf5Reader.java | 195 -------------- 2 files changed, 213 insertions(+), 226 deletions(-) delete mode 100644 vcell-core/src/main/java/org/vcell/vis/io/Hdf5Reader.java diff --git a/vcell-core/src/main/java/org/vcell/vis/io/ChomboFileReader.java b/vcell-core/src/main/java/org/vcell/vis/io/ChomboFileReader.java index 2034ae1d62..4627e0053b 100644 --- a/vcell-core/src/main/java/org/vcell/vis/io/ChomboFileReader.java +++ b/vcell-core/src/main/java/org/vcell/vis/io/ChomboFileReader.java @@ -1,10 +1,15 @@ package org.vcell.vis.io; import java.io.File; +import java.util.ArrayList; import java.util.List; +import java.util.StringTokenizer; +import java.util.Vector; import javax.swing.tree.DefaultMutableTreeNode; +import ncsa.hdf.object.h5.H5CompoundDS; +import ncsa.hdf.object.h5.H5ScalarDS; import org.vcell.vis.chombo.ChomboBoundaries; import org.vcell.vis.chombo.ChomboBoundaries.BorderCellInfo; import org.vcell.vis.chombo.ChomboBoundaries.MeshMetrics; @@ -34,7 +39,78 @@ public class ChomboFileReader { private static final String MESH_ATTR_DIMENSION = "dimension"; private static final String MESH_ATTR_ORIGIN = "origin"; private static final String MESH_ATTR_EXTENT = "extent"; - + + + /** + * Z = boolean + [B = byte + [S = short + [I = int + [J = long + [F = float + [D = double + [C = char + [L = any non-primitives(Object) + * @author schaff + * + */ + static abstract class DataColumn { + private String colName; + public DataColumn(String name){ + this.colName = name; + } + public abstract int getNumRows(); + public abstract double getValue(int index); + } + + static class IntColumn extends DataColumn { + int[] data; + public IntColumn(String name, int[] data){ + super(name); + this.data = data; + } + @Override + public int getNumRows(){ + return data.length; + } + @Override + public double getValue(int index){ + return data[index]; + } + } + + static class LongColumn extends DataColumn { + long[] data; + public LongColumn(String name, long[] data){ + super(name); + this.data = data; + } + @Override + public int getNumRows(){ + return data.length; + } + @Override + public double getValue(int index){ + return data[index]; + } + } + + static class DoubleColumn extends DataColumn { + double[] data; + public DoubleColumn(String name, double[] data){ + 
super(name); + this.data = data; + } + @Override + public int getNumRows(){ + return data.length; + } + @Override + public double getValue(int index){ + return data[index]; + } + } + private static ChomboMeshData readMesh(String meshFileName, String vol0FileName) throws Exception{ ChomboMesh chomboMesh = new ChomboMesh(); @@ -52,11 +128,11 @@ private static ChomboMeshData readMesh(String meshFileName, String vol0FileName) DefaultMutableTreeNode meshRootNode = (DefaultMutableTreeNode)meshFile.getRootNode(); Group meshRootGroup = (Group)meshRootNode.getUserObject(); - Group meshGroup = Hdf5Reader.getChildGroup(meshRootGroup,"mesh"); + Group meshGroup = getChildGroup(meshRootGroup,"mesh"); - chomboMesh.setDimension(Hdf5Reader.getIntAttribute(meshGroup,MESH_ATTR_DIMENSION)); - chomboMesh.setExtent(Hdf5Reader.getVect3DAttribute(meshGroup,MESH_ATTR_EXTENT,1.0)); - chomboMesh.setOrigin(Hdf5Reader.getVect3DAttribute(meshGroup,MESH_ATTR_ORIGIN,0.0)); + chomboMesh.setDimension(getIntAttribute(meshGroup,MESH_ATTR_DIMENSION)); + chomboMesh.setExtent(getVect3DAttribute(meshGroup,MESH_ATTR_EXTENT,1.0)); + chomboMesh.setOrigin(getVect3DAttribute(meshGroup,MESH_ATTR_ORIGIN,0.0)); // it's very wasteful here, but what can I do? CartesianMeshChombo cartesianMeshChombo = CartesianMeshChombo.readMeshFile(new File(meshFileName)); @@ -65,10 +141,10 @@ private static ChomboMeshData readMesh(String meshFileName, String vol0FileName) chomboMesh.addFeaturePhase(fpv.feature, fpv.iphase); } - //Hdf5Reader.DataColumn[] metricsColumns = Hdf5Reader.getDataTable(meshGroup,METRICS_DATASET); + //DataColumn[] metricsColumns = getDataTable(meshGroup,METRICS_DATASET); if (chomboMesh.getDimension()==2){ - Hdf5Reader.DataColumn[] segmentColumns = Hdf5Reader.getDataTable(meshGroup,"segments"); - Hdf5Reader.DataColumn[] verticesColumns = Hdf5Reader.getDataTable(meshGroup,"vertices"); + DataColumn[] segmentColumns = getDataTable(meshGroup,"segments"); + DataColumn[] verticesColumns = getDataTable(meshGroup,"vertices"); ChomboBoundaries boundaries = chomboMesh.getBoundaries(); int numVertices = verticesColumns[0].getNumRows(); int numSegments = segmentColumns[0].getNumRows(); @@ -85,7 +161,7 @@ private static ChomboMeshData readMesh(String meshFileName, String vol0FileName) boundaries.addSegment(new ChomboBoundaries.Segment(chomboIndex, v1, v2)); } }else if (chomboMesh.getDimension()==3){ - Hdf5Reader.DataColumn[] surfaceTriangleColumns = Hdf5Reader.getDataTable(meshGroup,"surface triangles"); + DataColumn[] surfaceTriangleColumns = getDataTable(meshGroup,"surface triangles"); ChomboBoundaries boundaries = chomboMesh.getBoundaries(); int numTriangles = surfaceTriangleColumns[0].getNumRows(); for (int row=0;row children = vcellGroup.getMemberList(); @@ -267,4 +343,110 @@ private static void readMembraneVarData(ChomboMeshData chomboMeshData, Group roo } } } + + private static Attribute getAttribute(Group group, String name) throws Exception{ + List attributes = group.getMetadata(); + for (Attribute attr : attributes){ + if (attr.getName().equals(name)){ + return attr; + } + } + throw new RuntimeException("failed to find attribute "+name); + } + + private static double getDoubleAttribute(Group group, String name) throws Exception{ + Attribute attr = getAttribute(group,name); + return ((double[])attr.getValue())[0]; + } + + private static float getFloatAttribute(Group group, String name) throws Exception{ + Attribute attr = getAttribute(group,name); + return ((float[])attr.getValue())[0]; + } + + private static int getIntAttribute(Group 
group, String name) throws Exception{ + Attribute attr = getAttribute(group,name); + return ((int[])attr.getValue())[0]; + } + + private static String getStringAttribute(Group group, String name) throws Exception{ + Attribute attr = getAttribute(group,name); + return ((String[])attr.getValue())[0]; + } + + private static Vect3D getVect3DAttribute(Group group, String name, double defaultZ) throws Exception{ + String str = getStringAttribute(group, name); + return parseAttrString(str,defaultZ); + } + + private static Group getChildGroup(Group group, String name){ + List memberList = group.getMemberList(); + for (HObject member : memberList) { + if (member.getName().equals(name)){ + if (member instanceof Group) { + return (Group)member; + }else{ + throw new RuntimeException("expecting type Group for group member '"+name+"'"); + } + } + } + throw new RuntimeException("child group '"+name+"' not found"); + } + + private static DataColumn[] getDataTable(Group group, String name) throws Exception{ + List memberList = group.getMemberList(); + for (HObject member : memberList) { + if (member.getName().equals(name)){ + if (member instanceof H5CompoundDS) { + H5CompoundDS compoundDataSet = (H5CompoundDS) member; + Vector columnValueArrays = (Vector)compoundDataSet.read(); + String[] columnNames = compoundDataSet.getMemberNames(); + ArrayList dataColumns = new ArrayList(); + for (int c=0;c valueList = new ArrayList(); + while (st.hasMoreTokens()) + { + String token = st.nextToken(); + valueList.add(Double.parseDouble(token)); + } + if (valueList.size()==2){ + return new Vect3D(valueList.get(0),valueList.get(1),defaultZ); + }else if (valueList.size()==3){ + return new Vect3D(valueList.get(0),valueList.get(1),valueList.get(2)); + }else{ + throw new RuntimeException("cannot parse, unexpected array size "+valueList.size()); + } + } } diff --git a/vcell-core/src/main/java/org/vcell/vis/io/Hdf5Reader.java b/vcell-core/src/main/java/org/vcell/vis/io/Hdf5Reader.java deleted file mode 100644 index 619ccc4061..0000000000 --- a/vcell-core/src/main/java/org/vcell/vis/io/Hdf5Reader.java +++ /dev/null @@ -1,195 +0,0 @@ -package org.vcell.vis.io; - -import java.util.ArrayList; -import java.util.List; -import java.util.StringTokenizer; -import java.util.Vector; - -import org.vcell.vis.core.Vect3D; - -import ncsa.hdf.object.Attribute; -import ncsa.hdf.object.Group; -import ncsa.hdf.object.HObject; -import ncsa.hdf.object.h5.H5CompoundDS; -import ncsa.hdf.object.h5.H5ScalarDS; - -public class Hdf5Reader { - - /** - * Z = boolean - [B = byte - [S = short - [I = int - [J = long - [F = float - [D = double - [C = char - [L = any non-primitives(Object) - * @author schaff - * - */ - public static abstract class DataColumn { - private String colName; - public DataColumn(String name){ - this.colName = name; - } - public abstract int getNumRows(); - public abstract double getValue(int index); - } - - public static class IntColumn extends DataColumn { - int[] data; - public IntColumn(String name, int[] data){ - super(name); - this.data = data; - } - @Override - public int getNumRows(){ - return data.length; - } - @Override - public double getValue(int index){ - return data[index]; - } - } - - public static class LongColumn extends DataColumn { - long[] data; - public LongColumn(String name, long[] data){ - super(name); - this.data = data; - } - @Override - public int getNumRows(){ - return data.length; - } - @Override - public double getValue(int index){ - return data[index]; - } - } - - public static class DoubleColumn extends 
DataColumn { - double[] data; - public DoubleColumn(String name, double[] data){ - super(name); - this.data = data; - } - @Override - public int getNumRows(){ - return data.length; - } - @Override - public double getValue(int index){ - return data[index]; - } - } - - public static Attribute getAttribute(Group group, String name) throws Exception{ - List attributes = group.getMetadata(); - for (Attribute attr : attributes){ - if (attr.getName().equals(name)){ - return attr; - } - } - throw new RuntimeException("failed to find attribute "+name); - } - - public static double getDoubleAttribute(Group group, String name) throws Exception{ - Attribute attr = getAttribute(group,name); - return ((double[])attr.getValue())[0]; - } - - public static float getFloatAttribute(Group group, String name) throws Exception{ - Attribute attr = getAttribute(group,name); - return ((float[])attr.getValue())[0]; - } - - public static int getIntAttribute(Group group, String name) throws Exception{ - Attribute attr = getAttribute(group,name); - return ((int[])attr.getValue())[0]; - } - - public static String getStringAttribute(Group group, String name) throws Exception{ - Attribute attr = getAttribute(group,name); - return ((String[])attr.getValue())[0]; - } - - public static Vect3D getVect3DAttribute(Group group, String name, double defaultZ) throws Exception{ - String str = getStringAttribute(group, name); - return parseAttrString(str,defaultZ); - } - - public static Group getChildGroup(Group group, String name){ - List memberList = group.getMemberList(); - for (HObject member : memberList) { - if (member.getName().equals(name)){ - if (member instanceof Group) { - return (Group)member; - }else{ - throw new RuntimeException("expecting type Group for group member '"+name+"'"); - } - } - } - throw new RuntimeException("child group '"+name+"' not found"); - } - - public static Hdf5Reader.DataColumn[] getDataTable(Group group, String name) throws Exception{ - List memberList = group.getMemberList(); - for (HObject member : memberList) { - if (member.getName().equals(name)){ - if (member instanceof H5CompoundDS) { - H5CompoundDS compoundDataSet = (H5CompoundDS) member; - Vector columnValueArrays = (Vector)compoundDataSet.read(); - String[] columnNames = compoundDataSet.getMemberNames(); - ArrayList dataColumns = new ArrayList(); - for (int c=0;c valueList = new ArrayList(); - while (st.hasMoreTokens()) - { - String token = st.nextToken(); - valueList.add(Double.parseDouble(token)); - } - if (valueList.size()==2){ - return new Vect3D(valueList.get(0),valueList.get(1),defaultZ); - }else if (valueList.size()==3){ - return new Vect3D(valueList.get(0),valueList.get(1),valueList.get(2)); - }else{ - throw new RuntimeException("cannot parse, unexpected array size "+valueList.size()); - } - } - - -} From 6730c08776d69d1ca0ba5b3da35d173216fe5a11 Mon Sep 17 00:00:00 2001 From: Jim Schaff Date: Fri, 26 Apr 2024 18:36:44 -0400 Subject: [PATCH 10/16] Hdf5File, Hdf5Writer Biosimulations prefix, hide HDF5Exceptions --- .../java/org/vcell/cli/run/ExecuteImpl.java | 9 +- .../java/org/vcell/cli/run/ExecutionJob.java | 17 ++-- ...5File.java => BiosimulationsHdf5File.java} | 96 ++++++++++--------- ...ter.java => BiosimulationsHdf5Writer.java} | 47 +++++---- .../vcell/cli/run/hdf5/Hdf5DataPreparer.java | 15 +-- .../run/hdf5/NonspatialResultsConverter.java | 24 +---- .../cli/run/hdf5/SpatialResultsConverter.java | 12 +-- ...java => BiosimulationsHdf5WriterTest.java} | 16 ++-- 8 files changed, 116 insertions(+), 120 deletions(-) rename 
vcell-cli/src/main/java/org/vcell/cli/run/hdf5/{Hdf5File.java => BiosimulationsHdf5File.java} (86%) rename vcell-cli/src/main/java/org/vcell/cli/run/hdf5/{Hdf5Writer.java => BiosimulationsHdf5Writer.java} (81%) rename vcell-cli/src/test/java/org/vcell/cli/run/hdf5/{Hdf5WriterTest.java => BiosimulationsHdf5WriterTest.java} (93%) diff --git a/vcell-cli/src/main/java/org/vcell/cli/run/ExecuteImpl.java b/vcell-cli/src/main/java/org/vcell/cli/run/ExecuteImpl.java index 56fec53c25..f1556bee3e 100644 --- a/vcell-cli/src/main/java/org/vcell/cli/run/ExecuteImpl.java +++ b/vcell-cli/src/main/java/org/vcell/cli/run/ExecuteImpl.java @@ -2,7 +2,6 @@ import cbit.vcell.parser.ExpressionException; import cbit.vcell.solver.ode.ODESolverResultSet; -import ncsa.hdf.hdf5lib.exceptions.HDF5Exception; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.vcell.cli.CLIRecordable; @@ -22,6 +21,8 @@ import java.util.List; import java.util.Map; +import static org.vcell.cli.run.hdf5.BiosimulationsHdf5Writer.BiosimulationsHdfWriterException; + public class ExecuteImpl { private final static Logger logger = LogManager.getLogger(ExecuteImpl.class); @@ -65,7 +66,7 @@ public static void batchMode(File dirOfArchivesToProcess, File outputDir, CLIRec if (inputFileName.endsWith("omex")) runSingleExecOmex(inputFile, outputDir, cliLogger, bKeepTempFiles, bExactMatchOnly, bSmallMeshOverride); - } catch (ExecutionException | RuntimeException | HDF5Exception e){ + } catch (ExecutionException e){ logger.error("Error caught executing batch mode", e); Tracer.failure(e, "Error caught executing batch mode"); failedFiles.add(inputFileName); @@ -105,7 +106,7 @@ public static void batchMode(File dirOfArchivesToProcess, File outputDir, CLIRec private static void runSingleExecOmex(File inputFile, File outputDir, CLIRecordable cliLogger, boolean bKeepTempFiles, boolean bExactMatchOnly, boolean bSmallMeshOverride) - throws IOException, ExecutionException, PythonStreamException, HDF5Exception, InterruptedException { + throws IOException, ExecutionException, PythonStreamException, InterruptedException, BiosimulationsHdfWriterException { String bioModelBaseName = inputFile.getName().substring(0, inputFile.getName().indexOf(".")); // ".omex"?? 
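// Note that indexOf(".") cuts at the FIRST dot: an archive named "my.model.omex" yields "my" rather than "my.model", which is likely the reason for the "??" above.
// A minimal sketch of an alternative (illustrative only, not part of this patch) that strips just a trailing ".omex":
// String bioModelBaseName = inputFile.getName().replaceFirst("(?i)\\.omex$", "");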
Files.createDirectories(Paths.get(outputDir.getAbsolutePath() + File.separator + bioModelBaseName)); // make output subdir final boolean bEncapsulateOutput = true; @@ -230,7 +231,7 @@ public static void singleExecVcml(File vcmlFile, File outputDir, CLIRecordable c private static void singleExecOmex(File inputFile, File rootOutputDir, CLIRecordable cliRecorder, boolean bKeepTempFiles, boolean bExactMatchOnly, boolean bEncapsulateOutput, boolean bSmallMeshOverride, boolean bBioSimMode) - throws ExecutionException, PythonStreamException, IOException, InterruptedException, HDF5Exception { + throws ExecutionException, PythonStreamException, IOException, InterruptedException, BiosimulationsHdfWriterException { ExecutionJob requestedExecution = new ExecutionJob(inputFile, rootOutputDir, cliRecorder, bKeepTempFiles, bExactMatchOnly, bEncapsulateOutput, bSmallMeshOverride); diff --git a/vcell-cli/src/main/java/org/vcell/cli/run/ExecutionJob.java b/vcell-cli/src/main/java/org/vcell/cli/run/ExecutionJob.java index e3f0d52e45..38bf0df18b 100644 --- a/vcell-cli/src/main/java/org/vcell/cli/run/ExecutionJob.java +++ b/vcell-cli/src/main/java/org/vcell/cli/run/ExecutionJob.java @@ -1,25 +1,22 @@ package org.vcell.cli.run; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.vcell.cli.CLIRecordable; import org.vcell.cli.PythonStreamException; import org.vcell.cli.exceptions.ExecutionException; +import org.vcell.cli.run.hdf5.BiosimulationsHdf5Writer; import org.vcell.cli.run.hdf5.HDF5ExecutionResults; -import org.vcell.cli.run.hdf5.Hdf5DataContainer; import org.vcell.util.FileUtils; -import ncsa.hdf.hdf5lib.exceptions.HDF5Exception; - -import org.vcell.cli.run.hdf5.Hdf5Writer; - -import org.apache.logging.log4j.Logger; -import org.apache.logging.log4j.LogManager; - import java.io.File; import java.io.IOException; import java.nio.file.Path; import java.nio.file.Paths; import java.util.List; +import static org.vcell.cli.run.hdf5.BiosimulationsHdf5Writer.BiosimulationsHdfWriterException; + /** * Contains the code necessary to execute an Omex archive in VCell */ @@ -124,7 +121,7 @@ public void preprocessArchive() throws PythonStreamException, IOException { * @throws IOException if there are system I/O issues * @throws ExecutionException if an execution specfic error occurs */ - public void executeArchive(boolean isBioSimSedml) throws HDF5Exception, PythonStreamException, ExecutionException { + public void executeArchive(boolean isBioSimSedml) throws BiosimulationsHdfWriterException, PythonStreamException, ExecutionException { try { HDF5ExecutionResults masterHdf5File = new HDF5ExecutionResults(isBioSimSedml); this.queueAllSedml(); @@ -145,7 +142,7 @@ public void executeArchive(boolean isBioSimSedml) throws HDF5Exception, PythonSt if (hasSucceeded) logger.info("Processing of SedML succeeded.\n" + stats.toString()); else logger.error("Processing of SedML has failed.\n" + stats.toString()); } - Hdf5Writer.writeHdf5(masterHdf5File, new File(this.outputDir)); + BiosimulationsHdf5Writer.writeHdf5(masterHdf5File, new File(this.outputDir)); } catch(PythonStreamException e){ logger.error("Python-processing encountered fatal error. 
Execution is unable to properly continue.", e); diff --git a/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/Hdf5File.java b/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/BiosimulationsHdf5File.java similarity index 86% rename from vcell-cli/src/main/java/org/vcell/cli/run/hdf5/Hdf5File.java rename to vcell-cli/src/main/java/org/vcell/cli/run/hdf5/BiosimulationsHdf5File.java index d6c84b4d67..dd7b6ba472 100644 --- a/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/Hdf5File.java +++ b/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/BiosimulationsHdf5File.java @@ -1,32 +1,29 @@ package org.vcell.cli.run.hdf5; -//import ncsa.hdf.hdf5lib.*; - -import java.io.File; -import java.io.IOException; - -import java.nio.ByteBuffer; -import java.nio.ByteOrder; - - -import java.util.List; -import java.util.Map; -import java.util.HashMap; import ncsa.hdf.hdf5lib.H5; import ncsa.hdf.hdf5lib.HDF5Constants; import ncsa.hdf.hdf5lib.exceptions.HDF5Exception; import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException; - import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.vcell.cli.run.hdf5.Hdf5DataPreparer.Hdf5PreparedData; +import java.io.File; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.vcell.cli.run.hdf5.BiosimulationsHdf5Writer.BiosimulationsHdfWriterException; + /** * Class to handle Hdf5 creation, data, and assist with I/O. */ -public class Hdf5File { +public class BiosimulationsHdf5File { // NB: Hdf5 group management is ***important***. - private final static Logger logger = LogManager.getLogger(Hdf5File.class); + private final static Logger logger = LogManager.getLogger(BiosimulationsHdf5File.class); private static boolean needToCreateFile = true; final private int H5F_ACC_TRUNC = HDF5Constants.H5F_ACC_TRUNC; @@ -47,7 +44,7 @@ public class Hdf5File { private Map pathToIdMap; private Map datasetToDataspaceMap; - private Hdf5File(){ + private BiosimulationsHdf5File(){ this.fileId = HDF5Constants.H5I_INVALID_HID; this.isOpen = false; @@ -55,26 +52,26 @@ private Hdf5File(){ this.idToPathMap = new HashMap(); this.pathToIdMap = new HashMap(); this.datasetToDataspaceMap = new HashMap<>(); - Hdf5File.needToCreateFile = false; + BiosimulationsHdf5File.needToCreateFile = false; } /** - * Creates an Hdf5File named "reports.h5" in the provided directory, and will throw exceptions where c-style error codes would be returned. + * Creates an BiosimulationsHdf5File named "reports.h5" in the provided directory, and will throw exceptions where c-style error codes would be returned. * * @param parentDir the directory to put the Hdf5 file inside. */ - public Hdf5File(File parentDir) { //"/home/ldrescher/VCell/hdf5Rebuild/testingDir" + public BiosimulationsHdf5File(File parentDir) { //"/home/ldrescher/VCell/hdf5Rebuild/testingDir" this(parentDir, true); } /** - * The main constructor for Hdf5File. Note the special interpretation of allowExceptions. + * The main constructor for BiosimulationsHdf5File. Note the special interpretation of allowExceptions. * * @param parentDir the directory to put the Hdf5 file inside of. * @param allowExceptions Whether to interperate C-style error codes as exceptions or let the user handle them. * Hdf5 Library-produced exceptions will still be generated regardless. 
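 * (when allowExceptions is false, callers are expected to check the returned ids for negative C-style error codes themselves)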
*/ - public Hdf5File(File parentDir, boolean allowExceptions){ + public BiosimulationsHdf5File(File parentDir, boolean allowExceptions){ this(parentDir, "reports.h5", allowExceptions); } @@ -90,14 +87,14 @@ public void printErrorStack() { } /** - * Complete constructor of `Hdf5File` + * Complete constructor of `BiosimulationsHdf5File` * * @param parentDir the directory to put the Hdf5 file inside. * @param filename name of the h5 file to write. * @param allowExceptions Whether to interpret C-style error codes as exceptions or let the user handle them. * Hdf5 Library-produced exceptions will still be generated regardless. */ - public Hdf5File(File parentDir, String filename, boolean allowExceptions){ + public BiosimulationsHdf5File(File parentDir, String filename, boolean allowExceptions){ this(); this.javaFileTarget = new File(parentDir, filename); this.allowExceptions = allowExceptions; @@ -109,8 +106,8 @@ public Hdf5File(File parentDir, String filename, boolean allowExceptions){ * @throws HDF5LibraryException * @throws IOException */ - public void open() throws HDF5Exception, IOException { - this.open(Hdf5File.needToCreateFile); + public void open() throws BiosimulationsHdf5Writer.BiosimulationsHdfWriterException, IOException { + this.open(BiosimulationsHdf5File.needToCreateFile); } /** @@ -121,24 +118,29 @@ public void open() throws HDF5Exception, IOException { * @throws HDF5LibraryException * @throws IOException */ - public int open(boolean overwrite) throws HDF5Exception, IOException { - String path = this.javaFileTarget.getCanonicalPath(); - if (overwrite) this.fileId = H5.H5Fopen(path, H5F_ACC_RDWR, H5P_DEFAULT); - else this.fileId = H5.H5Fcreate(path, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT); - if (this.fileId < 0){ - String typeOfOperation = overwrite? "opened [H5Fopen]" : "created [H5Fopened]"; + public int open(boolean overwrite) throws BiosimulationsHdfWriterException, IOException { + try { + String path = this.javaFileTarget.getCanonicalPath(); + if (overwrite) this.fileId = H5.H5Fopen(path, H5F_ACC_RDWR, H5P_DEFAULT); + else this.fileId = H5.H5Fcreate(path, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT); + } catch (HDF5LibraryException e) { + throw new BiosimulationsHdf5Writer.BiosimulationsHdfWriterException("HDF5 Library Exception", e); + } + if (this.fileId < 0) { + String typeOfOperation = overwrite ? "opened [H5Fopen]" : "created [H5Fopened]"; String message = "HDF5 File could not be " + typeOfOperation + "; Something is preventing this."; - HDF5Exception e = new HDF5Exception(message); // investigate if Hdf5Exception would be more appropriate + IOException e = new IOException(message); // investigate if Hdf5Exception would be more appropriate logger.warn("Hdf5 error occured", e); if (this.allowExceptions) throw e; } + this.isOpen = true; return this.fileId; } /** * Add a group to the Hdf5 file based on a given path. If the group exists, the group_id will be returned. - * + * * @param groupPath the unix-style path *relative from the Hdf5 root (known as "/")* to place the group at * while hdf5 does allow with relative pathing from other groups, VCell does not support that at this time. 
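 * (e.g. groupPath = "/report_group/dataset_group", an illustrative absolute path)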
* @return the group ID @@ -378,24 +380,28 @@ public int closeDataset(int datasetId) throws HDF5Exception { return H5.H5Dclose(datasetId); } - public int close() throws HDF5Exception { + public int close() throws BiosimulationsHdfWriterException { if (!this.isOpen) return 0; //this.fileId = HDF5Constants.H5I_INVALID_HID; this.isOpen = false; - // Don't forget to close datasets (and their dataspaces) - for (int datasetId : this.datasetToDataspaceMap.keySet()){ - this.closeDataset(datasetId); - } + try { + // Don't forget to close datasets (and their dataspaces) + for (int datasetId : this.datasetToDataspaceMap.keySet()){ + this.closeDataset(datasetId); + } - // Don't forget to close all groups - for (int groupId : this.idToPathMap.keySet()){ - H5.H5Gclose(groupId); - } - this.idToPathMap.clear(); - this.pathToIdMap.clear(); + // Don't forget to close all groups + for (int groupId : this.idToPathMap.keySet()){ + H5.H5Gclose(groupId); + } + this.idToPathMap.clear(); + this.pathToIdMap.clear(); - return this.fileId < 0 ? this.fileId : (this.fileId = H5.H5Fclose(this.fileId)); + return this.fileId < 0 ? this.fileId : (this.fileId = H5.H5Fclose(this.fileId)); + } catch (HDF5Exception e) { + throw new BiosimulationsHdfWriterException(e.getMessage(),e); + } } diff --git a/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/Hdf5Writer.java b/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/BiosimulationsHdf5Writer.java similarity index 81% rename from vcell-cli/src/main/java/org/vcell/cli/run/hdf5/Hdf5Writer.java rename to vcell-cli/src/main/java/org/vcell/cli/run/hdf5/BiosimulationsHdf5Writer.java index 42a66923ad..041ea32b94 100644 --- a/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/Hdf5Writer.java +++ b/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/BiosimulationsHdf5Writer.java @@ -1,48 +1,53 @@ package org.vcell.cli.run.hdf5; import cbit.vcell.resource.NativeLib; -import ncsa.hdf.hdf5lib.H5; import ncsa.hdf.hdf5lib.exceptions.HDF5Exception; -import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException; - import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.jlibsedml.Report; import org.jlibsedml.SedML; import org.vcell.cli.run.hdf5.Hdf5DataPreparer.Hdf5PreparedData; -import java.util.*; + import java.io.File; import java.io.IOException; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; +import java.util.Arrays; +import java.util.Set; /** * Static class for writing out Hdf5 formatted files */ -public class Hdf5Writer { +public class BiosimulationsHdf5Writer { + + public static class BiosimulationsHdfWriterException extends Exception { + private static final long serialVersionUID = 1L; + public BiosimulationsHdfWriterException(String message, Exception e) { + super(message, e); + } + } - private final static Logger logger = LogManager.getLogger(Hdf5Writer.class); + private final static Logger logger = LogManager.getLogger(BiosimulationsHdf5Writer.class); - private Hdf5Writer(){} // Static class = no instances allowed + private BiosimulationsHdf5Writer(){} // Static class = no instances allowed /** * Writes an HDF5 formatted file given a hdf5FileWrapper and a destination to write the file to. * * @param hdf5ExecutionResults the wrapper of hdf5 relevant data * @param outDirForCurrentSedml the directory to place the report file into, NOT the report file itself. - * @throws HDF5Exception if there is an expection thrown from hdf5 while using the library. 
+ * @throws BiosimulationsHdfWriterException if there is an exception thrown from hdf5 while using the library. * @throws IOException if the computer encounters an unexpected system IO problem */ - public static void writeHdf5(HDF5ExecutionResults hdf5ExecutionResults, File outDirForCurrentSedml) throws HDF5Exception, IOException { + public static void writeHdf5(HDF5ExecutionResults hdf5ExecutionResults, File outDirForCurrentSedml) throws BiosimulationsHdfWriterException, IOException { boolean didFail = false; - Hdf5File masterHdf5; + BiosimulationsHdf5File masterHdf5; // Boot Hdf5 Library NativeLib.HDF5.load(); // Create and open the Hdf5 file logger.info("Creating hdf5 file `reports.h5` in" + outDirForCurrentSedml.getAbsolutePath()); - masterHdf5 = new Hdf5File(outDirForCurrentSedml); + masterHdf5 = new BiosimulationsHdf5File(outDirForCurrentSedml); masterHdf5.open(); try { @@ -102,10 +107,11 @@ else if (data.dataSource instanceof Hdf5SedmlResultsSpatial) } } } - } catch (Exception e) { // Catch runtime exceptions + } catch (HDF5Exception e) { // Catch runtime exceptions didFail = true; - logger.error("Error encountered while writing to BioSim-style HDF5.", e); - throw e; + String message = "Error encountered while writing to BioSim-style HDF5."; + logger.error(message, e); + throw new BiosimulationsHdfWriterException(message, e); } finally { try { final Level errorLevel = didFail ? Level.ERROR : Level.INFO; @@ -115,10 +121,11 @@ else if (data.dataSource instanceof Hdf5SedmlResultsSpatial) // Close up the file; lets deliver what we can write and flush out. masterHdf5.close(); logger.log(errorLevel, message); - } catch (HDF5LibraryException e){ + } catch (BiosimulationsHdfWriterException e){ masterHdf5.printErrorStack(); - logger.error("HDF5 Library Exception encountered while writing out to HDF5 file; Check std::err for stack"); - if (!didFail) throw e; + String message = "HDF5 Library Exception encountered while writing out to HDF5 file; Check std::err for stack"; + logger.error(message); + if (!didFail) throw new BiosimulationsHdfWriterException(message, e); } catch (Exception e) { e.printStackTrace(); } diff --git a/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/Hdf5DataPreparer.java b/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/Hdf5DataPreparer.java index 4111bc64b0..7351d92b94 100644 --- a/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/Hdf5DataPreparer.java +++ b/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/Hdf5DataPreparer.java @@ -1,17 +1,18 @@ package org.vcell.cli.run.hdf5; -import java.util.*; -import java.util.stream.Collectors; - -import org.jlibsedml.*; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.jlibsedml.DataSet; +import org.jlibsedml.Report; +import org.jlibsedml.VariableSymbol; + +import java.util.*; /** * Static data preparation class for Hdf5 files */ public class Hdf5DataPreparer { - private final static Logger logger = LogManager.getLogger(Hdf5File.class); + private final static Logger logger = LogManager.getLogger(Hdf5DataPreparer.class); public static class Hdf5PreparedData{ public String sedmlId; @@ -37,7 +38,7 @@ public static double[] getSpatialHdf5Attribute_Times(Report report, Hdf5SedmlRes } /** - * Reads a `Hdf5DatasetWrapper` contents and generates `Hdf5PreparedData` with spatial data for writing out to Hdf5 format via Hdf5Writer + * Reads a `Hdf5DatasetWrapper` contents and generates `Hdf5PreparedData` with spatial data for writing out to Hdf5 format via BiosimulationsHdf5Writer * * @param
datasetWrapper the data relevant to an HDF5 output file * @return the prepared spatial data @@ -111,7 +112,7 @@ public static Hdf5PreparedData prepareSpatialData (Hdf5SedmlResults datasetWrapp } /** - * Reads a `Hdf5DatasetWrapper` contents and generates `Hdf5PreparedData` with nonspatial data for writing out to Hdf5 format via Hdf5Writer + * Reads a `Hdf5DatasetWrapper` contents and generates `Hdf5PreparedData` with nonspatial data for writing out to Hdf5 format via BiosimulationsHdf5Writer * * @param datasetWrapper the data relevant to an hdf5 output file * @return the prepared nonspatial data diff --git a/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/NonspatialResultsConverter.java b/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/NonspatialResultsConverter.java index a5d1dc9c62..4969a57eae 100644 --- a/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/NonspatialResultsConverter.java +++ b/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/NonspatialResultsConverter.java @@ -1,33 +1,19 @@ package org.vcell.cli.run.hdf5; -import cbit.vcell.solver.Simulation; import cbit.vcell.parser.ExpressionException; +import cbit.vcell.solver.Simulation; import cbit.vcell.solver.TempSimulation; -import ncsa.hdf.hdf5lib.exceptions.HDF5Exception; - -import org.jlibsedml.SedML; -import org.jlibsedml.AbstractTask; -import org.jlibsedml.Output; -import org.jlibsedml.Report; -import org.jlibsedml.Variable; -import org.jlibsedml.DataGenerator; -import org.jlibsedml.RepeatedTask; -import org.jlibsedml.Task; -import org.jlibsedml.SubTask; -import org.jlibsedml.UniformTimeCourse; -import org.jlibsedml.DataSet; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.jlibsedml.*; import org.jlibsedml.execution.IXPathToVariableIDResolver; import org.jlibsedml.modelsupport.SBMLSupport; import org.vcell.cli.PythonStreamException; import org.vcell.cli.run.PythonCalls; import org.vcell.cli.run.Status; -import org.vcell.sbml.vcell.SBMLNonspatialSimResults; import org.vcell.cli.run.TaskJob; -import org.vcell.util.DataAccessException; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; +import org.vcell.sbml.vcell.SBMLNonspatialSimResults; -import java.io.*; import java.nio.file.Paths; import java.util.*; public class NonspatialResultsConverter { diff --git a/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/SpatialResultsConverter.java b/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/SpatialResultsConverter.java index 518f9b8436..ab66b7b837 100644 --- a/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/SpatialResultsConverter.java +++ b/vcell-cli/src/main/java/org/vcell/cli/run/hdf5/SpatialResultsConverter.java @@ -1,10 +1,9 @@ package org.vcell.cli.run.hdf5; -import cbit.vcell.parser.ExpressionException; import cbit.vcell.solver.TempSimulation; -import ncsa.hdf.hdf5lib.exceptions.HDF5Exception; - +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.jlibsedml.*; import org.jlibsedml.execution.IXPathToVariableIDResolver; import org.jlibsedml.modelsupport.SBMLSupport; @@ -12,17 +11,14 @@ import org.vcell.cli.run.PythonCalls; import org.vcell.cli.run.Status; import org.vcell.cli.run.TaskJob; -import org.vcell.util.DataAccessException; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import java.io.*; +import java.io.File; import java.nio.file.Paths; import java.util.*; public class SpatialResultsConverter { private final static Logger logger = 
LogManager.getLogger(SpatialResultsConverter.class); - public static Map> convertSpatialResultsToSedmlFormat(SedML sedml, Map spatialResultsHash, Map taskToSimulationMap, String sedmlLocation, String outDir) throws DataAccessException, IOException, HDF5Exception, ExpressionException, PythonStreamException { + public static Map> convertSpatialResultsToSedmlFormat(SedML sedml, Map spatialResultsHash, Map taskToSimulationMap, String sedmlLocation, String outDir) throws PythonStreamException { Map> results = new LinkedHashMap<>(); List allReports = SpatialResultsConverter.getReports(sedml.getOutputs()); diff --git a/vcell-cli/src/test/java/org/vcell/cli/run/hdf5/Hdf5WriterTest.java b/vcell-cli/src/test/java/org/vcell/cli/run/hdf5/BiosimulationsHdf5WriterTest.java similarity index 93% rename from vcell-cli/src/test/java/org/vcell/cli/run/hdf5/Hdf5WriterTest.java rename to vcell-cli/src/test/java/org/vcell/cli/run/hdf5/BiosimulationsHdf5WriterTest.java index e793295bac..942f76aa1a 100644 --- a/vcell-cli/src/test/java/org/vcell/cli/run/hdf5/Hdf5WriterTest.java +++ b/vcell-cli/src/test/java/org/vcell/cli/run/hdf5/BiosimulationsHdf5WriterTest.java @@ -3,20 +3,22 @@ import cbit.vcell.resource.NativeLib; import cbit.vcell.resource.PropertyLoader; import com.google.common.io.Files; -import ncsa.hdf.hdf5lib.exceptions.HDF5Exception; import org.jlibsedml.DataSet; import org.jlibsedml.Report; -import org.jlibsedml.SedML; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; import org.vcell.util.VCellUtilityHub; import java.io.File; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import static org.vcell.cli.run.hdf5.BiosimulationsHdf5Writer.BiosimulationsHdfWriterException; @Tag("Fast") -public class Hdf5WriterTest { +public class BiosimulationsHdf5WriterTest { public static HDF5ExecutionResults createExampleData() { @@ -95,12 +97,12 @@ public static HDF5ExecutionResults createExampleData() { } @Test - public void test() throws HDF5Exception, IOException { + public void test() throws BiosimulationsHdfWriterException, IOException { PropertyLoader.setProperty(PropertyLoader.installationRoot, new File("..").getAbsolutePath()); VCellUtilityHub.startup(VCellUtilityHub.MODE.CLI); NativeLib.HDF5.load(); - HDF5ExecutionResults exampleHdf5FileWrapper = Hdf5WriterTest.createExampleData(); + HDF5ExecutionResults exampleHdf5FileWrapper = BiosimulationsHdf5WriterTest.createExampleData(); File dir = Files.createTempDir(); - Hdf5Writer.writeHdf5(exampleHdf5FileWrapper, dir); + BiosimulationsHdf5Writer.writeHdf5(exampleHdf5FileWrapper, dir); } } \ No newline at end of file From 1006909d008f97bcb483a3252ea2e9fbb342b7ff Mon Sep 17 00:00:00 2001 From: Jim Schaff Date: Fri, 26 Apr 2024 18:50:26 -0400 Subject: [PATCH 11/16] isolate HDF5 calls for exporting UI table to HDF5 --- .../java/cbit/plot/gui/Plot2DDataPanel.java | 4 +- ...5Utils.java => UiTableExporterToHDF5.java} | 192 ++---------------- 2 files changed, 22 insertions(+), 174 deletions(-) rename vcell-core/src/main/java/cbit/vcell/simdata/{Hdf5Utils.java => UiTableExporterToHDF5.java} (59%) diff --git a/vcell-client/src/main/java/cbit/plot/gui/Plot2DDataPanel.java b/vcell-client/src/main/java/cbit/plot/gui/Plot2DDataPanel.java index d4aa587244..0c669b6f08 100644 --- a/vcell-client/src/main/java/cbit/plot/gui/Plot2DDataPanel.java +++ b/vcell-client/src/main/java/cbit/plot/gui/Plot2DDataPanel.java @@ -16,7 +16,7 @@ import cbit.vcell.math.ReservedVariable; import 
cbit.vcell.parser.Expression; import cbit.vcell.parser.SymbolTableEntry; -import cbit.vcell.simdata.Hdf5Utils; +import cbit.vcell.simdata.UiTableExporterToHDF5; import cbit.vcell.solver.Simulation; import com.google.common.io.Files; import org.apache.logging.log4j.LogManager; @@ -361,7 +361,7 @@ else if (copyAction == CopyAction.copyrow) { } } - File hdf5TempFile = Hdf5Utils.exportTableToHDF5(bHistogram, blankCellValue, columns, rows, xVarColumnName, hdf5DescriptionText, columnNames, paramScanParamNames, paramScanParamValues, rowColValues); + File hdf5TempFile = UiTableExporterToHDF5.exportTableToHDF5(bHistogram, blankCellValue, columns, rows, xVarColumnName, hdf5DescriptionText, columnNames, paramScanParamNames, paramScanParamValues, rowColValues); while(true) { JFileChooser jfc = new JFileChooser(); diff --git a/vcell-core/src/main/java/cbit/vcell/simdata/Hdf5Utils.java b/vcell-core/src/main/java/cbit/vcell/simdata/UiTableExporterToHDF5.java similarity index 59% rename from vcell-core/src/main/java/cbit/vcell/simdata/Hdf5Utils.java rename to vcell-core/src/main/java/cbit/vcell/simdata/UiTableExporterToHDF5.java index 6bc5452723..429e06feb4 100644 --- a/vcell-core/src/main/java/cbit/vcell/simdata/Hdf5Utils.java +++ b/vcell-core/src/main/java/cbit/vcell/simdata/UiTableExporterToHDF5.java @@ -1,47 +1,18 @@ package cbit.vcell.simdata; -import java.io.File; -import java.nio.ByteBuffer; -import java.nio.ByteOrder; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.ListIterator; - import cbit.vcell.math.ReservedVariable; -import org.apache.commons.lang.ArrayUtils; - -import cbit.vcell.math.Variable; import ncsa.hdf.hdf5lib.H5; import ncsa.hdf.hdf5lib.HDF5Constants; import ncsa.hdf.hdf5lib.exceptions.HDF5Exception; import ncsa.hdf.hdf5lib.exceptions.HDF5LibraryException; -public class Hdf5Utils { - private static final String HDF5_GROUP_SOLUTION = "/solution"; - private static final String HDF5_GROUP_EXTRAPOLATED_VOLUMES = "/extrapolated_volumes"; - private static final String HDF5_GROUP_DIRECTORY_SEPARATOR = "/"; - - /** - * Creates a relative path to the solution to the variable specified - * - * @param varName the name of the variable to path to. - * @return the relative path - */ - public static String getVarSolutionPath(String varName){ - return HDF5_GROUP_SOLUTION + HDF5_GROUP_DIRECTORY_SEPARATOR + Variable.getNameFromCombinedIdentifier(varName); - } - - /** - * Creates a relative path to the extrapolated values of a given variable name. 
- * - * @param varName name of the variable to path to - * @return the relative path - */ - public static String getVolVarExtrapolatedValuesPath(String varName){ - return HDF5_GROUP_EXTRAPOLATED_VOLUMES + HDF5_GROUP_DIRECTORY_SEPARATOR + "__" + Variable.getNameFromCombinedIdentifier(varName) + "_extrapolated__"; - } +import java.io.File; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.ListIterator; +public class UiTableExporterToHDF5 { public static File exportTableToHDF5(boolean bHistogram, String blankCellValue, int[] columns, int[] rows, String xVarColumnName, String hdf5DescriptionText, String[] columnNames, String[] paramScanParamNames, Double[][] paramScanParamValues, Object[][] rowColValues) throws Exception { int hdf5FileID = -1;//Used if HDF5 format File hdf5TempFile = null; @@ -119,10 +90,10 @@ public static File exportTableToHDF5(boolean bHistogram, String blankCellValue, if(selectedColCount == 0) { continue; } - int jobGroupID = -1;//(int) Hdf5Utils.createGroup(hdf5FileID, "Set "+k); + int jobGroupID = -1;//(int) UiTableExporterToHDF5.createGroup(hdf5FileID, "Set "+k); //writeHDF5Dataset(hdf5FileID, "Set "+k, null, null, false); - HDF5WriteHelper help0 = null;//Hdf5Utils.createDataset(jobGroupID, "data", new long[] {selectedColCount,rows.length}); - //(HDF5WriteHelper) Hdf5Utils.writeHDF5Dataset(jobGroupID, "data", new long[] {selectedColCount,rows.length}, new Object[] {}, false); + HDF5WriteHelper help0 = null;//UiTableExporterToHDF5.createDataset(jobGroupID, "data", new long[] {selectedColCount,rows.length}); + //(HDF5WriteHelper) UiTableExporterToHDF5.writeHDF5Dataset(jobGroupID, "data", new long[] {selectedColCount,rows.length}, new Object[] {}, false); //((DefaultTableModel)getScrollPaneTable().getModel()).getDataVector() double[] fromData = new double[rows.length*selectedColCount]; int actualLength = -1; @@ -195,11 +166,11 @@ public static File exportTableToHDF5(boolean bHistogram, String blankCellValue, //writeHDF5Dataset(help0.hdf5DatasetValuesID, null, null, objArr, false); insertAttribute(help0.hdf5DatasetValuesID, "_type", "ODE Data Export");//.writeHDF5Dataset(help0.hdf5DatasetValuesID, "_type", null, "ODE Data Export", true); insertAttributes(help0.hdf5DatasetValuesID,"dataSetDataTypes", dataTypes);//.writeHDF5Dataset(help0.hdf5DatasetValuesID, "dataSetDataTypes", null, dataTypes, true); - insertAttributes(help0.hdf5DatasetValuesID,"dataSetIds",dataIDs);//Hdf5Utils.writeHDF5Dataset(help0.hdf5DatasetValuesID, "dataSetIds", null,dataIDs , true); - insertAttributes(help0.hdf5DatasetValuesID,"dataSetLabels",dataLabels);//Hdf5Utils.writeHDF5Dataset(help0.hdf5DatasetValuesID, "dataSetLabels", null,dataLabels , true); - insertAttributes(help0.hdf5DatasetValuesID,"dataSetNames",dataNames);//Hdf5Utils.writeHDF5Dataset(help0.hdf5DatasetValuesID, "dataSetNames", null,dataNames , true); - insertAttributes(help0.hdf5DatasetValuesID,"dataSetShapes",dataShapes);//Hdf5Utils.writeHDF5Dataset(help0.hdf5DatasetValuesID, "dataSetShapes", null,dataShapes , true); - insertAttribute(help0.hdf5DatasetValuesID,"id","report");//Hdf5Utils.writeHDF5Dataset(help0.hdf5DatasetValuesID, "id", null,"report" , true); + insertAttributes(help0.hdf5DatasetValuesID,"dataSetIds",dataIDs);//UiTableExporterToHDF5.writeHDF5Dataset(help0.hdf5DatasetValuesID, "dataSetIds", null,dataIDs , true); + insertAttributes(help0.hdf5DatasetValuesID,"dataSetLabels",dataLabels);//UiTableExporterToHDF5.writeHDF5Dataset(help0.hdf5DatasetValuesID, "dataSetLabels", 
null,dataLabels , true); + insertAttributes(help0.hdf5DatasetValuesID,"dataSetNames",dataNames);//UiTableExporterToHDF5.writeHDF5Dataset(help0.hdf5DatasetValuesID, "dataSetNames", null,dataNames , true); + insertAttributes(help0.hdf5DatasetValuesID,"dataSetShapes",dataShapes);//UiTableExporterToHDF5.writeHDF5Dataset(help0.hdf5DatasetValuesID, "dataSetShapes", null,dataShapes , true); + insertAttribute(help0.hdf5DatasetValuesID,"id","report");//UiTableExporterToHDF5.writeHDF5Dataset(help0.hdf5DatasetValuesID, "id", null,"report" , true); if(paramNames.size() != 0) { insertAttributes(help0.hdf5DatasetValuesID,"paramNames",paramNames); insertAttributes(help0.hdf5DatasetValuesID,"paramValues",paramValues); @@ -222,7 +193,7 @@ public static File exportTableToHDF5(boolean bHistogram, String blankCellValue, /** * Helper class to ensure HDF5 documents are closed properly. */ - public static class HDF5WriteHelper { + private static class HDF5WriteHelper { /** * The id number of the hdf5 dataspace */ @@ -263,7 +234,7 @@ public void close() throws HDF5LibraryException { * @return a HDF5 Writer helper class to store the relevant values * @throws HDF5Exception if the hdf5 library encounters something unusual */ - public static HDF5WriteHelper createDataset(int hdf5GroupID,String datasetName,long[] dims) throws HDF5Exception{ + private static HDF5WriteHelper createDataset(int hdf5GroupID,String datasetName,long[] dims) throws HDF5Exception{ //Create dataset and return it, must be closed when finished long[] datasetDimensions = dims; int hdf5DataspaceIDValues = H5.H5Screate_simple(datasetDimensions.length, datasetDimensions, null); @@ -279,7 +250,7 @@ public static HDF5WriteHelper createDataset(int hdf5GroupID,String datasetName,l * @return the new group's ID number * @throws HDF5Exception if the hdf5 library encounters something unusual */ - public static int createGroup(int hdf5GroupID,String groupName) throws HDF5Exception{ + private static int createGroup(int hdf5GroupID,String groupName) throws HDF5Exception{ return H5.H5Gcreate(hdf5GroupID, (String)groupName,HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); } @@ -298,7 +269,7 @@ public static int createGroup(int hdf5GroupID,String groupName) throws HDF5Excep * @throws IllegalArgumentException * @throws HDF5Exception */ - public static void copySlice(int copyToDataSet,double[] copyFromData,long[] copyToStart,long[] copyToLength,long[] copyFromDims,long[] copyFromStart,long[] copyFromLength,int dataspaceID) throws NullPointerException, IllegalArgumentException, HDF5Exception { + private static void copySlice(int copyToDataSet,double[] copyFromData,long[] copyToStart,long[] copyToLength,long[] copyFromDims,long[] copyFromStart,long[] copyFromLength,int dataspaceID) throws NullPointerException, IllegalArgumentException, HDF5Exception { int hdf5DataspaceIDSlice = H5.H5Screate_simple(copyFromDims.length, copyFromDims, null); //Select the generated sliceData to copy-from H5.H5Sselect_hyperslab(hdf5DataspaceIDSlice, HDF5Constants.H5S_SELECT_SET, copyFromStart, null, copyFromLength, null); @@ -319,7 +290,7 @@ public static void copySlice(int copyToDataSet,double[] copyFromData,long[] copy * @throws NullPointerException (unsure how this occurs) * @throws HDF5Exception if the hdf5 library encounters something unusual */ - public static void insertAttribute(int hdf5GroupID,String attributeName,String data) throws NullPointerException, HDF5Exception { + private static void insertAttribute(int hdf5GroupID,String 
attributeName,String data) throws NullPointerException, HDF5Exception { //insertAttributes(hdf5GroupID, dataspaceName, new ArrayList(Arrays.asList(new String[] {data}))); //String[] attr = data.toArray(new String[0]); @@ -347,7 +318,7 @@ public static void insertAttribute(int hdf5GroupID,String attributeName,String d * @throws NullPointerException (unsure how this occurs) * @throws HDF5Exception if the hdf5 library encounters something unusual */ - public static void insertAttributes(int hdf5GroupID,String attributeName,List data) throws NullPointerException, HDF5Exception { + private static void insertAttributes(int hdf5GroupID,String attributeName,List data) throws NullPointerException, HDF5Exception { String[] attr = data.toArray(new String[0]); long[] dims = new long[] {attr.length}; // Always an array of length == 1 StringBuffer sb = new StringBuffer(); @@ -386,127 +357,4 @@ public static void insertAttributes(int hdf5GroupID,String attributeName,List(Arrays.asList(new String[] {data}))); -// } - - /** - * Insert a dataset at the specififed group where the data are strings - * - * @param hdf5GroupID the id of the group to apply the dataset to - * @param datasetName name of the dataset - * @param dims dimentional meansurements - * @param data the data to fill the dataset - * @throws NullPointerException (unsure how this occurs) - * @throws HDF5Exception if the hdf5 library encounters something unusual - */ - public static void insertStrings(int hdf5GroupID,String datasetName,long[] dims,List data) throws NullPointerException, HDF5Exception { - int largestStrLen = 0; - for(int i=0;i data) throws NullPointerException, HDF5Exception { - double[] hdfData = ArrayUtils.toPrimitive(((ArrayList)data).toArray(new Double[0])); - int hdf5DataspaceID = H5.H5Screate_simple(dims.length, dims, null); - int hdf5DatasetID = H5.H5Dcreate(hdf5GroupID, dataspaceName,HDF5Constants.H5T_NATIVE_DOUBLE, hdf5DataspaceID,HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); - H5.H5Dwrite_double(hdf5DatasetID, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, hdfData); - H5.H5Dclose(hdf5DatasetID); - H5.H5Sclose(hdf5DataspaceID); - } } From c43a289dd37cc7055c0d1a5d2a61ac309be7e285 Mon Sep 17 00:00:00 2001 From: Jim Schaff Date: Fri, 26 Apr 2024 18:50:52 -0400 Subject: [PATCH 12/16] Isolate HDF5 for Chombo Vis --- .../org/vcell/vis/io/ChomboFileReader.java | 116 +++++++++++++- .../main/java/org/vcell/vis/io/DataSet.java | 147 ++---------------- 2 files changed, 126 insertions(+), 137 deletions(-) diff --git a/vcell-core/src/main/java/org/vcell/vis/io/ChomboFileReader.java b/vcell-core/src/main/java/org/vcell/vis/io/ChomboFileReader.java index 4627e0053b..969384a092 100644 --- a/vcell-core/src/main/java/org/vcell/vis/io/ChomboFileReader.java +++ b/vcell-core/src/main/java/org/vcell/vis/io/ChomboFileReader.java @@ -1,15 +1,19 @@ package org.vcell.vis.io; -import java.io.File; +import java.io.*; import java.util.ArrayList; import java.util.List; import java.util.StringTokenizer; import java.util.Vector; +import java.util.zip.ZipEntry; import javax.swing.tree.DefaultMutableTreeNode; +import cbit.vcell.math.Variable; import ncsa.hdf.object.h5.H5CompoundDS; import ncsa.hdf.object.h5.H5ScalarDS; +import org.apache.commons.compress.archivers.zip.ZipArchiveEntry; +import org.apache.commons.compress.archivers.zip.ZipFile; import org.vcell.vis.chombo.ChomboBoundaries; import org.vcell.vis.chombo.ChomboBoundaries.BorderCellInfo; import 
org.vcell.vis.chombo.ChomboBoundaries.MeshMetrics; @@ -40,8 +44,116 @@ public class ChomboFileReader { private static final String MESH_ATTR_ORIGIN = "origin"; private static final String MESH_ATTR_EXTENT = "extent"; + public static File createTempHdf5File(InputStream is) throws IOException + { + OutputStream out = null; + try{ + File tempFile = File.createTempFile("temp", "hdf5"); + out=new FileOutputStream(tempFile); + byte buf[] = new byte[1024]; + int len; + while((len=is.read(buf))>0) { + out.write(buf,0,len); + } + return tempFile; + } + finally + { + try { + if (out != null) { + out.close(); + } + } catch (Exception ex) { + // ignore + } + } + } - /** + static File createTempHdf5File(ZipFile zipFile, String fileName) throws IOException + { + InputStream is = null; + try + { + ZipEntry dataEntry = zipFile.getEntry(fileName); + is = zipFile.getInputStream((ZipArchiveEntry) dataEntry); + return createTempHdf5File(is); + } + finally + { + try + { + if (is != null) + { + is.close(); + } + } + catch (Exception ex) + { + // ignore + } + } + } + + public static List readHdf5SolutionMetaData(InputStream is) throws Exception + { + File tempFile = null; + FileFormat solFile = null; + ArrayList dataBlockList = new ArrayList<>(); + try{ + tempFile = createTempHdf5File(is); + + FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5); + solFile = fileFormat.createInstance(tempFile.getAbsolutePath(), FileFormat.READ); + solFile.open(); + DefaultMutableTreeNode rootNode = (DefaultMutableTreeNode)solFile.getRootNode(); + Group rootGroup = (Group)rootNode.getUserObject(); + Group solGroup = (Group)rootGroup.getMemberList().get(0); + + List memberList = solGroup.getMemberList(); + for (HObject member : memberList) + { + if (!(member instanceof Dataset)){ + continue; + } + Dataset dataset = (Dataset)member; + String dsname = dataset.getName(); + int vt = -1; + String domain = null; + List solAttrList = dataset.getMetadata(); + for (Attribute attr : solAttrList) + { + String attrName = attr.getName(); + if(attrName.equals("variable type")){ + Object obj = attr.getValue(); + vt = ((int[])obj)[0]; + } else if (attrName.equals("domain")) { + Object obj = attr.getValue(); + domain = ((String[])obj)[0]; + } + } + long[] dims = dataset.getDims(); + String varName = domain == null ? 
dsname : domain + Variable.COMBINED_IDENTIFIER_SEPARATOR + dsname; + dataBlockList.add(DataBlock.createDataBlock(varName, vt, (int) dims[0], 0)); + } + return dataBlockList; + } finally { + try { + if (solFile != null) { + solFile.close(); + } + if (tempFile != null) { + if (!tempFile.delete()) { + System.err.println("couldn't delete temp file " + tempFile); + } + } + } catch(Exception e) { + // ignore + } + } + } + + + /** * Z = boolean [B = byte [S = short diff --git a/vcell-core/src/main/java/org/vcell/vis/io/DataSet.java b/vcell-core/src/main/java/org/vcell/vis/io/DataSet.java index 7f2952e2aa..8cddfbbc3d 100644 --- a/vcell-core/src/main/java/org/vcell/vis/io/DataSet.java +++ b/vcell-core/src/main/java/org/vcell/vis/io/DataSet.java @@ -9,37 +9,18 @@ */ package org.vcell.vis.io; -import java.io.BufferedInputStream; -import java.io.BufferedOutputStream; -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileNotFoundException; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.util.List; -import java.util.Vector; -import java.util.zip.ZipEntry; - -import javax.swing.tree.DefaultMutableTreeNode; +import cbit.vcell.math.VariableType; +import cbit.vcell.simdata.ChomboSimDataReader; import org.apache.commons.compress.archivers.zip.ZipArchiveEntry; -//import java.util.zip.ZipFile; import org.apache.commons.compress.archivers.zip.ZipFile; - -import cbit.vcell.math.Variable; -import cbit.vcell.math.VariableType; -import ncsa.hdf.object.Attribute; -import ncsa.hdf.object.Dataset; -import ncsa.hdf.object.FileFormat; -import ncsa.hdf.object.Group; -import ncsa.hdf.object.HObject; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import java.io.*; +import java.util.List; +import java.util.Vector; + public class DataSet implements java.io.Serializable { private final static Logger lg = LogManager.getLogger(DataSet.class); @@ -213,7 +194,10 @@ public void read(File file, File zipFile) throws IOException, OutOfMemoryError { if(is != null && zipFile!=null && isChombo(zipFile)){ try { - readHdf5SolutionMetaData(is); + List dataBlockList = ChomboFileReader.readHdf5SolutionMetaData(is); + for (DataBlock dataBlock : dataBlockList) { + this.dataBlockList.addElement(dataBlock); + } } catch (Exception e) { throw new IOException(e.getMessage(),e); } @@ -240,115 +224,8 @@ public void read(File file, File zipFile) throws IOException, OutOfMemoryError { private boolean isChombo(File zipFile){ return zipFile.getName().endsWith(".hdf5.zip"); } - - private static File createTempHdf5File(InputStream is) throws IOException - { - OutputStream out = null; - try{ - File tempFile = File.createTempFile("temp", "hdf5"); - out=new FileOutputStream(tempFile); - byte buf[] = new byte[1024]; - int len; - while((len=is.read(buf))>0) { - out.write(buf,0,len); - } - return tempFile; - } - finally - { - try { - if (out != null) { - out.close(); - } - } catch (Exception ex) { - // ignore - } - } - } - - static File createTempHdf5File(ZipFile zipFile, String fileName) throws IOException - { - InputStream is = null; - try - { - ZipEntry dataEntry = zipFile.getEntry(fileName); - is = zipFile.getInputStream((ZipArchiveEntry) dataEntry); - return createTempHdf5File(is); - } - finally - { - try - { - if (is != null) - { - is.close(); - } - } - catch (Exception ex) - { - // ignore - } - } - } - - - private void 
readHdf5SolutionMetaData(InputStream is) throws Exception - { - File tempFile = null; - FileFormat solFile = null; - try{ - tempFile = createTempHdf5File(is); - - FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5); - solFile = fileFormat.createInstance(tempFile.getAbsolutePath(), FileFormat.READ); - solFile.open(); - DefaultMutableTreeNode rootNode = (DefaultMutableTreeNode)solFile.getRootNode(); - Group rootGroup = (Group)rootNode.getUserObject(); - Group solGroup = (Group)rootGroup.getMemberList().get(0); - - List memberList = solGroup.getMemberList(); - for (HObject member : memberList) - { - if (!(member instanceof Dataset)){ - continue; - } - Dataset dataset = (Dataset)member; - String dsname = dataset.getName(); - int vt = -1; - String domain = null; - List solAttrList = dataset.getMetadata(); - for (Attribute attr : solAttrList) - { - String attrName = attr.getName(); - if(attrName.equals("variable type")){ - Object obj = attr.getValue(); - vt = ((int[])obj)[0]; - } else if (attrName.equals("domain")) { - Object obj = attr.getValue(); - domain = ((String[])obj)[0]; - } - } - long[] dims = dataset.getDims(); - String varName = domain == null ? dsname : domain + Variable.COMBINED_IDENTIFIER_SEPARATOR + dsname; - dataBlockList.addElement(DataBlock.createDataBlock(varName, vt, (int) dims[0], 0)); - } - } finally { - try { - if (solFile != null) { - solFile.close(); - } - if (tempFile != null) { - if (!tempFile.delete()) { - System.err.println("couldn't delete temp file " + tempFile); - } - } - } catch(Exception e) { - // ignore - } - } - } - - + + public static void writeNew(File file, String[] varNameArr, VariableType[] varTypeArr, org.vcell.util.ISize size, double[][] dataArr) throws IOException { FileOutputStream fos = null; From d9487d26be838921188d809175bf84fd8af73c31 Mon Sep 17 00:00:00 2001 From: Jim Schaff Date: Fri, 26 Apr 2024 18:51:20 -0400 Subject: [PATCH 13/16] Isolate HDF5 for Chombo simdata --- .../vcell/simdata/ChomboSimDataReader.java | 32 +++++++++++++++++-- .../main/java/cbit/vcell/simdata/DataSet.java | 23 +++---------- 2 files changed, 34 insertions(+), 21 deletions(-) diff --git a/vcell-core/src/main/java/cbit/vcell/simdata/ChomboSimDataReader.java b/vcell-core/src/main/java/cbit/vcell/simdata/ChomboSimDataReader.java index 44fb73623b..61ac310a96 100644 --- a/vcell-core/src/main/java/cbit/vcell/simdata/ChomboSimDataReader.java +++ b/vcell-core/src/main/java/cbit/vcell/simdata/ChomboSimDataReader.java @@ -10,11 +10,37 @@ import javax.swing.tree.DefaultMutableTreeNode; import java.io.*; +import java.util.ArrayList; import java.util.List; import java.util.Vector; import java.util.zip.ZipEntry; public class ChomboSimDataReader { + private static final String HDF5_GROUP_SOLUTION = "/solution"; + private static final String HDF5_GROUP_EXTRAPOLATED_VOLUMES = "/extrapolated_volumes"; + private static final String HDF5_GROUP_DIRECTORY_SEPARATOR = "/"; + + /** + * Creates a relative path to the solution to the variable specified + * + * @param varName the name of the variable to path to. + * @return the relative path + */ + public static String getVarSolutionPath(String varName){ + return HDF5_GROUP_SOLUTION + HDF5_GROUP_DIRECTORY_SEPARATOR + Variable.getNameFromCombinedIdentifier(varName); + } + + /** + * Creates a relative path to the extrapolated values of a given variable name. 
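+ * (for a simple variable named "S", this resolves to "/extrapolated_volumes/__S_extrapolated__"; "S" is an illustrative name)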
+ * + * @param varName name of the variable to path to + * @return the relative path + */ + public static String getVolVarExtrapolatedValuesPath(String varName){ + return HDF5_GROUP_EXTRAPOLATED_VOLUMES + HDF5_GROUP_DIRECTORY_SEPARATOR + "__" + Variable.getNameFromCombinedIdentifier(varName) + "_extrapolated__"; + } + + public static void getNextDataAtCurrentTimeChombo(double[][] returnValues, ZipFile currentZipFile, String[] varNames, int[][] varIndexes, String[] simDataFileNames, int masterTimeIndex) throws Exception { File tempFile = null; FileFormat solFile = null; @@ -46,7 +72,7 @@ else if (varName.endsWith(OutsideVariable.OUTSIDE_VARIABLE_SUFFIX)) } else { - String varPath = Hdf5Utils.getVarSolutionPath(varNames[k]); + String varPath = getVarSolutionPath(varNames[k]); HObject solObj = FileFormat.findObject(solFile, varPath); if (solObj instanceof Dataset) { Dataset dataset = (Dataset)solObj; @@ -157,7 +183,7 @@ public static double[] readHdf5VariableSolution(File zipfile, String fileName, S solFile.open(); if (varName != null) { - String varPath = Hdf5Utils.getVarSolutionPath(varName); + String varPath = getVarSolutionPath(varName); HObject solObj = FileFormat.findObject(solFile, varPath); if (solObj instanceof Dataset) { @@ -218,7 +244,7 @@ private static double[] readChomboExtrapolatedValues(String varName, FileFormat double data[] = null; if (varName != null) { - String varPath = Hdf5Utils.getVolVarExtrapolatedValuesPath(varName); + String varPath = getVolVarExtrapolatedValuesPath(varName); HObject solObj = FileFormat.findObject(solFile, varPath); if (solObj == null) { diff --git a/vcell-core/src/main/java/cbit/vcell/simdata/DataSet.java b/vcell-core/src/main/java/cbit/vcell/simdata/DataSet.java index 472e1ac1ce..ba34007b97 100644 --- a/vcell-core/src/main/java/cbit/vcell/simdata/DataSet.java +++ b/vcell-core/src/main/java/cbit/vcell/simdata/DataSet.java @@ -9,29 +9,16 @@ */ package cbit.vcell.simdata; -import java.io.BufferedInputStream; -import java.io.BufferedOutputStream; -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileNotFoundException; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.util.Vector; -//import java.util.zip.ZipEntry; -//import java.util.zip.ZipFile; - -import cbit.vcell.solvers.mb.MovingBoundaryReader; -import org.apache.commons.compress.archivers.zip.ZipArchiveEntry; -import org.apache.commons.compress.archivers.zip.ZipFile; import cbit.vcell.math.VariableType; import cbit.vcell.simdata.SimulationData.SolverDataType; +import org.apache.commons.compress.archivers.zip.ZipArchiveEntry; +import org.apache.commons.compress.archivers.zip.ZipFile; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.vcell.vis.io.ChomboFileReader; + +import java.io.*; +import java.util.Vector; public class DataSet implements java.io.Serializable { From b73a652fba6325798e15fe7f34c4f4905abb1a45 Mon Sep 17 00:00:00 2001 From: Jim Schaff Date: Fri, 26 Apr 2024 18:51:46 -0400 Subject: [PATCH 14/16] Isolate HDF5 for MovingBoundary --- ...5Path.java => MovingBoundardyVH5Path.java} | 6 ++-- ....java => MovingBoundardyVH5TypedPath.java} | 6 ++-- .../solvers/mb/MovingBoundaryReader.java | 28 ++++++++--------- ...t.java => MovingBoundardyVH5PathTest.java} | 30 +++++++++---------- ...lient.java => MovingBoundaryH5Client.java} | 12 ++++---- ...=> MovingBoundaryH5FileStructureTest.java} | 4 +-- 
.../solvers/mb/MovingBoundaryResultTest.java | 2 +- ...set.java => MovingBoundaryVH5Dataset.java} | 6 ++-- 8 files changed, 46 insertions(+), 48 deletions(-) rename vcell-core/src/main/java/cbit/vcell/solvers/mb/{VH5Path.java => MovingBoundardyVH5Path.java} (93%) rename vcell-core/src/main/java/cbit/vcell/solvers/mb/{VH5TypedPath.java => MovingBoundardyVH5TypedPath.java} (91%) rename vcell-core/src/test/java/cbit/vcell/solvers/mb/{VH5PathTest.java => MovingBoundardyVH5PathTest.java} (71%) rename vcell-core/src/test/java/cbit/vcell/solvers/mb/{H5Client.java => MovingBoundaryH5Client.java} (87%) rename vcell-core/src/test/java/cbit/vcell/solvers/mb/{H5FileStructure.java => MovingBoundaryH5FileStructureTest.java} (97%) rename vcell-core/src/test/java/cbit/vcell/solvers/mb/{VH5Dataset.java => MovingBoundaryVH5Dataset.java} (94%) diff --git a/vcell-core/src/main/java/cbit/vcell/solvers/mb/VH5Path.java b/vcell-core/src/main/java/cbit/vcell/solvers/mb/MovingBoundardyVH5Path.java similarity index 93% rename from vcell-core/src/main/java/cbit/vcell/solvers/mb/VH5Path.java rename to vcell-core/src/main/java/cbit/vcell/solvers/mb/MovingBoundardyVH5Path.java index a313718abd..a175262350 100644 --- a/vcell-core/src/main/java/cbit/vcell/solvers/mb/VH5Path.java +++ b/vcell-core/src/main/java/cbit/vcell/solvers/mb/MovingBoundardyVH5Path.java @@ -24,18 +24,18 @@ * * @author GWeatherby */ -public class VH5Path { +public class MovingBoundardyVH5Path { protected Object target; protected Exception exc; - protected static final Logger lg = LogManager.getLogger(VH5Path.class); + protected static final Logger lg = LogManager.getLogger(MovingBoundardyVH5Path.class); /** * @param g staring point, not null * @param names path to search */ - public VH5Path(Group g, String... names){ + public MovingBoundardyVH5Path(Group g, String... names){ target = null; exc = null; try { diff --git a/vcell-core/src/main/java/cbit/vcell/solvers/mb/VH5TypedPath.java b/vcell-core/src/main/java/cbit/vcell/solvers/mb/MovingBoundardyVH5TypedPath.java similarity index 91% rename from vcell-core/src/main/java/cbit/vcell/solvers/mb/VH5TypedPath.java rename to vcell-core/src/main/java/cbit/vcell/solvers/mb/MovingBoundardyVH5TypedPath.java index 26039f69a0..d9d5b4358d 100644 --- a/vcell-core/src/main/java/cbit/vcell/solvers/mb/VH5TypedPath.java +++ b/vcell-core/src/main/java/cbit/vcell/solvers/mb/MovingBoundardyVH5TypedPath.java @@ -9,7 +9,7 @@ import ncsa.hdf.object.h5.H5ScalarDS; /** - * extends VH5Path to include type checking and data conversion. Throws exception if data not found, or + * extends MovingBoundardyVH5Path to include type checking and data conversion. Throws exception if data not found, or * not of correct type, and no implemented conversion works. *

* Single value primitives should be retrieved by requesting array and verifying it's a single element @@ -17,9 +17,9 @@ * @param type of returned data. primitives not supported, autoboxing not supported * @author GWeatherby */ -public class VH5TypedPath extends VH5Path { +public class MovingBoundardyVH5TypedPath extends MovingBoundardyVH5Path { - public VH5TypedPath(Group g, Class clzz, String... names){ + public MovingBoundardyVH5TypedPath(Group g, Class clzz, String... names){ super(g, names); Objects.requireNonNull(clzz); if(clzz.isPrimitive()){ diff --git a/vcell-core/src/main/java/cbit/vcell/solvers/mb/MovingBoundaryReader.java b/vcell-core/src/main/java/cbit/vcell/solvers/mb/MovingBoundaryReader.java index 8d6604da49..238e85e756 100644 --- a/vcell-core/src/main/java/cbit/vcell/solvers/mb/MovingBoundaryReader.java +++ b/vcell-core/src/main/java/cbit/vcell/solvers/mb/MovingBoundaryReader.java @@ -96,7 +96,7 @@ public int lastTimeIndex(){ void testquery(){ try { -// VH5TypedPath path = new VH5TypedPath<>(root, H5ScalarDS.class,"boundaries"); +// MovingBoundardyVH5TypedPath path = new MovingBoundardyVH5TypedPath<>(root, H5ScalarDS.class,"boundaries"); // H5ScalarDS hsd = path.get(); // hsd.init( ); // int[] si = hsd.getSelectedIndex(); @@ -107,17 +107,17 @@ void testquery(){ // sdims[0] = 1; // Object o2 = hsd.read(); // System.out.println(o2); -// VH5TypedPath dpath = new VH5TypedPath<>(root, String[].class,"boundaries"); +// MovingBoundardyVH5TypedPath dpath = new MovingBoundardyVH5TypedPath<>(root, String[].class,"boundaries"); // String[] d = dpath.get(); // System.out.println(d); -// VH5Path path = new VH5Path(root,"generationTimes"); +// MovingBoundardyVH5Path path = new MovingBoundardyVH5Path(root,"generationTimes"); // Object o = path.getData(); // H5ScalarDS hsd = (H5ScalarDS) o; // Object o2 = hsd.read(); // System.out.println(o2); -// VH5TypedPath dpath = new VH5TypedPath(root, H5CompoundDS.class,"elements"); +// MovingBoundardyVH5TypedPath dpath = new MovingBoundardyVH5TypedPath(root, H5CompoundDS.class,"elements"); // H5CompoundDS cds = dpath.get(); // cds.init(); // selectPlane(cds,50,50,0); @@ -127,7 +127,7 @@ void testquery(){ // int id = dts[0].open(); // o = cds.getData( ); // -// //VH5Path path2 = new VH5Path(root,"elements","volumePointsX"); +// //MovingBoundardyVH5Path path2 = new MovingBoundardyVH5Path(root,"elements","volumePointsX"); // // o = path2.getData(); // System.out.println(o); // @@ -141,40 +141,40 @@ void testquery(){ } private double[] getDoubleArray(String... names){ - VH5TypedPath dpath = new VH5TypedPath(root, double[].class, names); + MovingBoundardyVH5TypedPath dpath = new MovingBoundardyVH5TypedPath(root, double[].class, names); return dpath.get(); } private double singleDouble(String... names){ double[] a = getDoubleArray(names); if(a.length != 1){ - throw new MovingBoundaryResultException(VH5Path.concat(names) + " is not single element array"); + throw new MovingBoundaryResultException(MovingBoundardyVH5Path.concat(names) + " is not single element array"); } return a[0]; } private long[] getLongArray(String... names){ - VH5TypedPath dpath = new VH5TypedPath(root, long[].class, names); + MovingBoundardyVH5TypedPath dpath = new MovingBoundardyVH5TypedPath(root, long[].class, names); return dpath.get(); } private long singleLong(String... 
names){ long[] a = getLongArray(names); if(a.length != 1){ - throw new MovingBoundaryResultException(VH5Path.concat(names) + " is not single element array"); + throw new MovingBoundaryResultException(MovingBoundardyVH5Path.concat(names) + " is not single element array"); } return a[0]; } private int[] getIntArray(String... names){ - VH5TypedPath dpath = new VH5TypedPath(root, int[].class, names); + MovingBoundardyVH5TypedPath dpath = new MovingBoundardyVH5TypedPath(root, int[].class, names); return dpath.get(); } private int singleInt(String... names){ int[] a = getIntArray(names); if(a.length != 1){ - throw new MovingBoundaryResultException(VH5Path.concat(names) + " is not single element array"); + throw new MovingBoundaryResultException(MovingBoundardyVH5Path.concat(names) + " is not single element array"); } return a[0]; } @@ -400,10 +400,10 @@ private class PlaneNodes { final H5CompoundDS species; PlaneNodes() throws Exception{ - VH5TypedPath dpath = new VH5TypedPath(root, H5CompoundDS.class, "elements"); + MovingBoundardyVH5TypedPath dpath = new MovingBoundardyVH5TypedPath(root, H5CompoundDS.class, "elements"); elements = dpath.get(); elements.read(); - dpath = new VH5TypedPath(root, H5CompoundDS.class, "species"); + dpath = new MovingBoundardyVH5TypedPath(root, H5CompoundDS.class, "species"); species = dpath.get(); species.read(); } @@ -441,7 +441,7 @@ public int[] getBoundaryIndexes(int timeIndex){ VCAssert.assertTrue(timeIndex >= 0, "negative time index"); validateTimeIndex(timeIndex); - VH5TypedPath path = new VH5TypedPath<>(root, H5ScalarDS.class, "boundaries"); + MovingBoundardyVH5TypedPath path = new MovingBoundardyVH5TypedPath<>(root, H5ScalarDS.class, "boundaries"); H5ScalarDS hsd = path.get(); hsd.init(); long[] start = hsd.getStartDims(); diff --git a/vcell-core/src/test/java/cbit/vcell/solvers/mb/VH5PathTest.java b/vcell-core/src/test/java/cbit/vcell/solvers/mb/MovingBoundardyVH5PathTest.java similarity index 71% rename from vcell-core/src/test/java/cbit/vcell/solvers/mb/VH5PathTest.java rename to vcell-core/src/test/java/cbit/vcell/solvers/mb/MovingBoundardyVH5PathTest.java index 1ee0844c14..9c6a2e59a3 100644 --- a/vcell-core/src/test/java/cbit/vcell/solvers/mb/VH5PathTest.java +++ b/vcell-core/src/test/java/cbit/vcell/solvers/mb/MovingBoundardyVH5PathTest.java @@ -24,7 +24,7 @@ @Disabled @Tag("Fast") -public class VH5PathTest extends H5Client { +public class MovingBoundardyVH5PathTest extends MovingBoundaryH5Client { private static String fname = FILE; private FileFormat testFile = null; private Group root = null; @@ -65,25 +65,25 @@ public void run() { // create the file and add groups ans dataset into the file try { Group root = (Group) ((javax.swing.tree.DefaultMutableTreeNode) testFile.getRootNode()).getUserObject(); - VH5Path vpath = new VH5Path(root, "elements" ,"volume"); + MovingBoundardyVH5Path vpath = new MovingBoundardyVH5Path(root, "elements" ,"volume"); System.out.println(vpath.foundType()); - VH5TypedPath tpath = new VH5TypedPath(root, double[].class,"elements" ,"volume"); + MovingBoundardyVH5TypedPath tpath = new MovingBoundardyVH5TypedPath(root, double[].class,"elements" ,"volume"); double[] e = tpath.get(); System.out.println(e[0]); - VH5Path bpPath = new VH5Path(root, "elements" ,"boundaryPosition"); + MovingBoundardyVH5Path bpPath = new MovingBoundardyVH5Path(root, "elements" ,"boundaryPosition"); Object data = bpPath.getData(); System.out.println(data.getClass().getSimpleName()); - VH5Path vpPath = new VH5Path(root, "elements" ,"volumePoints"); + 
MovingBoundardyVH5Path vpPath = new MovingBoundardyVH5Path(root, "elements" ,"volumePoints"); data = vpPath.getData(); System.out.println(data.getClass().getSimpleName()); -// VH5TypedPath spath = new VH5TypedPath(root, String[].class,"elements" ,"front description"); - VH5TypedPath spath = new VH5TypedPath(root, String.class,"elements" ,"front description"); +// MovingBoundardyVH5TypedPath spath = new MovingBoundardyVH5TypedPath(root, String[].class,"elements" ,"front description"); + MovingBoundardyVH5TypedPath spath = new MovingBoundardyVH5TypedPath(root, String.class,"elements" ,"front description"); // String[] sdata = spath.get(); // System.out.println(sdata[0]); System.out.println(spath.get( )); - VH5Path xpath = new VH5Path(root, "elements" ,"front description"); + MovingBoundardyVH5Path xpath = new MovingBoundardyVH5Path(root, "elements" ,"front description"); Object o = xpath.getData(); System.out.println(o); dtype("elements","endX"); @@ -94,15 +94,15 @@ public void run() { dtype("solverTimeStep"); dtype("timeStep"); dtype("timeStepTimes"); - VH5TypedPath ipath = new VH5TypedPath(root, int[].class,"lastTimeIndex"); + MovingBoundardyVH5TypedPath ipath = new MovingBoundardyVH5TypedPath(root, int[].class,"lastTimeIndex"); System.out.println(Arrays.toString(ipath.get())); - VH5TypedPath lpath = new VH5TypedPath(root, long[].class,"elements","numX"); + MovingBoundardyVH5TypedPath lpath = new MovingBoundardyVH5TypedPath(root, long[].class,"elements","numX"); System.out.println(Arrays.toString(lpath.get())); // System.out.println("-------"); -// VH5TypedPath spath = new VH5TypedPath(root, H5ScalarDS.class,"endTime"); +// MovingBoundardyVH5TypedPath spath = new MovingBoundardyVH5TypedPath(root, H5ScalarDS.class,"endTime"); // H5ScalarDS ds = spath.get( ); // Object o = ds.read(); -// VH5Dataset vds = new VH5Dataset(ds); +// MovingBoundaryVH5Dataset vds = new MovingBoundaryVH5Dataset(ds); // vds.info(); /* @@ -117,7 +117,7 @@ public void run() { } private void dtype(String ...name) { - VH5TypedPath dpath = new VH5TypedPath(root, double[].class,name); + MovingBoundardyVH5TypedPath dpath = new MovingBoundardyVH5TypedPath(root, double[].class,name); System.out.println(StringUtils.join(name,'/') + ' ' + Arrays.toString(dpath.get())); } @@ -125,7 +125,7 @@ private void dtype(String ...name) { @Test public void badType () { assertThrows(UnsupportedOperationException.class, () -> { - VH5TypedPath ipath = new VH5TypedPath(root, int[].class,"elements" ,"volume"); + MovingBoundardyVH5TypedPath ipath = new MovingBoundardyVH5TypedPath(root, int[].class,"elements" ,"volume"); System.out.println(ipath); }); } @@ -133,7 +133,7 @@ public void badType () { @Test public void primType () { assertThrows(UnsupportedOperationException.class, () -> { - VH5TypedPath ipath = new VH5TypedPath(root, int.class,"elements" ,"volume"); + MovingBoundardyVH5TypedPath ipath = new MovingBoundardyVH5TypedPath(root, int.class,"elements" ,"volume"); System.out.println(ipath); }); } diff --git a/vcell-core/src/test/java/cbit/vcell/solvers/mb/H5Client.java b/vcell-core/src/test/java/cbit/vcell/solvers/mb/MovingBoundaryH5Client.java similarity index 87% rename from vcell-core/src/test/java/cbit/vcell/solvers/mb/H5Client.java rename to vcell-core/src/test/java/cbit/vcell/solvers/mb/MovingBoundaryH5Client.java index 334fe3510e..4b7ab22745 100644 --- a/vcell-core/src/test/java/cbit/vcell/solvers/mb/H5Client.java +++ b/vcell-core/src/test/java/cbit/vcell/solvers/mb/MovingBoundaryH5Client.java @@ -1,26 +1,24 @@ package 
cbit.vcell.solvers.mb; -import java.util.List; - -import org.apache.commons.lang3.StringUtils; - import cbit.vcell.resource.NativeLib; import cbit.vcell.resource.PropertyLoader; -import cbit.vcell.resource.ResourceUtil; import ncsa.hdf.object.DataFormat; +import org.apache.commons.lang3.StringUtils; + +import java.util.List; /** * setup logging, load HDF5 native * @author GWeatherby * */ -public class H5Client { +public class MovingBoundaryH5Client { // protected static String FILE = "nformat.h5"; protected static String FILE = "nformat2.h5"; // protected static String FILE = "fig43-10-1.h5"; - public H5Client() { + public MovingBoundaryH5Client() { PropertyLoader.setProperty(PropertyLoader.installationRoot, "."); NativeLib.HDF5.load(); } diff --git a/vcell-core/src/test/java/cbit/vcell/solvers/mb/H5FileStructure.java b/vcell-core/src/test/java/cbit/vcell/solvers/mb/MovingBoundaryH5FileStructureTest.java similarity index 97% rename from vcell-core/src/test/java/cbit/vcell/solvers/mb/H5FileStructure.java rename to vcell-core/src/test/java/cbit/vcell/solvers/mb/MovingBoundaryH5FileStructureTest.java index 6b800c7ccd..06acea76b7 100644 --- a/vcell-core/src/test/java/cbit/vcell/solvers/mb/H5FileStructure.java +++ b/vcell-core/src/test/java/cbit/vcell/solvers/mb/MovingBoundaryH5FileStructureTest.java @@ -45,7 +45,7 @@ * @version 2.4 */ @Tag("Fast") -public class H5FileStructure extends H5Client { +public class MovingBoundaryH5FileStructureTest extends MovingBoundaryH5Client { private static String fname = FILE; private static long[] dims2D = {20, 10}; private static long[] dims3D = {20, 10, 5}; @@ -110,7 +110,7 @@ private static void printGroup(Group g, String indent) throws Exception{ Dataset ds = CastingUtils.downcast(Dataset.class, obj); if(ds != null && ds.getName().equals("elements")){ // if (ds != null && ds.getName().equals("boundaries")) { - VH5Dataset vds = new VH5Dataset(ds); + MovingBoundaryVH5Dataset vds = new MovingBoundaryVH5Dataset(ds); vds.info(); vds.meta(); } diff --git a/vcell-core/src/test/java/cbit/vcell/solvers/mb/MovingBoundaryResultTest.java b/vcell-core/src/test/java/cbit/vcell/solvers/mb/MovingBoundaryResultTest.java index 123a551595..0a6abd8c28 100644 --- a/vcell-core/src/test/java/cbit/vcell/solvers/mb/MovingBoundaryResultTest.java +++ b/vcell-core/src/test/java/cbit/vcell/solvers/mb/MovingBoundaryResultTest.java @@ -12,7 +12,7 @@ @Disabled @Tag("Fast") -public class MovingBoundaryResultTest extends H5Client { +public class MovingBoundaryResultTest extends MovingBoundaryH5Client { private static String fname = FILE; MovingBoundaryReader mbr; public MovingBoundaryResultTest() { diff --git a/vcell-core/src/test/java/cbit/vcell/solvers/mb/VH5Dataset.java b/vcell-core/src/test/java/cbit/vcell/solvers/mb/MovingBoundaryVH5Dataset.java similarity index 94% rename from vcell-core/src/test/java/cbit/vcell/solvers/mb/VH5Dataset.java rename to vcell-core/src/test/java/cbit/vcell/solvers/mb/MovingBoundaryVH5Dataset.java index aa632ba6e1..1380d2ac06 100644 --- a/vcell-core/src/test/java/cbit/vcell/solvers/mb/VH5Dataset.java +++ b/vcell-core/src/test/java/cbit/vcell/solvers/mb/MovingBoundaryVH5Dataset.java @@ -18,10 +18,10 @@ import ncsa.hdf.object.h5.H5Datatype; import ncsa.hdf.object.h5.H5ScalarDS; -public class VH5Dataset { +public class MovingBoundaryVH5Dataset { private final Dataset dataset; - public VH5Dataset(Dataset dataset){ + public MovingBoundaryVH5Dataset(Dataset dataset){ super(); this.dataset = dataset; dataset.init(); @@ -77,7 +77,7 @@ public static void 
info(H5ScalarDS ds) throws Exception{
 //		dt = dt.getBasetype();
 		System.out.println(nt.getFullName());
 		System.out.println(nt.getDatatypeDescription());
-		System.out.println(H5Client.parseMeta(dt));
+		System.out.println(MovingBoundaryH5Client.parseMeta(dt));
 
 //		ds.init();
 //		int did = ds.open();

From e4f983cdc0fbce3782549214140d140575bcd758 Mon Sep 17 00:00:00 2001
From: Jim Schaff
Date: Fri, 26 Apr 2024 18:52:25 -0400
Subject: [PATCH 15/16] bring together ASCIIExporter related HDF5 into ASCIIExporter

---
 .../vcell/export/server/ASCIIExporter.java    | 135 ++++++++++++++++--
 1 file changed, 125 insertions(+), 10 deletions(-)

diff --git a/vcell-core/src/main/java/cbit/vcell/export/server/ASCIIExporter.java b/vcell-core/src/main/java/cbit/vcell/export/server/ASCIIExporter.java
index 58c768140a..ebdcf42d5a 100644
--- a/vcell-core/src/main/java/cbit/vcell/export/server/ASCIIExporter.java
+++ b/vcell-core/src/main/java/cbit/vcell/export/server/ASCIIExporter.java
@@ -38,7 +38,6 @@
 import cbit.vcell.geometry.SinglePoint;
 import cbit.vcell.math.VariableType;
 import cbit.vcell.simdata.DataServerImpl;
-import cbit.vcell.simdata.Hdf5Utils;
 import cbit.vcell.simdata.OutputContext;
 import cbit.vcell.simdata.ParticleDataBlock;
 import cbit.vcell.simdata.SimDataBlock;
@@ -75,6 +74,122 @@ public ASCIIExporter(ExportServiceImpl exportServiceImpl){
 		this.exportServiceImpl = exportServiceImpl;
 	}
 
+	/**
+	 * Insert a dataset at the specified group where the data are doubles (as a Java List)
+	 *
+	 * @param hdf5GroupID the id of the group to apply the dataset to
+	 * @param dataspaceName name of the dataset
+	 * @param dims dimensional measurements
+	 * @param data the data to fill the dataset
+	 * @throws NullPointerException (unsure how this occurs)
+	 * @throws HDF5Exception if the hdf5 library encounters something unusual
+	 */
+	public static void insertDoubles(int hdf5GroupID,String dataspaceName,long[] dims,List<Double> data) throws NullPointerException, HDF5Exception {
+		double[] hdfData = org.apache.commons.lang.ArrayUtils.toPrimitive(((ArrayList<Double>)data).toArray(new Double[0]));
+		int hdf5DataspaceID = H5.H5Screate_simple(dims.length, dims, null);
+		int hdf5DatasetID = H5.H5Dcreate(hdf5GroupID, dataspaceName,HDF5Constants.H5T_NATIVE_DOUBLE, hdf5DataspaceID,HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+		H5.H5Dwrite_double(hdf5DatasetID, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, hdfData);
+		H5.H5Dclose(hdf5DatasetID);
+		H5.H5Sclose(hdf5DataspaceID);
+	}
+
+	/**
+	 * Insert a dataset at the specified group where the data are doubles (as an array)
+	 *
+	 * @param hdf5GroupID the id of the group to apply the dataset to
+	 * @param dataspaceName name of the dataset
+	 * @param dims dimensional measurements
+	 * @param data the data to fill the dataset
+	 * @throws NullPointerException (unsure how this occurs)
+	 * @throws HDF5Exception if the hdf5 library encounters something unusual
+	 */
+	public static void insertDoubles(int hdf5GroupID,String dataspaceName,long[] dims,double[] data) throws NullPointerException, HDF5Exception {
+		int hdf5DataspaceID = H5.H5Screate_simple(dims.length, dims, null);
+		int hdf5DatasetID = H5.H5Dcreate(hdf5GroupID, dataspaceName,HDF5Constants.H5T_NATIVE_DOUBLE, hdf5DataspaceID,HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+		H5.H5Dwrite_double(hdf5DatasetID, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, 
(double[])data);
+		H5.H5Dclose(hdf5DatasetID);
+		H5.H5Sclose(hdf5DataspaceID);
+	}
+
+	/**
+	 * Insert a dataset at the specified group where the data are integers
+	 *
+	 * @param hdf5GroupID the id of the group to apply the dataset to
+	 * @param dataspaceName name of the dataset
+	 * @param dims dimensional measurements
+	 * @param data the data to fill the dataset
+	 * @throws NullPointerException (unsure how this occurs)
+	 * @throws HDF5Exception if the hdf5 library encounters something unusual
+	 */
+	public static void insertInts(int hdf5GroupID,String dataspaceName,long[] dims,int[] data) throws NullPointerException, HDF5Exception {
+		int hdf5DataspaceID = H5.H5Screate_simple(dims.length, dims, null);
+		int hdf5DatasetID = H5.H5Dcreate(hdf5GroupID, dataspaceName,HDF5Constants.H5T_NATIVE_INT, hdf5DataspaceID,HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+		H5.H5Dwrite_int(hdf5DatasetID, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, (int[])data);
+		H5.H5Dclose(hdf5DatasetID);
+		H5.H5Sclose(hdf5DataspaceID);
+	}
+
+	/**
+	 * Insert a dataset at the specified group where the data are strings
+	 *
+	 * @param hdf5GroupID the id of the group to apply the dataset to
+	 * @param datasetName name of the dataset
+	 * @param dims dimensional measurements
+	 * @param data the data to fill the dataset
+	 * @throws NullPointerException (unsure how this occurs)
+	 * @throws HDF5Exception if the hdf5 library encounters something unusual
+	 */
+	public static void insertStrings(int hdf5GroupID,String datasetName,long[] dims,List<String> data) throws NullPointerException, HDF5Exception {
+		int largestStrLen = 0;
+		for(int i=0;i<data.size();i++) {
+			largestStrLen = Math.max(largestStrLen, data.get(i).length());
+		}
+		int h5tcs1 = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+		H5.H5Tset_size(h5tcs1, largestStrLen);
+		int hdf5DataspaceID = H5.H5Screate_simple(dims.length, dims, null);
+		int hdf5DatasetID = H5.H5Dcreate(hdf5GroupID, datasetName,h5tcs1, hdf5DataspaceID,HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+		//pack the strings into fixed-length slots of the widest string
+		byte[] fixedLenStrBytes = new byte[largestStrLen * data.size()];
+		int index = 0;
+		for(String s : data) {
+			System.arraycopy(s.getBytes(), 0, fixedLenStrBytes, index, s.getBytes().length);
+			index+= largestStrLen;
+		}
+		H5.H5Dwrite(hdf5DatasetID, h5tcs1, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, fixedLenStrBytes);
+		H5.H5Dclose(hdf5DatasetID);
+		H5.H5Sclose(hdf5DataspaceID);
+		H5.H5Tclose(h5tcs1);
+	}
+
+	/**
+	 * Insert an attribute with a string value at the specified group
+	 *
+	 * @param hdf5GroupID the id of the group to apply the attribute to
+	 * @param attributeName name of the attribute
+	 * @param data the string value of the attribute
+	 * @throws NullPointerException (unsure how this occurs)
+	 * @throws HDF5Exception if the hdf5 library encounters something unusual
+	 */
+	public static void insertAttribute(int hdf5GroupID,String attributeName,String data) throws NullPointerException, HDF5Exception {
+		//insertAttributes(hdf5GroupID,attributeName,new ArrayList<String>(Arrays.asList(new String[] {data})));
+		//String[] attr = data.toArray(new String[0]);
+
+		String attr = data + '\u0000';
+
+		//https://support.hdfgroup.org/ftp/HDF5/examples/misc-examples/vlstra.c
+		int h5attrcs1 = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+		H5.H5Tset_size (h5attrcs1, attr.length() /*HDF5Constants.H5T_VARIABLE*/);
+		int dataspace_id = -1;
+		//dataspace_id = H5.H5Screate_simple(dims.length, dims,null);
+		dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
+		int attribute_id = H5.H5Acreate(hdf5GroupID, attributeName, h5attrcs1, dataspace_id, HDF5Constants.H5P_DEFAULT,HDF5Constants.H5P_DEFAULT);
+		H5.H5Awrite(attribute_id, h5attrcs1, attr.getBytes());
+		H5.H5Sclose(dataspace_id);
+		H5.H5Aclose(attribute_id);
+		H5.H5Tclose(h5attrcs1);
+	}
+
+
 /**
  * @throws IOException
  * @deprecated
  */
@@ -531,8 +646,8 @@ private List exportPDEData(OutputContext outputContext, long jobID
 			for(int st = beginTimeIndex; st <= endTimeIndex; st++){
 				subTimes[st - beginTimeIndex] = allTimes[st];
 			}
-			Hdf5Utils.insertDoubles(hdf5GroupID, PCS.TIMES.name(), new long[]{subTimes.length}, subTimes);//Hdf5Utils.writeHDF5Dataset(hdf5GroupID, PCS.TIMES.name(), new long[] {subTimes.length}, subTimes,false);
-			Hdf5Utils.insertInts(hdf5GroupID, PCS.TIMEBOUNDS.name(), new long[]{2}, new int[]{beginTimeIndex, endTimeIndex});//Hdf5Utils.writeHDF5Dataset(hdf5GroupID, PCS.TIMEBOUNDS.name(), new long[] {2}, new int[] {beginTimeIndex,endTimeIndex},false);
+			insertDoubles(hdf5GroupID, PCS.TIMES.name(), new long[]{subTimes.length}, subTimes);//UiTableExporterToHDF5.writeHDF5Dataset(hdf5GroupID, PCS.TIMES.name(), new long[] {subTimes.length}, subTimes,false);
+			insertInts(hdf5GroupID, PCS.TIMEBOUNDS.name(), new long[]{2}, new int[]{beginTimeIndex, endTimeIndex});//UiTableExporterToHDF5.writeHDF5Dataset(hdf5GroupID, PCS.TIMEBOUNDS.name(), new
long[] {2}, new int[] {beginTimeIndex,endTimeIndex},false); } switch(geometrySpecs.getModeID()){ @@ -846,10 +961,10 @@ private FileDataContainerID getCurveTimeSeries(int hdf5GroupVarID, PointsCurvesS if(hdf5GroupVarID != -1){ try { int hdf5GroupCurveID = H5.H5Gcreate(hdf5GroupVarID, getSpatialSelectionDescription(curve), HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT); - Hdf5Utils.insertInts(hdf5GroupCurveID, PCS.CURVEINDEXES.name(), new long[]{((int[]) treePCS.get(PCS.CURVEINDEXES)).length}, (int[]) treePCS.get(PCS.CURVEINDEXES));//Hdf5Utils.writeHDF5Dataset(hdf5GroupCurveID, PCS.CURVEINDEXES.name(), new long[] {((int[])treePCS.get(PCS.CURVEINDEXES)).length}, (int[])treePCS.get(PCS.CURVEINDEXES),false); - Hdf5Utils.insertDoubles(hdf5GroupCurveID, PCS.CURVEDISTANCES.name(), new long[]{((double[]) treePCS.get(PCS.CURVEDISTANCES)).length}, (double[]) treePCS.get(PCS.CURVEDISTANCES));//Hdf5Utils.writeHDF5Dataset(hdf5GroupCurveID, PCS.CURVEDISTANCES.name(), new long[] {((double[])treePCS.get(PCS.CURVEDISTANCES)).length}, (double[])treePCS.get(PCS.CURVEDISTANCES),false); + insertInts(hdf5GroupCurveID, PCS.CURVEINDEXES.name(), new long[]{((int[]) treePCS.get(PCS.CURVEINDEXES)).length}, (int[]) treePCS.get(PCS.CURVEINDEXES));//UiTableExporterToHDF5.writeHDF5Dataset(hdf5GroupCurveID, PCS.CURVEINDEXES.name(), new long[] {((int[])treePCS.get(PCS.CURVEINDEXES)).length}, (int[])treePCS.get(PCS.CURVEINDEXES),false); + insertDoubles(hdf5GroupCurveID, PCS.CURVEDISTANCES.name(), new long[]{((double[]) treePCS.get(PCS.CURVEDISTANCES)).length}, (double[]) treePCS.get(PCS.CURVEDISTANCES));//UiTableExporterToHDF5.writeHDF5Dataset(hdf5GroupCurveID, PCS.CURVEDISTANCES.name(), new long[] {((double[])treePCS.get(PCS.CURVEDISTANCES)).length}, (double[])treePCS.get(PCS.CURVEDISTANCES),false); if(treePCS.get(PCS.CURVECROSSMEMBRINDEX) != null){ - Hdf5Utils.insertInts(hdf5GroupCurveID, PCS.CURVECROSSMEMBRINDEX.name(), new long[]{((int[]) treePCS.get(PCS.CURVECROSSMEMBRINDEX)).length}, (int[]) treePCS.get(PCS.CURVECROSSMEMBRINDEX));//Hdf5Utils.writeHDF5Dataset(hdf5GroupCurveID, PCS.CURVECROSSMEMBRINDEX.name(), new long[] {((int[])treePCS.get(PCS.CURVECROSSMEMBRINDEX)).length}, (int[])treePCS.get(PCS.CURVECROSSMEMBRINDEX),false); + insertInts(hdf5GroupCurveID, PCS.CURVECROSSMEMBRINDEX.name(), new long[]{((int[]) treePCS.get(PCS.CURVECROSSMEMBRINDEX)).length}, (int[]) treePCS.get(PCS.CURVECROSSMEMBRINDEX));//UiTableExporterToHDF5.writeHDF5Dataset(hdf5GroupCurveID, PCS.CURVECROSSMEMBRINDEX.name(), new long[] {((int[])treePCS.get(PCS.CURVECROSSMEMBRINDEX)).length}, (int[])treePCS.get(PCS.CURVECROSSMEMBRINDEX),false); ArrayList crossPoints = new ArrayList(); for(int i = 0; i < crossingMembraneIndexes.length; i++){ if(crossingMembraneIndexes[i] != -1){ @@ -857,9 +972,9 @@ private FileDataContainerID getCurveTimeSeries(int hdf5GroupVarID, PointsCurvesS } } String attrText = PCS.CURVEVALS.name() + " columns " + crossPoints.get(0) + " and " + crossPoints.get(1) + " are added points of interpolation near membrane"; - Hdf5Utils.insertAttribute(hdf5GroupCurveID, PCS.CURVECROSSMEMBRINDEX.name() + " Info", attrText); //Hdf5Utils.writeHDF5Dataset(hdf5GroupCurveID, PCS.CURVECROSSMEMBRINDEX.name()+" Info", null, attrText,true); + insertAttribute(hdf5GroupCurveID, PCS.CURVECROSSMEMBRINDEX.name() + " Info", attrText); //UiTableExporterToHDF5.writeHDF5Dataset(hdf5GroupCurveID, PCS.CURVECROSSMEMBRINDEX.name()+" Info", null, attrText,true); } - Hdf5Utils.insertDoubles(hdf5GroupCurveID, PCS.CURVEVALS.name(), 
new long[]{endIndex - beginIndex + 1, ((int[]) treePCS.get(PCS.CURVEINDEXES)).length}, (ArrayList) treePCS.get(PCS.CURVEVALS));//Hdf5Utils.writeHDF5Dataset(hdf5GroupCurveID, PCS.CURVEVALS.name(), new long[] {endIndex-beginIndex+1,((int[])treePCS.get(PCS.CURVEINDEXES)).length}, (ArrayList)treePCS.get(PCS.CURVEVALS),false); + insertDoubles(hdf5GroupCurveID, PCS.CURVEVALS.name(), new long[]{endIndex - beginIndex + 1, ((int[]) treePCS.get(PCS.CURVEINDEXES)).length}, (ArrayList) treePCS.get(PCS.CURVEVALS));//UiTableExporterToHDF5.writeHDF5Dataset(hdf5GroupCurveID, PCS.CURVEVALS.name(), new long[] {endIndex-beginIndex+1,((int[])treePCS.get(PCS.CURVEINDEXES)).length}, (ArrayList)treePCS.get(PCS.CURVEVALS),false); H5.H5Gclose(hdf5GroupCurveID); } catch(Exception e){ throw new DataAccessException(e.getMessage(), e); @@ -1054,9 +1169,9 @@ private FileDataContainerID getPointsTimeSeries(PointsCurvesSlices pcs, int hdf5 } if(hdf5GroupID != -1){ long[] dimsCoord = new long[]{1, pointSpatialSelections.length}; - Hdf5Utils.insertStrings(hdf5GroupID, PCS.POINTINFO.name(), dimsCoord, (ArrayList) pcs.data.get(PCS.POINTINFO));//Hdf5Utils.writeHDF5Dataset(hdf5GroupID, PCS.POINTINFO.name(), dimsCoord, pcs.data.get(PCS.POINTINFO),false); + insertStrings(hdf5GroupID, PCS.POINTINFO.name(), dimsCoord, (ArrayList) pcs.data.get(PCS.POINTINFO));//UiTableExporterToHDF5.writeHDF5Dataset(hdf5GroupID, PCS.POINTINFO.name(), dimsCoord, pcs.data.get(PCS.POINTINFO),false); long[] dimsValues = new long[]{hdfTimes.length, pointSpatialSelections.length}; - Hdf5Utils.insertDoubles(hdf5GroupID, PCS.POINTVALS.name(), dimsValues, hdfValues);//Hdf5Utils.writeHDF5Dataset(hdf5GroupID, PCS.POINTVALS.name(), dimsValues, hdfValues,false); + insertDoubles(hdf5GroupID, PCS.POINTVALS.name(), dimsValues, hdfValues);//UiTableExporterToHDF5.writeHDF5Dataset(hdf5GroupID, PCS.POINTVALS.name(), dimsValues, hdfValues,false); } } From 389bd5d9b27337c4b64f24322ae87949d76fd839 Mon Sep 17 00:00:00 2001 From: Jim Schaff Date: Fri, 26 Apr 2024 18:53:01 -0400 Subject: [PATCH 16/16] replace implicit HDF5 lib loading with explicit --- .../LocalVCellConnectionFactory.java | 39 ++++++------------- 1 file changed, 12 insertions(+), 27 deletions(-) diff --git a/vcell-server/src/main/java/cbit/vcell/message/server/bootstrap/LocalVCellConnectionFactory.java b/vcell-server/src/main/java/cbit/vcell/message/server/bootstrap/LocalVCellConnectionFactory.java index c89d37fea8..c76576718c 100644 --- a/vcell-server/src/main/java/cbit/vcell/message/server/bootstrap/LocalVCellConnectionFactory.java +++ b/vcell-server/src/main/java/cbit/vcell/message/server/bootstrap/LocalVCellConnectionFactory.java @@ -10,28 +10,27 @@ package cbit.vcell.message.server.bootstrap; -import java.io.File; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.vcell.db.ConnectionFactory; -import org.vcell.db.DatabaseService; -import org.vcell.db.KeyFactory; -import org.vcell.util.AuthenticationException; -import org.vcell.util.document.User; -import org.vcell.util.document.UserLoginInfo; - import cbit.vcell.export.server.ExportServiceImpl; import cbit.vcell.message.server.dispatcher.SimulationDatabaseDirect; import cbit.vcell.modeldb.AdminDBTopLevel; import cbit.vcell.modeldb.DatabaseServerImpl; +import cbit.vcell.resource.NativeLib; import cbit.vcell.resource.PropertyLoader; import cbit.vcell.server.ConnectionException; import cbit.vcell.server.VCellConnection; import cbit.vcell.server.VCellConnectionFactory; import cbit.vcell.simdata.Cachetable; 
import cbit.vcell.simdata.DataSetControllerImpl; -import ncsa.hdf.object.FileFormat; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.vcell.db.ConnectionFactory; +import org.vcell.db.DatabaseService; +import org.vcell.db.KeyFactory; +import org.vcell.util.AuthenticationException; +import org.vcell.util.document.User; +import org.vcell.util.document.UserLoginInfo; + +import java.io.File; /** * This type was created in VisualAge. */ @@ -67,7 +66,7 @@ public VCellConnection createVCellConnection(UserLoginInfo userLoginInfo) throws SimulationDatabaseDirect simulationDatabase = new SimulationDatabaseDirect(adminDbTopLevel, databaseServerImpl, bCache); ExportServiceImpl exportServiceImpl = new ExportServiceImpl(); LocalVCellConnection vcConn = new LocalVCellConnection(userLoginInfo, simulationDatabase, dataSetControllerImpl, exportServiceImpl); - linkHDFLib(); + NativeLib.HDF5.load(); return vcConn; } catch (Throwable exc) { lg.error(exc.getMessage(), exc); @@ -100,18 +99,4 @@ public String getAuth0MappedUser() { return ""; } - /** - * trigger loading of HDF library when running local - */ -private void linkHDFLib( ) { - try { //lifted from hdf5group website - Class fileclass = Class.forName("ncsa.hdf.object.h5.H5File"); - FileFormat fileformat = (FileFormat)fileclass.newInstance(); - if (fileformat != null) { - FileFormat.addFileFormat(FileFormat.FILE_TYPE_HDF5, fileformat); - } - } catch(Throwable t) { - lg.error(t); - } -} }
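
For reference, the two path helpers added to ChomboSimDataReader in PATCH 13 are plain string builders, so they can be sanity-checked without loading any HDF5 natives at all. A minimal sketch follows; the variable name "C0" is illustrative, it assumes Variable.getNameFromCombinedIdentifier returns a plain (domain-free) name unchanged, and the expected outputs in the comments follow directly from the HDF5_GROUP_* constants in that patch:

    import cbit.vcell.simdata.ChomboSimDataReader;

    public class ChomboPathSketch {
        public static void main(String[] args) {
            // "/solution" + "/" + variable name
            System.out.println(ChomboSimDataReader.getVarSolutionPath("C0"));
            // expected: /solution/C0

            // "/extrapolated_volumes" + "/" + "__" + variable name + "_extrapolated__"
            System.out.println(ChomboSimDataReader.getVolVarExtrapolatedValuesPath("C0"));
            // expected: /extrapolated_volumes/__C0_extrapolated__
        }
    }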
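
Likewise, the helpers gathered into ASCIIExporter in PATCH 15 can be exercised on their own once the natives are loaded explicitly, as PATCH 16 now does via NativeLib.HDF5.load(). A minimal sketch, not taken from the patches themselves: the file name sketch.h5, the group name demo, and the property-setup line are illustrative (the setup mirrors MovingBoundaryH5Client), and only H5/HDF5Constants calls already present in this series plus the standard H5Fcreate/H5Fclose pair are used:

    import cbit.vcell.export.server.ASCIIExporter;
    import cbit.vcell.resource.NativeLib;
    import cbit.vcell.resource.PropertyLoader;
    import ncsa.hdf.hdf5lib.H5;
    import ncsa.hdf.hdf5lib.HDF5Constants;

    public class InsertDoublesSketch {
        public static void main(String[] args) throws Exception {
            PropertyLoader.setProperty(PropertyLoader.installationRoot, ".");
            NativeLib.HDF5.load(); // explicit load, replacing the old reflective linkHDFLib()

            int fileID = H5.H5Fcreate("sketch.h5", HDF5Constants.H5F_ACC_TRUNC,
                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
            int groupID = H5.H5Gcreate(fileID, "demo", HDF5Constants.H5P_DEFAULT,
                    HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);

            // a 1-D dataset of three doubles, the same shape exportPDEData uses for PCS.TIMES
            ASCIIExporter.insertDoubles(groupID, "TIMES", new long[]{3}, new double[]{0.0, 0.5, 1.0});
            // a scalar string attribute on the same group, as getCurveTimeSeries writes
            ASCIIExporter.insertAttribute(groupID, "TIMES Info", "illustrative note");

            H5.H5Gclose(groupID);
            H5.H5Fclose(fileID);
        }
    }

This keeps the whole HDF5 surface (native load, legacy ncsa.hdf.hdf5lib calls, and the insert* helpers) in one place, which is the point of the series.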