Use of ncsa.hdf.object.Group in project vcell by virtualcell — class CartesianMeshMovingBoundary, method readMeshFile.
/**
 * Reads a moving-boundary solver mesh from an HDF5 file into a
 * CartesianMeshMovingBoundary. Expects a group named {@code Group_Mesh} under the
 * root containing the datasets enumerated by {@link MeshDataset}.
 *
 * @param meshFile the HDF5 mesh file produced by the moving-boundary solver
 * @return the populated mesh
 * @throws Exception if the HDF5 library fails to initialize, the HDF5 FileFormat
 *         is unavailable, the mesh group is missing, or any dataset read fails
 */
public static CartesianMeshMovingBoundary readMeshFile(File meshFile) throws Exception {
    CartesianMeshMovingBoundary mesh = new CartesianMeshMovingBoundary();
    // Make sure the native HDF5 library is initialized before any file access.
    if (H5.H5open() < 0) {
        throw new Exception("H5.H5open() failed");
    }
    FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
    if (fileFormat == null) {
        throw new Exception("FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5) failed, returned null.");
    }
    FileFormat meshH5File = null;
    try {
        meshH5File = fileFormat.createInstance(meshFile.getAbsolutePath(), FileFormat.READ);
        meshH5File.open();
        DefaultMutableTreeNode rootNode = (DefaultMutableTreeNode) meshH5File.getRootNode();
        Group rootGroup = (Group) rootNode.getUserObject();
        // Locate the mesh group among the root's child groups.
        Group meshGroup = null;
        for (Object member : rootGroup.getMemberList()) {
            if (member instanceof Group) {
                Group g = (Group) member;
                // BUG FIX: the original had a stray ';' after this condition, which made
                // the name check a no-op so the first child group was always selected.
                if (g.getName().equals(Group_Mesh)) {
                    meshGroup = g;
                    break;
                }
            }
        }
        if (meshGroup == null) {
            throw new Exception(Group_Mesh + " group not found in mesh");
        }
        // Read each known dataset. Note: MeshDataset.valueOf throws
        // IllegalArgumentException for unrecognized dataset names (pre-existing
        // behavior, intentionally unchanged).
        for (Object member : meshGroup.getMemberList()) {
            if (member instanceof Dataset) {
                Dataset ds = (Dataset) member;
                Object data = ds.getData();
                MeshDataset mds = MeshDataset.valueOf(ds.getName());
                switch (mds) {
                    case dimension:
                        mesh.dimension = ((int[]) data)[0];
                        break;
                    case extent: {
                        // Moving-boundary meshes store 2D extents; the z value is a
                        // fixed 0.5 here — presumably a unit slab convention (confirm
                        // against the solver's output format).
                        double[] darr = (double[]) data;
                        mesh.extent = new Extent(darr[0], darr[1], 0.5);
                        break;
                    }
                    case origin: {
                        double[] darr = (double[]) data;
                        mesh.origin = new Origin(darr[0], darr[1], 0.5);
                        break;
                    }
                    case size: {
                        // 2D sizes; z count fixed at 1.
                        int[] iarr = (int[]) data;
                        mesh.size = new ISize(iarr[0], iarr[1], 1);
                        break;
                    }
                }
            }
        }
    } finally {
        // Always release the HDF5 file handle, even when parsing fails.
        if (meshH5File != null) {
            meshH5File.close();
        }
    }
    return mesh;
}
Use of ncsa.hdf.object.Group in project vcell by virtualcell — class H5FileStructure, method run.
// public static void main(String args[]) throws Exception {
/**
 * Opens the HDF5 test file read-only, prints its group/dataset hierarchy via
 * printGroup, and closes it. Failures are reported to stderr / stack trace
 * rather than failing the test (mirrors the original example-style behavior).
 */
@Test
public void run() {
    FileFormat testFile = null;
    try {
        // retrieve an instance of H5File
        FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
        if (fileFormat == null) {
            System.err.println("Cannot find HDF5 FileFormat.");
            return;
        }
        // open the file with read-only access
        testFile = fileFormat.createInstance(fname, FileFormat.READ);
        if (testFile == null) {
            System.err.println("Failed to open file: " + fname);
            return;
        }
        // open the file and retrieve the file structure
        testFile.open();
        Group root = (Group) ((javax.swing.tree.DefaultMutableTreeNode) testFile.getRootNode()).getUserObject();
        printGroup(root, "");
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        // FIX: close in finally so the HDF5 file handle is released even when
        // open() or printGroup() throws (the original leaked it on exception).
        if (testFile != null) {
            try {
                testFile.close();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }
}
Use of ncsa.hdf.object.Group in project vcell by virtualcell — class H5FileStructure, method createFile.
/**
 * Creates the HDF5 test file and populates it with two groups
 * ("integer arrays" and "float arrays"), each holding 2D and 3D scalar
 * datasets. Mirrors javaExample.H5DatasetCreate.
 *
 * @see javaExample.HDF5DatasetCreate
 * @throws Exception if file creation or any group/dataset creation fails
 */
private static void createFile() throws Exception {
    // retrieve an instance of H5File
    FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
    if (fileFormat == null) {
        System.err.println("Cannot find HDF5 FileFormat.");
        return;
    }
    // create a new file with a given file name (FILE_CREATE_DELETE replaces any existing file)
    H5File testFile = (H5File) fileFormat.createFile(fname, FileFormat.FILE_CREATE_DELETE);
    if (testFile == null) {
        System.err.println("Failed to create file:" + fname);
        return;
    }
    try {
        // open the file and retrieve the root group
        testFile.open();
        Group root = (Group) ((javax.swing.tree.DefaultMutableTreeNode) testFile.getRootNode()).getUserObject();
        // create groups at the root
        Group g1 = testFile.createGroup("integer arrays", root);
        Group g2 = testFile.createGroup("float arrays", root);
        // 2D 32-bit (4 byte) signed integer dataset, 20 x 10
        // (FIX: the returned Dataset was stored in a never-read local; drop it)
        Datatype dtype = testFile.createDatatype(Datatype.CLASS_INTEGER, 4, Datatype.NATIVE, Datatype.NATIVE);
        testFile.createScalarDS("2D 32-bit integer 20x10", g1, dtype, dims2D, null, null, 0, null);
        // 3D 8-bit (1 byte) unsigned integer dataset, 20 x 10 x 5
        dtype = testFile.createDatatype(Datatype.CLASS_INTEGER, 1, Datatype.NATIVE, Datatype.SIGN_NONE);
        testFile.createScalarDS("3D 8-bit unsigned integer 20x10x5", g1, dtype, dims3D, null, null, 0, null);
        // 2D 64-bit (8 byte) double dataset, 20 x 10
        dtype = testFile.createDatatype(Datatype.CLASS_FLOAT, 8, Datatype.NATIVE, -1);
        testFile.createScalarDS("2D 64-bit double 20x10", g2, dtype, dims2D, null, null, 0, null);
        // 3D 32-bit (4 byte) float dataset, 20 x 10 x 5
        dtype = testFile.createDatatype(Datatype.CLASS_FLOAT, 4, Datatype.NATIVE, -1);
        testFile.createScalarDS("3D 32-bit float 20x10x5", g2, dtype, dims3D, null, null, 0, null);
    } finally {
        // FIX: close in finally so the file handle is released even when a
        // group/dataset creation throws (the original leaked it on exception).
        testFile.close();
    }
}
Use of ncsa.hdf.object.Group in project vcell by virtualcell — class SimResultsViewer, method createODEDataViewer.
/**
 * Builds and configures the ODE data viewer for the current simulation. As a
 * side effect, if the result set is an ODESimData carrying embedded HDF5 bytes
 * (multi-trial stochastic statistics), dumps those datasets to stdout for
 * diagnostics; the dump is strictly best-effort and never fails viewer creation.
 * Creation date: (6/11/2004 2:33:44 PM)
 * @return the configured ODEDataViewer panel
 * @throws DataAccessException if the ODE solver result set cannot be retrieved
 */
private DataViewer createODEDataViewer() throws DataAccessException {
    odeDataViewer = new ODEDataViewer();
    odeDataViewer.setSimulation(getSimulation());
    ODESolverResultSet odesrs = ((ODEDataManager) dataManager).getODESolverResultSet();
    odeDataViewer.setOdeSolverResultSet(odesrs);
    odeDataViewer.setNFSimMolecularConfigurations(((ODEDataManager) dataManager).getNFSimMolecularConfigurations());
    odeDataViewer.setVcDataIdentifier(dataManager.getVCDataIdentifier());
    if (getSimulation() != null) {
        String ownerName = generateHDF5DescrOwner(getSimulation());
        odeDataViewer.setHDF5DescriptionText(ownerName + ":" + simulation.getName());
    }
    //
    // Example code for reading stats data from Stochastic multitrial non-histogram
    //
    FileFormat hdf5FileFormat = null;
    File to = null;
    try {
        if (odeDataViewer.getOdeSolverResultSet() instanceof ODESimData) {
            byte[] hdf5FileBytes = ((ODESimData) odeDataViewer.getOdeSolverResultSet()).getHdf5FileBytes();
            if (hdf5FileBytes != null) {
                // Materialize the in-memory HDF5 image as a temp file so the
                // native HDF5 library can open it.
                to = File.createTempFile("odeStats_" + getSimulation().getSimulationInfo().getAuthoritativeVCSimulationIdentifier(), ".hdf5");
                Files.write(hdf5FileBytes, to);
                FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
                if (fileFormat == null) {
                    throw new Exception("Cannot find HDF5 FileFormat.");
                }
                // open the file with read-only access and retrieve the file structure
                hdf5FileFormat = fileFormat.createInstance(to.getAbsolutePath(), FileFormat.READ);
                hdf5FileFormat.open();
                Group root = (Group) ((javax.swing.tree.DefaultMutableTreeNode) hdf5FileFormat.getRootNode()).getUserObject();
                List<HObject> postProcessMembers = root.getMemberList();
                for (HObject nextHObject : postProcessMembers) {
                    // FIX: guard the cast. A non-scalar member (e.g. a sub-group)
                    // previously threw a ClassCastException that aborted the whole
                    // dump instead of just skipping that member.
                    if (!(nextHObject instanceof H5ScalarDS)) {
                        continue;
                    }
                    H5ScalarDS h5ScalarDS = (H5ScalarDS) nextHObject;
                    h5ScalarDS.init();
                    try {
                        long[] dims = h5ScalarDS.getDims();
                        System.out.println("---" + nextHObject.getName() + " " + nextHObject.getClass().getName() + " Dimensions=" + Arrays.toString(dims));
                        Object obj = h5ScalarDS.read();
                        if (dims.length == 2) {
                            // dims[0]=numTimes (same as 'SimTimes' data length)
                            // dims[1]=numVars (same as 'VarNames' data length)
                            // if name='StatMean' this matches the default data saved in the odeSolverResultSet
                            double[] columns = new double[(int) dims[1]];
                            for (int row = 0; row < dims[0]; row++) {
                                System.arraycopy(obj, row * columns.length, columns, 0, columns.length);
                                System.out.println(Arrays.toString(columns));
                            }
                        } else {
                            // 1-D datasets: presumably doubles (times) or strings
                            // (variable names) — confirm against the writer.
                            if (obj instanceof double[]) {
                                System.out.println(Arrays.toString((double[]) obj));
                            } else {
                                System.out.println(Arrays.toString((String[]) obj));
                            }
                        }
                    } catch (Exception e) {
                        // per-dataset failures are logged and skipped
                        e.printStackTrace();
                    }
                }
            }
        }
    } catch (Exception e) {
        // diagnostics only — never let the dump break viewer creation
        e.printStackTrace();
    } finally {
        if (hdf5FileFormat != null) {
            try {
                hdf5FileFormat.close();
            } catch (Exception e2) {
                e2.printStackTrace();
            }
        }
        if (to != null) {
            try {
                to.delete();
            } catch (Exception e2) {
                e2.printStackTrace();
            }
        }
    }
    return odeDataViewer;
}
Use of ncsa.hdf.object.Group in project vcell by virtualcell — class ODEDataInterfaceImpl, method extractColumnMin.
/**
 * NOTE(review): despite its name, this method currently never returns column
 * minima — every path returns null and {@code columnName} is unused. It only
 * dumps the embedded HDF5 statistics datasets to stdout (debug scaffolding);
 * the intended per-column return is still commented out below. Confirm intent
 * before callers start relying on the result.
 *
 * @param columnName name of the column whose minima are wanted (currently ignored)
 * @return always null in the current implementation
 * @throws ExpressionException declared for the interface contract (not thrown here)
 * @throws ObjectNotFoundException declared for the interface contract (not thrown here)
 */
@Override
public double[] extractColumnMin(String columnName) throws ExpressionException, ObjectNotFoundException {
    FileFormat hdf5FileFormat = null;
    File to = null;
    try {
        ODESolverResultSet osrs = getOdeSolverResultSet();
        if (osrs instanceof ODESimData) {
            byte[] hdf5FileBytes = ((ODESimData) getOdeSolverResultSet()).getHdf5FileBytes();
            if (hdf5FileBytes != null) {
                // Materialize the in-memory HDF5 image so the native library can open it.
                to = File.createTempFile("odeStats_" + simulationModelInfo.getSimulationName(), ".hdf5");
                Files.write(hdf5FileBytes, to);
                FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
                if (fileFormat == null) {
                    throw new Exception("Cannot find HDF5 FileFormat.");
                }
                // open the file with read-only access and walk the root group members
                hdf5FileFormat = fileFormat.createInstance(to.getAbsolutePath(), FileFormat.READ);
                hdf5FileFormat.open();
                Group root = (Group) ((javax.swing.tree.DefaultMutableTreeNode) hdf5FileFormat.getRootNode()).getUserObject();
                List<HObject> postProcessMembers = root.getMemberList();
                for (HObject nextHObject : postProcessMembers) {
                    System.out.println(nextHObject.getName() + " " + nextHObject.getClass().getName());
                    H5ScalarDS h5ScalarDS = (H5ScalarDS) nextHObject;
                    h5ScalarDS.init();
                    try {
                        long[] dims = h5ScalarDS.getDims();
                        System.out.println("---" + nextHObject.getName() + " " + nextHObject.getClass().getName() + " Dimensions=" + Arrays.toString(dims));
                        Object obj = h5ScalarDS.read();
                        if (dims.length == 2) {
                            // dims[0] rows x dims[1] columns; printed row by row
                            double[] columns = new double[(int) dims[1]];
                            for (int row = 0; row < dims[0]; row++) {
                                System.arraycopy(obj, row * columns.length, columns, 0, columns.length);
                                System.out.println(Arrays.toString(columns));
                            }
                            return null;
                            // return columns;
                        } else {
                            return null;
                        }
                    } catch (Exception e) {
                        // FIX: was silently swallowed; log before bailing out
                        // (matches the error-reporting style used elsewhere in this file)
                        e.printStackTrace();
                        return null;
                    }
                }
            }
        }
    } catch (Exception e) {
        // FIX: was an empty catch; keep best-effort semantics but log the failure
        e.printStackTrace();
    } finally {
        if (hdf5FileFormat != null) {
            try {
                hdf5FileFormat.close();
            } catch (Exception e2) {
                e2.printStackTrace();
            }
        }
        if (to != null) {
            try {
                to.delete();
            } catch (Exception e2) {
                e2.printStackTrace();
            }
        }
    }
    return null;
}
Aggregations