Use of ncsa.hdf.object.FileFormat in the vcell project (by virtualcell): class DataSet, method readHdf5VariableSolution.
/**
 * Reads the solution values of a single variable from an HDF5 solution file that is
 * stored as an entry inside a zip archive.
 *
 * The zip entry is extracted to a temporary file first (the HDF5 native library reads
 * from the filesystem), then the variable's dataset is located and read.
 *
 * @param zipfile  the zip archive containing the HDF5 solution file
 * @param fileName the name of the HDF5 entry inside the zip archive
 * @param varName  the variable whose solution to read; if null, nothing is looked up
 * @return the variable's values as a double[], or null if varName is null or the
 *         object at the variable's path is not a Dataset
 * @throws Exception if extraction, opening, or reading the HDF5 file fails
 */
static double[] readHdf5VariableSolution(File zipfile, String fileName, String varName) throws Exception {
    File tempFile = null;
    FileFormat solFile = null;
    try {
        tempFile = createTempHdf5File(zipfile, fileName);
        FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
        solFile = fileFormat.createInstance(tempFile.getAbsolutePath(), FileFormat.READ);
        solFile.open();
        if (varName != null) {
            String varPath = Hdf5Utils.getVarSolutionPath(varName);
            HObject solObj = FileFormat.findObject(solFile, varPath);
            if (solObj instanceof Dataset) {
                Dataset dataset = (Dataset) solObj;
                return (double[]) dataset.read();
            }
        }
    } finally {
        // Close and delete independently: previously a single try wrapped both steps,
        // so a failed close() silently skipped temp-file deletion and leaked the file.
        if (solFile != null) {
            try {
                solFile.close();
            } catch (Exception e) {
                // best-effort cleanup; log instead of silently swallowing
                System.err.println("couldn't close HDF5 file " + tempFile.getAbsolutePath() + ": " + e.getMessage());
            }
        }
        if (tempFile != null && !tempFile.delete()) {
            System.err.println("couldn't delete temp file " + tempFile.getAbsolutePath());
        }
    }
    return null;
}
Use of ncsa.hdf.object.FileFormat in the vcell project (by virtualcell): class DataSetControllerImpl, method getDataProcessingOutput.
/**
 * Reads simulation post-processing output from an HDF5 file and answers the given
 * DataOperation: for a DataProcessingOutputInfoOP it returns summary metadata
 * (variable names, sizes, times, units, stats); for DataProcessingOutputDataValuesOP /
 * DataProcessingOutputTimeSeriesOP it returns actual values for one variable.
 *
 * NOTE(review): every failure, including the FileNotFoundException thrown below for a
 * missing file, is caught by the catch(Exception) at the bottom, printed, and results
 * in a null return — confirm callers handle a null result.
 *
 * @param dataOperation which kind of result to produce and for which variable/times
 * @param dataProcessingOutputFileHDF5 the HDF5 post-processing output file
 * @return the requested results, or null on any error
 * @throws Exception declared, but in practice exceptions are swallowed by the catch below
 */
public static DataOperationResults getDataProcessingOutput(DataOperation dataOperation, File dataProcessingOutputFileHDF5) throws Exception {
DataOperationResults dataProcessingOutputResults = null;
FileFormat hdf5FileFormat = null;
try {
if (dataProcessingOutputFileHDF5.exists()) {
// retrieve an instance of H5File
FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
if (fileFormat == null) {
throw new Exception("Cannot find HDF5 FileFormat.");
}
// open the file with read-only access
hdf5FileFormat = fileFormat.open(dataProcessingOutputFileHDF5.getAbsolutePath(), FileFormat.READ);
hdf5FileFormat.setMaxMembers(Simulation.MAX_LIMIT_SPATIAL_TIMEPOINTS);
// open the file and retrieve the file structure
hdf5FileFormat.open();
// the HDF object library exposes the file structure as a swing tree; the root
// group is the user object of the root tree node
Group root = (Group) ((javax.swing.tree.DefaultMutableTreeNode) hdf5FileFormat.getRootNode()).getUserObject();
if (dataOperation instanceof DataProcessingOutputInfoOP) {
// info request: walk the whole file and collect per-variable metadata
DataProcessingHelper dataProcessingHelper = new DataProcessingHelper();
iterateHDF5(root, "", dataProcessingHelper);
dataProcessingOutputResults = new DataOperationResults.DataProcessingOutputInfo(dataOperation.getVCDataIdentifier(), dataProcessingHelper.getVarNames(), dataProcessingHelper.getVarISizes(), dataProcessingHelper.times, dataProcessingHelper.getVarUnits(), dataProcessingHelper.getPostProcessDataTypes(), dataProcessingHelper.getVarOrigins(), dataProcessingHelper.getVarExtents(), dataProcessingHelper.getVarStatValues());
// map function names to PostProcess state variable name
ArrayList<String> postProcessImageVarNames = new ArrayList<String>();
for (int i = 0; i < ((DataOperationResults.DataProcessingOutputInfo) dataProcessingOutputResults).getVariableNames().length; i++) {
String variableName = ((DataOperationResults.DataProcessingOutputInfo) dataProcessingOutputResults).getVariableNames()[i];
if (((DataOperationResults.DataProcessingOutputInfo) dataProcessingOutputResults).getPostProcessDataType(variableName).equals(DataOperationResults.DataProcessingOutputInfo.PostProcessDataType.image)) {
postProcessImageVarNames.add(variableName);
}
}
HashMap<String, String> mapFunctionNameToStateVarName = null;
if (((DataProcessingOutputInfoOP) dataOperation).getOutputContext() != null) {
mapFunctionNameToStateVarName = new HashMap<String, String>();
// for each post-processing output function, link it to the first PostProcess
// image state variable that appears among its expression's symbols
for (int i = 0; i < ((DataProcessingOutputInfoOP) dataOperation).getOutputContext().getOutputFunctions().length; i++) {
AnnotatedFunction annotatedFunction = ((DataProcessingOutputInfoOP) dataOperation).getOutputContext().getOutputFunctions()[i];
if (annotatedFunction.getFunctionType().equals(VariableType.POSTPROCESSING)) {
String[] symbols = annotatedFunction.getExpression().flatten().getSymbols();
// Find any PostProcess state var that matches a symbol in the function
for (int j = 0; j < symbols.length; j++) {
if (postProcessImageVarNames.contains(symbols[j])) {
mapFunctionNameToStateVarName.put(annotatedFunction.getName(), symbols[j]);
break;
}
}
}
}
}
// if any functions were mapped, wrap the info result with the function->var mapping
if (mapFunctionNameToStateVarName != null && mapFunctionNameToStateVarName.size() > 0) {
dataProcessingOutputResults = new DataOperationResults.DataProcessingOutputInfo(((DataOperationResults.DataProcessingOutputInfo) dataProcessingOutputResults), mapFunctionNameToStateVarName);
}
} else {
// data-values or time-series request: resolve which variable, which times,
// and which spatial indexes are wanted
OutputContext outputContext = dataOperation.getOutputContext();
String[] variableNames = null;
DataIndexHelper dataIndexHelper = null;
TimePointHelper timePointHelper = null;
if (dataOperation instanceof DataOperation.DataProcessingOutputDataValuesOP) {
variableNames = new String[] { ((DataOperation.DataProcessingOutputDataValuesOP) dataOperation).getVariableName() };
dataIndexHelper = ((DataOperation.DataProcessingOutputDataValuesOP) dataOperation).getDataIndexHelper();
timePointHelper = ((DataOperation.DataProcessingOutputDataValuesOP) dataOperation).getTimePointHelper();
} else if (dataOperation instanceof DataOperation.DataProcessingOutputTimeSeriesOP) {
variableNames = ((DataOperation.DataProcessingOutputTimeSeriesOP) dataOperation).getTimeSeriesJobSpec().getVariableNames();
TimeSeriesJobSpec timeSeriesJobSpec = ((DataOperation.DataProcessingOutputTimeSeriesOP) dataOperation).getTimeSeriesJobSpec();
// restrict the dataset's full time list to the job's [startTime, endTime] range
double[] specificTimepoints = extractTimeRange(((DataOperation.DataProcessingOutputTimeSeriesOP) dataOperation).getAllDatasetTimes(), timeSeriesJobSpec.getStartTime(), timeSeriesJobSpec.getEndTime());
timePointHelper = TimePointHelper.createSpecificTimePointHelper(specificTimepoints);
timeSeriesJobSpec.initIndices();
// only the first variable's indices are used (single-variable limit enforced below)
dataIndexHelper = DataIndexHelper.createSpecificDataIndexHelper(timeSeriesJobSpec.getIndices()[0]);
} else {
throw new Exception("Unknown Dataoperation " + dataOperation.getClass().getName());
}
if (variableNames.length != 1) {
throw new Exception("Only 1 variable request at a time");
}
// the requested name may be an output function rather than a raw variable
AnnotatedFunction[] annotatedFunctions = (outputContext == null ? null : outputContext.getOutputFunctions());
AnnotatedFunction foundFunction = null;
if (annotatedFunctions != null) {
for (int i = 0; i < annotatedFunctions.length; i++) {
if (annotatedFunctions[i].getName().equals(variableNames[0])) {
foundFunction = annotatedFunctions[i];
break;
}
}
}
double[] alltimes = null;
if (foundFunction != null) {
// function request: recurse to get the file's info, read the state variables the
// function depends on, then evaluate the function expression over them
DataOperationResults.DataProcessingOutputInfo dataProcessingOutputInfo = (DataOperationResults.DataProcessingOutputInfo) getDataProcessingOutput(new DataOperation.DataProcessingOutputInfoOP(dataOperation.getVCDataIdentifier(), false, dataOperation.getOutputContext()), dataProcessingOutputFileHDF5);
alltimes = dataProcessingOutputInfo.getVariableTimePoints();
FunctionHelper functionHelper = getPostProcessStateVariables(foundFunction, dataProcessingOutputInfo);
DataProcessingHelper dataProcessingHelper = new DataProcessingHelper(functionHelper.postProcessStateVars, timePointHelper, dataIndexHelper);
iterateHDF5(root, "", dataProcessingHelper);
dataProcessingOutputResults = evaluatePostProcessFunction(dataProcessingOutputInfo, functionHelper.postProcessStateVars, dataProcessingHelper.specificDataValues, dataIndexHelper, timePointHelper, functionHelper.flattenedBoundExpression, variableNames[0]);
} else {
// raw variable request: read the variable's values directly from the file
DataProcessingHelper dataProcessingHelper = new DataProcessingHelper(new String[] { variableNames[0] }, timePointHelper, dataIndexHelper);
iterateHDF5(root, "", dataProcessingHelper);
alltimes = dataProcessingHelper.times;
if (dataProcessingHelper.specificDataValues == null) {
throw new Exception("Couldn't find postprocess data as specified for var=" + variableNames[0]);
}
dataProcessingOutputResults = new DataOperationResults.DataProcessingOutputDataValues(dataOperation.getVCDataIdentifier(), variableNames[0], timePointHelper, dataIndexHelper, dataProcessingHelper.specificDataValues[0]);
}
if (dataOperation instanceof DataOperation.DataProcessingOutputTimeSeriesOP) {
// reformat [time][data] values into the TimeSeries layout
// [var][index+1][time], where row 0 of each var holds the time values
TimeSeriesJobResults timeSeriesJobResults = null;
DataProcessingOutputTimeSeriesOP dataProcessingOutputTimeSeriesOP = (DataOperation.DataProcessingOutputTimeSeriesOP) dataOperation;
// [time][data]
double[][] dataValues = ((DataOperationResults.DataProcessingOutputDataValues) dataProcessingOutputResults).getDataValues();
double[] desiredTimes = (timePointHelper.isAllTimePoints() ? alltimes : timePointHelper.getTimePoints());
double[][][] timeSeriesFormatedValuesArr = new double[variableNames.length][dataIndexHelper.getDataIndexes().length + 1][desiredTimes.length];
for (int i = 0; i < timeSeriesFormatedValuesArr.length; i++) {
// var
for (int j = 0; j < timeSeriesFormatedValuesArr[i].length; j++) {
// index
if (j == 0) {
timeSeriesFormatedValuesArr[i][j] = desiredTimes;
continue;
}
for (int k = 0; k < timeSeriesFormatedValuesArr[i][j].length; k++) {
// time
// assume 1 variable for now
timeSeriesFormatedValuesArr[i][j][k] = dataValues[k][j - 1];
}
}
}
if (dataProcessingOutputTimeSeriesOP.getTimeSeriesJobSpec().isCalcSpaceStats()) {
// spatial statistics requested; weights are not available here, so mark invalid
SpatialStatsInfo spatialStatsInfo = new SpatialStatsInfo();
spatialStatsInfo.bWeightsValid = false;
timeSeriesJobResults = calculateStatisticsFromWhole(dataProcessingOutputTimeSeriesOP.getTimeSeriesJobSpec(), timeSeriesFormatedValuesArr, timePointHelper.getTimePoints(), spatialStatsInfo);
} else {
timeSeriesJobResults = new TSJobResultsNoStats(variableNames, new int[][] { dataIndexHelper.getDataIndexes() }, timePointHelper.getTimePoints(), timeSeriesFormatedValuesArr);
}
dataProcessingOutputResults = new DataOperationResults.DataProcessingOutputTimeSeriesValues(dataOperation.getVCDataIdentifier(), timeSeriesJobResults);
}
}
} else {
throw new FileNotFoundException("Data Processing Output file '" + dataProcessingOutputFileHDF5.getPath() + "' not found");
}
} catch (Exception e) {
// NOTE(review): swallows all failures (including the FileNotFoundException above)
// and falls through to return null — confirm this is intentional
e.printStackTrace();
} finally {
if (hdf5FileFormat != null) {
try {
hdf5FileFormat.close();
} catch (Exception e) {
e.printStackTrace();
}
}
}
return dataProcessingOutputResults;
}
Use of ncsa.hdf.object.FileFormat in the vcell project (by virtualcell): class LocalVCellConnectionFactory, method linkHDFLib.
/**
 * Triggers loading of the HDF5 native library when running locally, by instantiating
 * an H5File reflectively and registering it with the FileFormat registry.
 * Any failure (including native-library link errors) is logged and ignored.
 */
private void linkHDFLib() {
    try {
        // lifted from hdf5group website; class initialization loads the native library
        Class<?> fileclass = Class.forName("ncsa.hdf.object.h5.H5File");
        // getDeclaredConstructor().newInstance() replaces the deprecated
        // Class.newInstance(); any reflection/constructor failure is caught below.
        // (No null check needed: newInstance throws rather than returning null.)
        FileFormat fileformat = (FileFormat) fileclass.getDeclaredConstructor().newInstance();
        FileFormat.addFileFormat(FileFormat.FILE_TYPE_HDF5, fileformat);
    } catch (Throwable t) {
        t.printStackTrace();
    }
}
Use of ncsa.hdf.object.FileFormat in the vcell project (by virtualcell): class CartesianMeshChombo, method readMeshFile.
/**
 * Reads a Chombo mesh HDF5 file and builds a CartesianMeshChombo from its attributes
 * (dimension, refinement levels/ratios, Dx/extent/Nx/origin geometry) and its datasets
 * (vertices, segments, structures, membrane elements, surface triangles, slice view).
 *
 * @param chomboMeshFile the HDF5 mesh file produced by the Chombo solver
 * @return the populated mesh
 * @throws Exception if the HDF5 library cannot be initialized, the HDF5 FileFormat is
 *         unavailable, or the file cannot be opened/read
 */
public static CartesianMeshChombo readMeshFile(File chomboMeshFile) throws Exception {
    CartesianMeshChombo chomboMesh = new CartesianMeshChombo();
    // make sure the native HDF5 library is loaded and initialized
    if (H5.H5open() < 0) {
        throw new Exception("H5.H5open() failed");
    }
    FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
    if (fileFormat == null) {
        throw new Exception("FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5) failed, returned null.");
    }
    FileFormat meshFile = null;
    try {
        meshFile = fileFormat.createInstance(chomboMeshFile.getAbsolutePath(), FileFormat.READ);
        meshFile.open();
        DefaultMutableTreeNode rootNode = (DefaultMutableTreeNode) meshFile.getRootNode();
        Group rootGroup = (Group) rootNode.getUserObject();
        // the first child group holds the mesh attributes and datasets
        Group meshGroup = (Group) rootGroup.getMemberList().get(0);
        List<Attribute> meshAttrList = meshGroup.getMetadata();
        for (Attribute attr : meshAttrList) {
            String attrName = attr.getName();
            MeshAttribute mattr = null;
            try {
                mattr = MeshAttribute.valueOf(attrName);
            } catch (IllegalArgumentException ex) {
                // unknown attribute name; handled by the null check below
            }
            if (mattr == null) {
                // if not found, then we don't care about this attribute
                logger.debug("mesh attribute " + attrName + " is not defined in Java");
                continue;
            }
            Object value = attr.getValue();
            switch(mattr) {
                case dimension:
                    chomboMesh.dimension = ((int[]) value)[0];
                    break;
                case numLevels:
                    chomboMesh.numLevels = ((int[]) value)[0];
                    break;
                case viewLevel:
                    chomboMesh.viewLevel = ((int[]) value)[0];
                    break;
                case refineRatios:
                    chomboMesh.refineRatios = (int[]) value;
                    break;
                case Dx:
                case extent:
                case Nx:
                case origin:
                    // these 4 are stored as a single string of the form "{x, y, z}"
                    String[] valueStrArray = (String[]) value;
                    String value0 = valueStrArray[0];
                    StringTokenizer st = new StringTokenizer(value0, "{,} ");
                    int numTokens = st.countTokens();
                    // we need 3 slots for 3d; only the first min(3, numTokens) tokens are parsed
                    double[] values = new double[Math.max(3, numTokens)];
                    for (int i = 0; i < Math.min(3, numTokens); ++i) {
                        String token = st.nextToken();
                        values[i] = Double.parseDouble(token);
                    }
                    switch(mattr) {
                        case Dx:
                            chomboMesh.dx = new double[3];
                            // copy exactly 3 entries: values.length can exceed 3 when the
                            // attribute has more than 3 tokens, and copying values.length
                            // entries previously overflowed the 3-element dx array
                            System.arraycopy(values, 0, chomboMesh.dx, 0, 3);
                            break;
                        case extent:
                            // a zero z-extent means 2d; substitute 1 for the Extent constructor
                            chomboMesh.extent = new Extent(values[0], values[1], values[2] == 0 ? 1 : values[2]);
                            break;
                        case Nx:
                            // a zero z-size means 2d; substitute 1 for the ISize constructor
                            chomboMesh.size = new ISize((int) values[0], (int) values[1], values[2] == 0 ? 1 : (int) values[2]);
                            break;
                        case origin:
                            chomboMesh.origin = new Origin(values[0], values[1], values[2]);
                            break;
                    }
                    break;
            }
        }
        // read each known dataset and dispatch to its collector
        List<HObject> memberList = meshGroup.getMemberList();
        for (HObject member : memberList) {
            if (!(member instanceof Dataset)) {
                continue;
            }
            Dataset dataset = (Dataset) member;
            Vector vectValues = (Vector) dataset.read();
            String name = dataset.getName();
            MeshDataSet mdataset = null;
            try {
                mdataset = MeshDataSet.valueOfName(name);
            } catch (IllegalArgumentException ex) {
                logger.debug("mesh dataset " + name + " is not defined in Java");
            }
            if (mdataset == null) {
                // if not found, then we don't care about this dataset
                continue;
            }
            switch(mdataset) {
                case vertices:
                    collectVertices(chomboMesh, vectValues);
                    break;
                case segments:
                    collect2dSegments(chomboMesh, vectValues);
                    break;
                case structures:
                    collectStructures(chomboMesh, vectValues);
                    break;
                case featurephasevols:
                    collectFeaturePhaseVols(chomboMesh, vectValues);
                    break;
                case membraneids:
                    collectMembraneIds(chomboMesh, vectValues);
                    break;
                case membrane_elements:
                case membrane_elements_old:
                    collectMembraneElements(chomboMesh, vectValues);
                    break;
                case surface_triangles:
                    collect3dSurfaceTriangles(chomboMesh, vectValues);
                    break;
                case slice_view:
                    collect3dSliceView(chomboMesh, vectValues);
                    break;
            }
        }
    } finally {
        if (meshFile != null) {
            meshFile.close();
        }
    }
    // set neighbors to membrane elements (2d only; segments carry the connectivity)
    if (chomboMesh.dimension == 2 && chomboMesh.membraneElements != null) {
        for (int i = 0; i < chomboMesh.membraneElements.length; ++i) {
            MembraneElement me = chomboMesh.membraneElements[i];
            me.setConnectivity(chomboMesh.segments[i].prevNeigbhor, chomboMesh.segments[i].nextNeigbhor, -1, -1);
        }
    }
    return chomboMesh;
}
Use of ncsa.hdf.object.FileFormat in the vcell project (by virtualcell): class DataSet, method readChomboExtrapolatedValues.
/**
 * Reads Chombo extrapolated values for a variable from the HDF5 solution file stored
 * inside the given zip archive. The HDF5 entry is extracted to a temporary file first
 * (the HDF5 native library reads from the filesystem).
 *
 * @param varName the variable whose extrapolated values to read
 * @param pdeFile identifies the HDF5 entry inside the zip (by file name)
 * @param zipFile the zip archive; if null or not a Chombo archive, null is returned
 * @return the extrapolated values, or null if zipFile is null or not Chombo format
 * @throws IOException wrapping any extraction/HDF5 failure
 */
static double[] readChomboExtrapolatedValues(String varName, File pdeFile, File zipFile) throws IOException {
    double[] data = null;
    if (zipFile != null && isChombo(zipFile)) {
        File tempFile = null;
        FileFormat solFile = null;
        try {
            tempFile = createTempHdf5File(zipFile, pdeFile.getName());
            FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
            solFile = fileFormat.createInstance(tempFile.getAbsolutePath(), FileFormat.READ);
            solFile.open();
            data = readChomboExtrapolatedValues(varName, solFile);
        } catch (Exception e) {
            throw new IOException(e.getMessage(), e);
        } finally {
            // Close and delete independently: previously a single try wrapped both steps,
            // so a failed close() silently skipped temp-file deletion and leaked the file.
            if (solFile != null) {
                try {
                    solFile.close();
                } catch (Exception e) {
                    // best-effort cleanup; log instead of silently swallowing
                    System.err.println("couldn't close HDF5 file " + tempFile.getAbsolutePath() + ": " + e.getMessage());
                }
            }
            if (tempFile != null && !tempFile.delete()) {
                System.err.println("couldn't delete temp file " + tempFile.getAbsolutePath());
            }
        }
    }
    return data;
}
Aggregations