Use of ncsa.hdf.object.Group in project vcell by virtualcell: class VH5Path, method walk.
/**
 * Find the next object in a path sequence, descending one step at a time.
 * Handles three cases: a Group member matching the step name, a compound
 * dataset member column matching the step name, and (on the final step only)
 * an attribute of the current node.
 * @param hobj previous element in sequence
 * @param steps name of each step
 * @param index current step
 * @return next object path element, attribute value, or null if not present
 * @throws Exception on HDF5 read errors
 */
private static Object walk(Object hobj, String[] steps, int index) throws Exception {
    final boolean isLastIndex = lastIndex(index, steps);
    final String finding = steps[index];
    // Case 1: group -- search members by name, recursing unless this is the final step.
    Group g = BeanUtils.downcast(Group.class, hobj);
    if (g != null) {
        List<HObject> ml = g.getMemberList();
        for (HObject sub : ml) {
            if (finding.equals(sub.getName())) {
                if (isLastIndex) {
                    return sub;
                }
                return walk(sub, steps, index + 1);
            }
        }
    }
    // Case 2: compound dataset -- match a member column by name.
    H5CompoundDS cds = BeanUtils.downcast(H5CompoundDS.class, hobj);
    if (cds != null) {
        // read once and reuse; the original issued a discarded read() up front
        // and then another read() for every matching member name
        Object c = cds.read();
        String[] mn = cds.getMemberNames();
        for (int i = 0; i < mn.length; i++) {
            if (finding.equals(mn[i])) {
                Vector<?> vec = BeanUtils.downcast(Vector.class, c);
                if (vec != null) {
                    VCAssert.assertTrue(i < vec.size(), "Disconnect between H5CompoundDS.getMemberNames( ) and returned Vector");
                    Object child = vec.get(i);
                    if (isLastIndex) {
                        return child;
                    }
                    // NOTE(review): a compound member cannot be descended into further;
                    // a non-final step matching here falls through and the method
                    // ultimately returns null (preserves original behavior)
                } else {
                    throw new UnsupportedOperationException("Unsupported H5CompoundDS subtype " + className(c));
                }
            }
        }
    }
    // Case 3: the final step may name an attribute on the current node.
    if (isLastIndex) {
        DataFormat df = BeanUtils.downcast(DataFormat.class, hobj);
        if (df != null && df.hasAttribute()) {
            try {
                @SuppressWarnings("unchecked") List<Object> meta = df.getMetadata();
                for (Object o : meta) {
                    Attribute a = BeanUtils.downcast(Attribute.class, o);
                    if (a != null) {
                        if (finding.equals(a.getName())) {
                            return a.getValue();
                        }
                    } else {
                        lg.warn(concat(steps, finding) + " fetching metadata unexpected type " + className(o));
                    }
                }
            } catch (Exception e) {
                throw new RuntimeException(concat(steps, finding) + " fetching metadata", e);
            }
        }
    }
    // not found at this level
    return null;
}
Use of ncsa.hdf.object.Group in project vcell by virtualcell: class DataSet, method readHdf5SolutionMetaData.
/**
 * Populates dataBlockList from an HDF5 solution stream. The stream is copied
 * to a temp file, opened as HDF5, and each Dataset in the first child group of
 * the root becomes one DataBlock. The temp file is deleted on exit.
 */
private void readHdf5SolutionMetaData(InputStream is) throws Exception {
    File hdf5TempFile = null;
    FileFormat hdf5File = null;
    try {
        hdf5TempFile = createTempHdf5File(is);
        FileFormat hdf5FileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
        hdf5File = hdf5FileFormat.createInstance(hdf5TempFile.getAbsolutePath(), FileFormat.READ);
        hdf5File.open();
        DefaultMutableTreeNode root = (DefaultMutableTreeNode) hdf5File.getRootNode();
        Group rootGroup = (Group) root.getUserObject();
        // solution datasets live in the first child group of the root
        Group solutionGroup = (Group) rootGroup.getMemberList().get(0);
        for (HObject child : solutionGroup.getMemberList()) {
            if (child instanceof Dataset) {
                Dataset dataset = (Dataset) child;
                String datasetName = dataset.getName();
                int variableType = -1;
                String domainName = null;
                // scan the dataset's attributes for variable type and domain
                List<Attribute> attributes = dataset.getMetadata();
                for (Attribute attribute : attributes) {
                    String attributeName = attribute.getName();
                    if (attributeName.equals("variable type")) {
                        variableType = ((int[]) attribute.getValue())[0];
                    } else if (attributeName.equals("domain")) {
                        domainName = ((String[]) attribute.getValue())[0];
                    }
                }
                long[] dimensions = dataset.getDims();
                // qualify the variable name with its domain when one is present
                String variableName = (domainName == null) ? datasetName : domainName + Variable.COMBINED_IDENTIFIER_SEPARATOR + datasetName;
                dataBlockList.addElement(DataBlock.createDataBlock(variableName, variableType, (int) dimensions[0], 0));
            }
        }
    } finally {
        try {
            if (hdf5File != null) {
                hdf5File.close();
            }
            if (hdf5TempFile != null && !hdf5TempFile.delete()) {
                System.err.println("couldn't delete temp file " + hdf5TempFile);
            }
        } catch (Exception ignored) {
            // best-effort cleanup; failures here are deliberately ignored
        }
    }
}
Use of ncsa.hdf.object.Group in project vcell by virtualcell: class ChomboFileReader, method readMembraneVarData.
//
// Membrane data are stored as a UCHC (or vcell) extension to the normal Chombo Data.
// ChomboMembraneVarData was formally called VCellSolution by Fei.
//
/**
 * Loads vcell membrane solution datasets from the "solution" group (a UCHC/vcell
 * extension to standard Chombo data) into the given ChomboMeshData.
 * A missing group is silently tolerated.
 */
private static void readMembraneVarData(ChomboMeshData chomboMeshData, Group rootGroup) {
    // groups that may hold vcell solutions (extrapolated_volumes currently disabled)
    String[] groups = new String[] { "solution" /*, "extrapolated_volumes"*/
    };
    for (String groupName : groups) {
        try {
            Group vcellGroup = Hdf5Reader.getChildGroup(rootGroup, groupName);
            if (vcellGroup == null) {
                continue;
            }
            for (HObject child : vcellGroup.getMemberList()) {
                if (!(child instanceof Dataset)) {
                    continue;
                }
                Dataset dataset = (Dataset) child;
                String name = dataset.getName();
                // pick up the optional "domain" attribute, if present
                String domain = null;
                List<Attribute> attributes = dataset.getMetadata();
                for (Attribute attribute : attributes) {
                    if ("domain".equals(attribute.getName())) {
                        domain = ((String[]) attribute.getValue())[0];
                        break;
                    }
                }
                ChomboMembraneVarData vcellSolution = new ChomboMembraneVarData(name, domain, (double[]) dataset.read());
                chomboMeshData.addMembraneVarData(vcellSolution);
            }
        } catch (Exception ex) {
            // it is ok if there is no vcell group
        }
    }
}
Use of ncsa.hdf.object.Group in project vcell by virtualcell: class DataSetControllerImpl, method getDataProcessingOutput.
/**
 * Answers a DataOperation against a post-processing HDF5 output file.
 * For a DataProcessingOutputInfoOP the whole file is summarized (variable names,
 * sizes, times, units, etc.); otherwise a single variable's values or time series
 * are extracted, evaluating a post-process output function when one matches.
 * NOTE(review): any exception is caught and printed below, so this method can
 * return null — callers must handle a null result.
 * @param dataOperation which request to perform; determines the concrete result type
 * @param dataProcessingOutputFileHDF5 the HDF5 file to read
 * @return the populated results, or null if an error occurred
 * @throws Exception declared, but most failures are swallowed by the catch below
 */
public static DataOperationResults getDataProcessingOutput(DataOperation dataOperation, File dataProcessingOutputFileHDF5) throws Exception {
DataOperationResults dataProcessingOutputResults = null;
FileFormat hdf5FileFormat = null;
try {
if (dataProcessingOutputFileHDF5.exists()) {
// retrieve an instance of H5File
FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
if (fileFormat == null) {
throw new Exception("Cannot find HDF5 FileFormat.");
}
// open the file with read-only access
hdf5FileFormat = fileFormat.open(dataProcessingOutputFileHDF5.getAbsolutePath(), FileFormat.READ);
hdf5FileFormat.setMaxMembers(Simulation.MAX_LIMIT_SPATIAL_TIMEPOINTS);
// open the file and retrieve the file structure
hdf5FileFormat.open();
Group root = (Group) ((javax.swing.tree.DefaultMutableTreeNode) hdf5FileFormat.getRootNode()).getUserObject();
if (dataOperation instanceof DataProcessingOutputInfoOP) {
// info request: walk the whole file and summarize every variable
DataProcessingHelper dataProcessingHelper = new DataProcessingHelper();
iterateHDF5(root, "", dataProcessingHelper);
dataProcessingOutputResults = new DataOperationResults.DataProcessingOutputInfo(dataOperation.getVCDataIdentifier(), dataProcessingHelper.getVarNames(), dataProcessingHelper.getVarISizes(), dataProcessingHelper.times, dataProcessingHelper.getVarUnits(), dataProcessingHelper.getPostProcessDataTypes(), dataProcessingHelper.getVarOrigins(), dataProcessingHelper.getVarExtents(), dataProcessingHelper.getVarStatValues());
// map function names to PostProcess state variable name
ArrayList<String> postProcessImageVarNames = new ArrayList<String>();
for (int i = 0; i < ((DataOperationResults.DataProcessingOutputInfo) dataProcessingOutputResults).getVariableNames().length; i++) {
String variableName = ((DataOperationResults.DataProcessingOutputInfo) dataProcessingOutputResults).getVariableNames()[i];
if (((DataOperationResults.DataProcessingOutputInfo) dataProcessingOutputResults).getPostProcessDataType(variableName).equals(DataOperationResults.DataProcessingOutputInfo.PostProcessDataType.image)) {
postProcessImageVarNames.add(variableName);
}
}
HashMap<String, String> mapFunctionNameToStateVarName = null;
if (((DataProcessingOutputInfoOP) dataOperation).getOutputContext() != null) {
mapFunctionNameToStateVarName = new HashMap<String, String>();
// each post-processing output function is linked to the first image
// state variable appearing among its expression's symbols
for (int i = 0; i < ((DataProcessingOutputInfoOP) dataOperation).getOutputContext().getOutputFunctions().length; i++) {
AnnotatedFunction annotatedFunction = ((DataProcessingOutputInfoOP) dataOperation).getOutputContext().getOutputFunctions()[i];
if (annotatedFunction.getFunctionType().equals(VariableType.POSTPROCESSING)) {
String[] symbols = annotatedFunction.getExpression().flatten().getSymbols();
// Find any PostProcess state var that matches a symbol in the function
for (int j = 0; j < symbols.length; j++) {
if (postProcessImageVarNames.contains(symbols[j])) {
mapFunctionNameToStateVarName.put(annotatedFunction.getName(), symbols[j]);
break;
}
}
}
}
}
// re-wrap the info result with the function-name mapping when non-empty
if (mapFunctionNameToStateVarName != null && mapFunctionNameToStateVarName.size() > 0) {
dataProcessingOutputResults = new DataOperationResults.DataProcessingOutputInfo(((DataOperationResults.DataProcessingOutputInfo) dataProcessingOutputResults), mapFunctionNameToStateVarName);
}
} else {
// value/time-series request: resolve exactly one variable plus its
// time-point and data-index selections from the operation
OutputContext outputContext = dataOperation.getOutputContext();
String[] variableNames = null;
DataIndexHelper dataIndexHelper = null;
TimePointHelper timePointHelper = null;
if (dataOperation instanceof DataOperation.DataProcessingOutputDataValuesOP) {
variableNames = new String[] { ((DataOperation.DataProcessingOutputDataValuesOP) dataOperation).getVariableName() };
dataIndexHelper = ((DataOperation.DataProcessingOutputDataValuesOP) dataOperation).getDataIndexHelper();
timePointHelper = ((DataOperation.DataProcessingOutputDataValuesOP) dataOperation).getTimePointHelper();
} else if (dataOperation instanceof DataOperation.DataProcessingOutputTimeSeriesOP) {
variableNames = ((DataOperation.DataProcessingOutputTimeSeriesOP) dataOperation).getTimeSeriesJobSpec().getVariableNames();
TimeSeriesJobSpec timeSeriesJobSpec = ((DataOperation.DataProcessingOutputTimeSeriesOP) dataOperation).getTimeSeriesJobSpec();
double[] specificTimepoints = extractTimeRange(((DataOperation.DataProcessingOutputTimeSeriesOP) dataOperation).getAllDatasetTimes(), timeSeriesJobSpec.getStartTime(), timeSeriesJobSpec.getEndTime());
timePointHelper = TimePointHelper.createSpecificTimePointHelper(specificTimepoints);
timeSeriesJobSpec.initIndices();
dataIndexHelper = DataIndexHelper.createSpecificDataIndexHelper(timeSeriesJobSpec.getIndices()[0]);
} else {
throw new Exception("Unknown Dataoperation " + dataOperation.getClass().getName());
}
if (variableNames.length != 1) {
throw new Exception("Only 1 variable request at a time");
}
// does the requested variable name match a user-defined output function?
AnnotatedFunction[] annotatedFunctions = (outputContext == null ? null : outputContext.getOutputFunctions());
AnnotatedFunction foundFunction = null;
if (annotatedFunctions != null) {
for (int i = 0; i < annotatedFunctions.length; i++) {
if (annotatedFunctions[i].getName().equals(variableNames[0])) {
foundFunction = annotatedFunctions[i];
break;
}
}
}
double[] alltimes = null;
if (foundFunction != null) {
// function: read its state variables, then evaluate the expression
DataOperationResults.DataProcessingOutputInfo dataProcessingOutputInfo = (DataOperationResults.DataProcessingOutputInfo) getDataProcessingOutput(new DataOperation.DataProcessingOutputInfoOP(dataOperation.getVCDataIdentifier(), false, dataOperation.getOutputContext()), dataProcessingOutputFileHDF5);
alltimes = dataProcessingOutputInfo.getVariableTimePoints();
FunctionHelper functionHelper = getPostProcessStateVariables(foundFunction, dataProcessingOutputInfo);
DataProcessingHelper dataProcessingHelper = new DataProcessingHelper(functionHelper.postProcessStateVars, timePointHelper, dataIndexHelper);
iterateHDF5(root, "", dataProcessingHelper);
dataProcessingOutputResults = evaluatePostProcessFunction(dataProcessingOutputInfo, functionHelper.postProcessStateVars, dataProcessingHelper.specificDataValues, dataIndexHelper, timePointHelper, functionHelper.flattenedBoundExpression, variableNames[0]);
} else {
// plain variable: read its values directly from the file
DataProcessingHelper dataProcessingHelper = new DataProcessingHelper(new String[] { variableNames[0] }, timePointHelper, dataIndexHelper);
iterateHDF5(root, "", dataProcessingHelper);
alltimes = dataProcessingHelper.times;
if (dataProcessingHelper.specificDataValues == null) {
throw new Exception("Couldn't find postprocess data as specified for var=" + variableNames[0]);
}
dataProcessingOutputResults = new DataOperationResults.DataProcessingOutputDataValues(dataOperation.getVCDataIdentifier(), variableNames[0], timePointHelper, dataIndexHelper, dataProcessingHelper.specificDataValues[0]);
}
if (dataOperation instanceof DataOperation.DataProcessingOutputTimeSeriesOP) {
// reshape [time][data] values into time-series layout:
// [var][0]=times, [var][index+1][time]=value
TimeSeriesJobResults timeSeriesJobResults = null;
DataProcessingOutputTimeSeriesOP dataProcessingOutputTimeSeriesOP = (DataOperation.DataProcessingOutputTimeSeriesOP) dataOperation;
// [time][data]
double[][] dataValues = ((DataOperationResults.DataProcessingOutputDataValues) dataProcessingOutputResults).getDataValues();
double[] desiredTimes = (timePointHelper.isAllTimePoints() ? alltimes : timePointHelper.getTimePoints());
double[][][] timeSeriesFormatedValuesArr = new double[variableNames.length][dataIndexHelper.getDataIndexes().length + 1][desiredTimes.length];
for (int i = 0; i < timeSeriesFormatedValuesArr.length; i++) {
// var
for (int j = 0; j < timeSeriesFormatedValuesArr[i].length; j++) {
// index
if (j == 0) {
// row 0 carries the time points themselves
timeSeriesFormatedValuesArr[i][j] = desiredTimes;
continue;
}
for (int k = 0; k < timeSeriesFormatedValuesArr[i][j].length; k++) {
// time
// assume 1 variable for now
timeSeriesFormatedValuesArr[i][j][k] = dataValues[k][j - 1];
}
}
}
if (dataProcessingOutputTimeSeriesOP.getTimeSeriesJobSpec().isCalcSpaceStats()) {
SpatialStatsInfo spatialStatsInfo = new SpatialStatsInfo();
spatialStatsInfo.bWeightsValid = false;
timeSeriesJobResults = calculateStatisticsFromWhole(dataProcessingOutputTimeSeriesOP.getTimeSeriesJobSpec(), timeSeriesFormatedValuesArr, timePointHelper.getTimePoints(), spatialStatsInfo);
} else {
timeSeriesJobResults = new TSJobResultsNoStats(variableNames, new int[][] { dataIndexHelper.getDataIndexes() }, timePointHelper.getTimePoints(), timeSeriesFormatedValuesArr);
}
dataProcessingOutputResults = new DataOperationResults.DataProcessingOutputTimeSeriesValues(dataOperation.getVCDataIdentifier(), timeSeriesJobResults);
}
}
} else {
throw new FileNotFoundException("Data Processing Output file '" + dataProcessingOutputFileHDF5.getPath() + "' not found");
}
} catch (Exception e) {
// NOTE(review): this swallows all errors (including the FileNotFoundException
// above) and lets the method return null — consider rethrowing
e.printStackTrace();
} finally {
if (hdf5FileFormat != null) {
try {
hdf5FileFormat.close();
} catch (Exception e) {
e.printStackTrace();
}
}
}
return dataProcessingOutputResults;
}
Use of ncsa.hdf.object.Group in project vcell by virtualcell: class DataSet, method readMBSData.
/**
 * Reads one variable's values at one time point from a MovingBoundary (MBS)
 * HDF5 solution file. Locates the Solution group, the time group whose "time"
 * attribute matches the requested time (within 1e-8), and the dataset whose
 * "name" attribute matches varName; copies out that variable's values.
 */
private double[] readMBSData(String varName, Double time) throws Exception {
    FileFormat hdf5Format = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
    FileFormat hdf5File = null;
    try {
        hdf5File = hdf5Format.createInstance(fileName, FileFormat.READ);
        hdf5File.open();
        DefaultMutableTreeNode root = (DefaultMutableTreeNode) hdf5File.getRootNode();
        Group rootGroup = (Group) root.getUserObject();
        // locate the Solution group under the root
        Group solutionGroup = null;
        for (Object member : rootGroup.getMemberList()) {
            String memberName = ((HObject) member).getName();
            if (member instanceof Group && MBSDataGroup.valueOf(memberName) == MBSDataGroup.Solution) {
                solutionGroup = (Group) member;
                break;
            }
        }
        if (solutionGroup == null) {
            throw new Exception("Group " + MBSDataGroup.Solution + " not found");
        }
        // look up the variable's block to validate existence and get its size
        int varIndex = -1;
        int size = 0;
        for (int i = 0; i < dataBlockList.size(); ++i) {
            DataBlock block = dataBlockList.get(i);
            if (block.getVarName().equals(varName)) {
                varIndex = i;
                size = block.getSize();
                break;
            }
        }
        if (varIndex == -1) {
            throw new Exception("Variable " + varName + " not found");
        }
        // find time group for that time
        Group timeGroup = null;
        for (Object member : solutionGroup.getMemberList()) {
            if (!(member instanceof Group)) {
                continue;
            }
            Group candidate = (Group) member;
            Attribute timeAttribute = null;
            List<Attribute> attributes = candidate.getMetadata();
            for (Attribute attribute : attributes) {
                if (attribute.getName().equals(MSBDataAttribute.time.name())) {
                    timeAttribute = attribute;
                    break;
                }
            }
            if (timeAttribute != null) {
                double t = ((double[]) timeAttribute.getValue())[0];
                // tolerate floating-point round-off in stored times
                if (Math.abs(t - time) < 1e-8) {
                    timeGroup = candidate;
                    break;
                }
            }
        }
        if (timeGroup == null) {
            throw new Exception("No time group found for time=" + time);
        }
        // find variable dataset
        Dataset varDataset = null;
        for (Object member : timeGroup.getMemberList()) {
            if (!(member instanceof Dataset)) {
                continue;
            }
            String var = null;
            List<Attribute> attributes = ((Dataset) member).getMetadata();
            for (Attribute attribute : attributes) {
                if (attribute.getName().equals(MSBDataAttribute.name.name())) {
                    var = ((String[]) attribute.getValue())[0];
                    break;
                }
            }
            if (var != null && var.equals(varName)) {
                varDataset = (Dataset) member;
                break;
            }
        }
        if (varDataset == null) {
            throw new Exception("Data for Variable " + varName + " at time " + time + " not found");
        }
        // copy out exactly 'size' values for this variable
        double[] data = new double[size];
        System.arraycopy((double[]) varDataset.getData(), 0, data, 0, size);
        return data;
    } finally {
        if (hdf5File != null) {
            try {
                hdf5File.close();
            } catch (Exception e) {
                // ignore
            }
        }
    }
}
Aggregations