use of ncsa.hdf.object.Attribute in project vcell by virtualcell.
the class DataSet method readHdf5SolutionMetaData.
private void readHdf5SolutionMetaData(InputStream is) throws Exception {
    File tempFile = null;
    FileFormat solFile = null;
    try {
        tempFile = createTempHdf5File(is);
        FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
        solFile = fileFormat.createInstance(tempFile.getAbsolutePath(), FileFormat.READ);
        solFile.open();
        DefaultMutableTreeNode rootNode = (DefaultMutableTreeNode) solFile.getRootNode();
        Group rootGroup = (Group) rootNode.getUserObject();
        List<HObject> solGroups = rootGroup.getMemberList();
        for (HObject memberGroup : solGroups) {
            if (memberGroup instanceof Group && memberGroup.getName().equals("solution")) {
                Group solGroup = (Group) memberGroup;
                List<HObject> memberList = solGroup.getMemberList();
                for (HObject member : memberList) {
                    if (!(member instanceof Dataset)) {
                        continue;
                    }
                    Dataset dataset = (Dataset) member;
                    String dsname = dataset.getName();
                    int vt = -1;
                    String domain = null;
                    List<Attribute> solAttrList = dataset.getMetadata();
                    for (Attribute attr : solAttrList) {
                        String attrName = attr.getName();
                        if (attrName.equals("variable type")) {
                            Object obj = attr.getValue();
                            vt = ((int[]) obj)[0];
                        } else if (attrName.equals("domain")) {
                            Object obj = attr.getValue();
                            domain = ((String[]) obj)[0];
                        }
                    }
                    long[] dims = dataset.getDims();
                    String varName = domain == null ? dsname : domain + Variable.COMBINED_IDENTIFIER_SEPARATOR + dsname;
                    dataBlockList.addElement(DataBlock.createDataBlock(varName, vt, (int) dims[0], 0));
                }
                break;
            }
        }
    } finally {
        try {
            if (solFile != null) {
                solFile.close();
            }
            if (tempFile != null) {
                if (!tempFile.delete()) {
                    System.err.println("couldn't delete temp file " + tempFile);
                }
            }
        } catch (Exception e) {
            // ignore
        }
    }
}
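The attribute loop above repeats a common pattern: find a named attribute on a Dataset and unwrap the first element of its array value. A minimal sketch of that pattern as a standalone helper, using only the ncsa.hdf.object calls already shown (getMetadata, getName, getValue); the helper name readIntAttribute and the default-value argument are assumptions for illustration:
private static int readIntAttribute(Dataset dataset, String attrName, int defaultValue) throws Exception {
    // getMetadata() returns the dataset's attributes; scan for a matching name
    List<Attribute> attrList = dataset.getMetadata();
    for (Attribute attr : attrList) {
        if (attr.getName().equals(attrName) && attr.getValue() instanceof int[]) {
            // integer attributes come back as int[]; the scalar value is element 0
            return ((int[]) attr.getValue())[0];
        }
    }
    return defaultValue;
}
With such a helper, the "variable type" lookup in readHdf5SolutionMetaData reduces to vt = readIntAttribute(dataset, "variable type", -1).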
use of ncsa.hdf.object.Attribute in project vcell by virtualcell.
the class DataSet method readMBSDataMetadata.
private void readMBSDataMetadata() throws Exception {
    FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
    FileFormat solFile = null;
    try {
        solFile = fileFormat.createInstance(fileName, FileFormat.READ);
        solFile.open();
        DefaultMutableTreeNode rootNode = (DefaultMutableTreeNode) solFile.getRootNode();
        Group rootGroup = (Group) rootNode.getUserObject();
        Group solutionGroup = null;
        for (Object member : rootGroup.getMemberList()) {
            String memberName = ((HObject) member).getName();
            if (member instanceof Group) {
                MBSDataGroup group = MBSDataGroup.valueOf(memberName);
                if (group == MBSDataGroup.Solution) {
                    solutionGroup = (Group) member;
                    break;
                }
            }
        }
        if (solutionGroup == null) {
            throw new Exception("Group " + MBSDataGroup.Solution + " not found");
        }
        // find any time group
        Group timeGroup = null;
        for (Object member : solutionGroup.getMemberList()) {
            String memberName = ((HObject) member).getName();
            if (member instanceof Group && memberName.startsWith("time")) {
                timeGroup = (Group) member;
                break;
            }
        }
        if (timeGroup == null) {
            throw new Exception("No time group found");
        }
        // find all the datasets in that time group
        for (Object member : timeGroup.getMemberList()) {
            if (member instanceof Dataset) {
                List<Attribute> solAttrList = ((Dataset) member).getMetadata();
                int size = 0;
                String varName = null;
                VariableType varType = null;
                for (Attribute attr : solAttrList) {
                    String attrName = attr.getName();
                    Object attrValue = attr.getValue();
                    if (attrName.equals(MSBDataAttribute.name.name())) {
                        varName = ((String[]) attrValue)[0];
                    } else if (attrName.equals(MSBDataAttribute.size.name())) {
                        size = ((int[]) attrValue)[0];
                    } else if (attrName.equals(MSBDataAttribute.type.name())) {
                        String vt = ((String[]) attrValue)[0];
                        if (vt.equals(MSBDataAttributeValue.Point.name())) {
                            varType = VariableType.POINT_VARIABLE;
                        } else if (vt.equals(MSBDataAttributeValue.Volume.name())) {
                            varType = VariableType.VOLUME;
                        } else if (vt.equals(MSBDataAttributeValue.PointSubDomain.name())) {
                            // Position for PointSubdomain
                        }
                    }
                }
                if (varType == VariableType.VOLUME) {
                    // display volume variables
                    dataBlockList.addElement(DataBlock.createDataBlock(varName, varType.getType(), size, 0));
                }
                if (varType == VariableType.POINT_VARIABLE) {
                    // also display point variables
                    dataBlockList.addElement(DataBlock.createDataBlock(varName, varType.getType(), size, 0));
                }
            }
        }
    } finally {
        if (solFile != null) {
            try {
                solFile.close();
            } catch (Exception e) {
                // ignore
            }
        }
    }
}
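The two group lookups above (the Solution group under the root, then the first child whose name starts with "time") share one shape: scan Group.getMemberList() for a child matching a predicate. A minimal sketch of that scan as a helper, using only types already shown; findChildGroup is a hypothetical name:
private static Group findChildGroup(Group parent, String namePrefix) {
    // getMemberList() returns the group's direct children (groups and datasets)
    for (Object member : parent.getMemberList()) {
        if (member instanceof Group && ((HObject) member).getName().startsWith(namePrefix)) {
            return (Group) member;
        }
    }
    return null;
}
With it, the time-group search becomes timeGroup = findChildGroup(solutionGroup, "time"), followed by the same null check.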
use of ncsa.hdf.object.Attribute in project vcell by virtualcell.
the class DataSetControllerImpl method getHDF5Attributes.
// uncomment it for Debug
// private static String DATASETNAME = "/";
// enum H5O_type {
// H5O_TYPE_UNKNOWN(-1), // Unknown object type
// H5O_TYPE_GROUP(0), // Object is a group
// H5O_TYPE_DATASET(1), // Object is a dataset
// H5O_TYPE_NAMED_DATATYPE(2), // Object is a named data type
// H5O_TYPE_NTYPES(3); // Number of different object types
// private static final Map<Integer, H5O_type> lookup = new HashMap<Integer, H5O_type>();
//
// static {
// for (H5O_type s : EnumSet.allOf(H5O_type.class))
// lookup.put(s.getCode(), s);
// }
//
// private int code;
//
// H5O_type(int layout_type) {
// this.code = layout_type;
// }
//
// public int getCode() {
// return this.code;
// }
//
// public static H5O_type get(int code) {
// return lookup.get(code);
// }
// }
//
// public static void do_iterate(File hdfFile) {
// int file_id = -1;
//
// // Open a file using default properties.
// try {
// file_id = H5.H5Fopen(hdfFile.getAbsolutePath(), HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
// }
// catch (Exception e) {
// e.printStackTrace();
// }
//
// // Begin iteration.
// System.out.println("Objects in root group:");
// try {
// if (file_id >= 0) {
// int count = (int)H5.H5Gn_members(file_id, DATASETNAME);
// String[] oname = new String[count];
// int[] otype = new int[count];
// int[] ltype = new int[count];
// long[] orefs = new long[count];
// H5.H5Gget_obj_info_all(file_id, DATASETNAME, oname, otype, ltype, orefs, HDF5Constants.H5_INDEX_NAME);
//
// // Get type of the object and display its name and type.
// for (int indx = 0; indx < otype.length; indx++) {
// switch (H5O_type.get(otype[indx])) {
// case H5O_TYPE_GROUP:
// System.out.println(" Group: " + oname[indx]);
// break;
// case H5O_TYPE_DATASET:
// System.out.println(" Dataset: " + oname[indx]);
// break;
// case H5O_TYPE_NAMED_DATATYPE:
// System.out.println(" Datatype: " + oname[indx]);
// break;
// default:
// System.out.println(" Unknown: " + oname[indx]);
// }
// }
// }
// }
// catch (Exception e) {
// e.printStackTrace();
// }
//
// // Close the file.
// try {
// if (file_id >= 0)
// H5.H5Fclose(file_id);
// }
// catch (Exception e) {
// e.printStackTrace();
// }
// }
// public static void populateHDF5(Group g, String indent,DataProcessingOutput0 dataProcessingOutput,boolean bVarStatistics,String imgDataName,Origin imgDataOrigin,Extent imgDataExtent) throws Exception
// {
// if (g == null)
// return;
//
// List members = g.getMemberList();
//
// int n = members.size();
// indent += " ";
// HObject obj = null;
//
// String nameAtt = "_name";
// String unitAtt = "_unit";
// for (int i=0; i<n; i++){
//
// obj = (HObject)members.get(i);
// //uncomment for Debug
// /*System.out.print(indent+obj+" ("+obj.getClass().getName()+") isGroup="+(obj instanceof Group));*/
// if(obj.getName().equals(SimDataConstants.DATA_PROCESSING_OUTPUT_EXTENSION_VARIABLESTATISTICS)){
// List<Metadata> metaDataL = obj.getMetadata();
// if(metaDataL != null){
// HashMap<String, String> attrHashMap = getHDF5Attributes(obj);//map contains the same number of names and attributes
// String[] variableStatNames = null;
// String[] variableUnits = null;
// Iterator<String> attrIterTemp = attrHashMap.keySet().iterator();
// boolean bHasUnit = false;
// for (int j = 0; j < attrHashMap.size(); j++) {
// String compVal = attrIterTemp.next();
// if(compVal.contains(nameAtt) || compVal.contains(unitAtt)){
// bHasUnit = true;
// break;
// }
// }
// if(bHasUnit){
// variableStatNames = new String[attrHashMap.size()/2];
// variableUnits = new String[attrHashMap.size()/2];
// }else{
// variableStatNames = new String[attrHashMap.size()]; // old way
// }
// Iterator<String> attrIter = attrHashMap.keySet().iterator();
// for (int j = 0; j < attrHashMap.size(); j++) {
// String compVal = attrIter.next();
// int compVarIdx = Integer.parseInt(compVal.substring(5, 6));
// if(compVal.contains(nameAtt)){
// variableStatNames[compVarIdx] = attrHashMap.get(compVal);
// }else if(compVal.contains(unitAtt)){
// variableUnits[compVarIdx] = attrHashMap.get(compVal);
// }else{//old way for var names(e.g. comp_0 = abc) with no "_name" or "_unit"
// variableStatNames[compVarIdx] = attrHashMap.get(compVal);
// }
// }
// dataProcessingOutput.setVariableStatNames(variableStatNames);
// dataProcessingOutput.setVariableUnits(variableUnits);
// dataProcessingOutput.setVariableStatValues(new double[variableStatNames.length][dataProcessingOutput.getTimes().length]);
// bVarStatistics = true;
// }
// }else if(obj instanceof H5ScalarDS){
// H5ScalarDS h5ScalarDS = (H5ScalarDS)obj;
// h5ScalarDS.init();
// long[] dims = h5ScalarDS.getDims();
//
// //make sure all dimensions are selected for loading if 3D
// //note: for 3D, only 1st slice selected by default
// long[] selectedDims = h5ScalarDS.getSelectedDims();
// if(selectedDims != null && selectedDims.length > 2){
// //changes internal class variable used during read
// selectedDims[2] = dims[2];
// }
//
// //load all data
// Object data = h5ScalarDS.read();
//
// if(dims != null){
// if(dims.length > 1){
// //For HDF5View (x stored in index 1) and (y stored in index 0) so must switch back to normal assumption
// long dimsY = dims[0];
// dims[0] = dims[1];
// dims[1] = dimsY;
// }
// //uncomment for Debug
// /*System.out.print(" dims=(");
// for (int j = 0; j < dims.length; j++) {
// System.out.print((j>0?"x":"")+dims[j]);
// }
// System.out.print(")");*/
// }
//
// // System.out.print(" len="+times.length);
// if(obj.getName().equals(SimDataConstants.DATA_PROCESSING_OUTPUT_EXTENSION_TIMES)){
// double[] times = (double[])data;
// dataProcessingOutput.setTimes(times);
// }else if(bVarStatistics){
// double[] stats = (double[])data;
// int timeIndex = Integer.parseInt(obj.getName().substring("time".length()));
// for (int j = 0; j < stats.length; j++) {
// dataProcessingOutput.getVariableStatValues()[j][timeIndex] = stats[j];
// }
// }else{
// double min = ((double[])data)[0];
// double max = min;
// for (int j = 0; j < ((double[])data).length; j++) {
// min = Math.min(min, ((double[])data)[j]);
// max = Math.max(max, ((double[])data)[j]);
// }
// int xSize = (int)dims[0];
// int ySize = (int)(dims.length>1?dims[1]:1);
// int zSize = (int)(dims.length>2?dims[2]:1);
// SourceDataInfo sourceDataInfo =
// new SourceDataInfo(SourceDataInfo.RAW_VALUE_TYPE, (double[])data, (imgDataExtent==null?new Extent(1,1,1):imgDataExtent), (imgDataOrigin==null?null:imgDataOrigin), new Range(min, max), 0, xSize, 1, ySize, xSize, zSize, xSize*ySize);
// Vector<SourceDataInfo> otherData = dataProcessingOutput.getDataGenerators().get(imgDataName);
// int timeIndex = Integer.parseInt(obj.getName().substring(SimDataConstants.DATA_PROCESSING_OUTPUT_EXTENSION_TIMEPREFIX.length()));
// otherData.add(sourceDataInfo);
// if(otherData.size()-1 != timeIndex){
// throw new Exception("Error HDF5 parse: added data index does not match timeIndex");
// }
// }
// }else if (obj instanceof H5Group && !obj.getName().equals(SimDataConstants.DATA_PROCESSING_OUTPUT_EXTENSION_POSTPROCESSING)){
// bVarStatistics = false;
// imgDataName = obj.getName();
// dataProcessingOutput.getDataGenerators().put(imgDataName, new Vector<SourceDataInfo>());
//
// List<Metadata> metaDataL = obj.getMetadata();
// if(metaDataL != null){//assume 6 attributes defining origin and extent
// HashMap<String, String> attrHashMap = getHDF5Attributes(obj);
// if(attrHashMap.size() == 2){
// imgDataOrigin = new Origin(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINX)), 0, 0);
// imgDataExtent = new Extent(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTX)), 1, 1);//this is 1D, however the extentY, Z cannot take 0
// }
// else if(attrHashMap.size() == 4){
// imgDataOrigin = new Origin(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINX)), Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINY)), 0);
// imgDataExtent = new Extent(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTX)), Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTY)), 1);//this is 2D, however the extentZ cannot take 0
// }
// else if(attrHashMap.size() == 6){
// imgDataOrigin = new Origin(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINX)), Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINY)), Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINZ)));
// imgDataExtent = new Extent(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTX)), Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTY)), Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTZ)));
// }
// }
//
// }
// System.out.println();
//
// if (obj instanceof Group)
// {
// populateHDF5((Group)obj, indent,dataProcessingOutput,bVarStatistics,imgDataName,imgDataOrigin,imgDataExtent);
// }
// }
// }
private static HashMap<String, String> getHDF5Attributes(HObject hObject) throws Exception {
    HashMap<String, String> attrHashMap = new HashMap<String, String>();
    List<Metadata> metaDataL = hObject.getMetadata();
    if (metaDataL != null) {
        for (int j = 0; j < metaDataL.size(); j++) {
            Attribute attr = (Attribute) metaDataL.get(j);
            String attrValue = attr.toString(",");
            // System.out.print(" "+attr.getName()+"='"+attrValue+"'");
            attrHashMap.put(attr.getName(), attrValue);
        }
    }
    return attrHashMap;
}
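getHDF5Attributes flattens every attribute of an HObject into a name-to-string map via Attribute.toString(","), which is how the commented-out populateHDF5 code above recovers origin and extent values. A minimal usage sketch; the group variable and the attribute key are assumptions, since the actual key names depend on the file being read:
// Hypothetical usage: numeric attributes come back as comma-joined strings and must be parsed
HashMap<String, String> attrMap = getHDF5Attributes(someGroup);
String originX = attrMap.get("OriginX");
if (originX != null) {
    double x = Double.parseDouble(originX);
    // ... use x, as populateHDF5 does when building an Origin/Extent
}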
use of ncsa.hdf.object.Attribute in project vcell by virtualcell.
the class DataSetControllerImpl method printInfo.
private static void printInfo(HObject hObject, String indent) throws Exception {
    if (true) {
        // debug output disabled; remove this early return to print HDF5 metadata
        return;
    }
    System.out.println(indent + hObject.getName() + ":" + hObject.getClass().getName());
    List metaDatas = hObject.getMetadata();
    for (Object metaData : metaDatas) {
        if (metaData instanceof Attribute) {
            Attribute attribute = (Attribute) metaData;
            System.out.println(indent + "metadata=" + attribute.getName() + " " + attribute.getType().getDatatypeDescription());
        } else {
            System.out.println(indent + "metadata=" + metaData.getClass().getName());
        }
    }
}
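printInfo is a per-object debug hook; driving it over a whole file means recursing through each Group's members, the same walk the commented-out populateHDF5 performs. A minimal sketch, assuming the early return above is removed; printTree is a hypothetical name:
private static void printTree(HObject hObject, String indent) throws Exception {
    // print this object's name and attributes, then recurse into child groups
    printInfo(hObject, indent);
    if (hObject instanceof Group) {
        for (Object child : ((Group) hObject).getMemberList()) {
            printTree((HObject) child, indent + "  ");
        }
    }
}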
use of ncsa.hdf.object.Attribute in project vcell by virtualcell.
the class SimulationDataSpatialHdf5 method retrieveSimDataSet.
public SimDataSet retrieveSimDataSet(double time, String varName) throws Exception {
    File tempFile = getTempSimHdf5File(time);
    FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
    FileFormat solFile = fileFormat.createInstance(tempFile.getAbsolutePath(), FileFormat.READ);
    solFile.open();
    DefaultMutableTreeNode rootNode = (DefaultMutableTreeNode) solFile.getRootNode();
    Group rootGroup = (Group) rootNode.getUserObject();
    Group solGroup = (Group) rootGroup.getMemberList().get(0);
    SimDataSet simDataSet = new SimDataSet();
    List<HObject> memberList = solGroup.getMemberList();
    for (HObject member : memberList) {
        if (!(member instanceof Dataset)) {
            continue;
        }
        Dataset dataset = (Dataset) member;
        String dsname = dataset.getName();
        if (!dsname.equals(varName)) {
            continue;
        }
        simDataSet.solValues = (double[]) dataset.read();
        List<Attribute> attrList = dataset.getMetadata();
        for (Attribute attr : attrList) {
            String attrName = attr.getName();
            Object val = attr.getValue();
            double dval = 0;
            if (val instanceof double[]) {
                dval = ((double[]) val)[0];
            }
            if (attrName.equals(SOLUTION_DATASET_ATTR_MAX_ERROR)) {
                simDataSet.maxError = dval;
            } else if (attrName.equals(SOLUTION_DATASET_ATTR_MEAN)) {
                simDataSet.mean = dval;
            } else if (attrName.equals(SOLUTION_DATASET_ATTR_RELATIVE_L2ERROR)) {
                simDataSet.l2Error = dval;
            } else if (attrName.equals(SOLUTION_DATASET_ATTR_SUM_VOLFRAC)) {
                simDataSet.sumVolFrac = dval;
            }
        }
        break;
    }
    return simDataSet;
}
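Each statistics attribute here (max error, mean, relative L2 error, summed volume fraction) is read the same way: take the first element of a double[] value, defaulting to 0. A minimal sketch of that step as a helper, reusing only calls shown above; getDoubleAttr is a hypothetical name:
private static double getDoubleAttr(Attribute attr) {
    // scalar double attributes are returned as a one-element double[]
    Object val = attr.getValue();
    return (val instanceof double[]) ? ((double[]) val)[0] : 0;
}
Note that, unlike the readers above, this snippet does not close solFile; code that adapts it may want the same try/finally close used in readHdf5SolutionMetaData.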