Search in sources :

Example 1 with Metadata

Example use of ncsa.hdf.object.Metadata in the project "vcell" by virtualcell.

From the class DataSetControllerImpl, method getHDF5Attributes:

// uncomment it for Debug
// private static String DATASETNAME = "/";
// enum H5O_type {
// H5O_TYPE_UNKNOWN(-1), // Unknown object type
// H5O_TYPE_GROUP(0), // Object is a group
// H5O_TYPE_DATASET(1), // Object is a dataset
// H5O_TYPE_NAMED_DATATYPE(2), // Object is a named data type
// H5O_TYPE_NTYPES(3); // Number of different object types
// private static final Map<Integer, H5O_type> lookup = new HashMap<Integer, H5O_type>();
// 
// static {
// for (H5O_type s : EnumSet.allOf(H5O_type.class))
// lookup.put(s.getCode(), s);
// }
// 
// private int code;
// 
// H5O_type(int layout_type) {
// this.code = layout_type;
// }
// 
// public int getCode() {
// return this.code;
// }
// 
// public static H5O_type get(int code) {
// return lookup.get(code);
// }
// }
// 
// public static void do_iterate(File hdfFile) {
// int file_id = -1;
// 
// // Open a file using default properties.
// try {
// file_id = H5.H5Fopen(hdfFile.getAbsolutePath(), HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
// }
// catch (Exception e) {
// e.printStackTrace();
// }
// 
// // Begin iteration.
// System.out.println("Objects in root group:");
// try {
// if (file_id >= 0) {
// int count = (int)H5.H5Gn_members(file_id, DATASETNAME);
// String[] oname = new String[count];
// int[] otype = new int[count];
// int[] ltype = new int[count];
// long[] orefs = new long[count];
// H5.H5Gget_obj_info_all(file_id, DATASETNAME, oname, otype, ltype, orefs, HDF5Constants.H5_INDEX_NAME);
// 
// // Get type of the object and display its name and type.
// for (int indx = 0; indx < otype.length; indx++) {
// switch (H5O_type.get(otype[indx])) {
// case H5O_TYPE_GROUP:
// System.out.println("  Group: " + oname[indx]);
// break;
// case H5O_TYPE_DATASET:
// System.out.println("  Dataset: " + oname[indx]);
// break;
// case H5O_TYPE_NAMED_DATATYPE:
// System.out.println("  Datatype: " + oname[indx]);
// break;
// default:
// System.out.println("  Unknown: " + oname[indx]);
// }
// }
// }
// }
// catch (Exception e) {
// e.printStackTrace();
// }
// 
// // Close the file.
// try {
// if (file_id >= 0)
// H5.H5Fclose(file_id);
// }
// catch (Exception e) {
// e.printStackTrace();
// }
// }
// public static void populateHDF5(Group g, String indent,DataProcessingOutput0 dataProcessingOutput,boolean bVarStatistics,String imgDataName,Origin imgDataOrigin,Extent imgDataExtent) throws Exception
// {
// if (g == null)
// return;
// 
// List members = g.getMemberList();
// 
// int n = members.size();
// indent += "    ";
// HObject obj = null;
// 
// String nameAtt = "_name";
// String unitAtt = "_unit";
// for (int i=0; i<n; i++){
// 
// obj = (HObject)members.get(i);
// //uncomment for Debug
// /*System.out.print(indent+obj+" ("+obj.getClass().getName()+") isGroup="+(obj instanceof Group));*/
// if(obj.getName().equals(SimDataConstants.DATA_PROCESSING_OUTPUT_EXTENSION_VARIABLESTATISTICS)){
// List<Metadata> metaDataL = obj.getMetadata();
// if(metaDataL != null){
// HashMap<String, String> attrHashMap = getHDF5Attributes(obj);//map contains the same number of names and attributes
// String[] variableStatNames = null;
// String[] variableUnits = null;
// Iterator<String> attrIterTemp = attrHashMap.keySet().iterator();
// boolean bHasUnit = false;
// for (int j = 0; j < attrHashMap.size(); j++) {
// String compVal = attrIterTemp.next();
// if(compVal.contains(nameAtt) || compVal.contains(unitAtt)){
// bHasUnit = true;
// break;
// }
// }
// if(bHasUnit){
// variableStatNames = new String[attrHashMap.size()/2];
// variableUnits = new String[attrHashMap.size()/2];
// }else{
// variableStatNames = new String[attrHashMap.size()]; // old way
// }
// Iterator<String> attrIter = attrHashMap.keySet().iterator();
// for (int j = 0; j < attrHashMap.size(); j++) {
// String compVal = attrIter.next();
// int compVarIdx = Integer.parseInt(compVal.substring(5, 6));
// if(compVal.contains(nameAtt)){
// variableStatNames[compVarIdx] = attrHashMap.get(compVal);
// }else if(compVal.contains(unitAtt)){
// variableUnits[compVarIdx] = attrHashMap.get(compVal);
// }else{//old way for var names(e.g. comp_0 = abc) with no "_name" or "_unit"
// variableStatNames[compVarIdx] = attrHashMap.get(compVal);
// }
// }
// dataProcessingOutput.setVariableStatNames(variableStatNames);
// dataProcessingOutput.setVariableUnits(variableUnits);
// dataProcessingOutput.setVariableStatValues(new double[variableStatNames.length][dataProcessingOutput.getTimes().length]);
// bVarStatistics = true;
// }
// }else if(obj instanceof H5ScalarDS){
// H5ScalarDS h5ScalarDS = (H5ScalarDS)obj;
// h5ScalarDS.init();
// long[] dims = h5ScalarDS.getDims();
// 
// //make sure all dimensions are selected for loading if 3D
// //note: for 3D, only 1st slice selected by default
// long[] selectedDims = h5ScalarDS.getSelectedDims();
// if(selectedDims != null && selectedDims.length > 2){
// //changes internal class variable used during read
// selectedDims[2] = dims[2];
// }
// 
// //load all data
// Object data = h5ScalarDS.read();
// 
// if(dims != null){
// if(dims.length > 1){
// //For HDF5View (x stored in index 1) and (y stored in index 0) so must switch back to normal assumption
// long dimsY = dims[0];
// dims[0] = dims[1];
// dims[1] = dimsY;
// }
// //uncomment for Debug
// /*System.out.print(" dims=(");
// for (int j = 0; j < dims.length; j++) {
// System.out.print((j>0?"x":"")+dims[j]);
// }
// System.out.print(")");*/
// }
// 
// //        	System.out.print(" len="+times.length);
// if(obj.getName().equals(SimDataConstants.DATA_PROCESSING_OUTPUT_EXTENSION_TIMES)){
// double[] times = (double[])data;
// dataProcessingOutput.setTimes(times);
// }else if(bVarStatistics){
// double[] stats = (double[])data;
// int timeIndex = Integer.parseInt(obj.getName().substring("time".length()));
// for (int j = 0; j < stats.length; j++) {
// dataProcessingOutput.getVariableStatValues()[j][timeIndex] = stats[j];
// }
// }else{
// double min = ((double[])data)[0];
// double max = min;
// for (int j = 0; j < ((double[])data).length; j++) {
// min = Math.min(min, ((double[])data)[j]);
// max = Math.max(max, ((double[])data)[j]);
// }
// int xSize = (int)dims[0];
// int ySize = (int)(dims.length>1?dims[1]:1);
// int zSize = (int)(dims.length>2?dims[2]:1);
// SourceDataInfo sourceDataInfo =
// new SourceDataInfo(SourceDataInfo.RAW_VALUE_TYPE, (double[])data, (imgDataExtent==null?new Extent(1,1,1):imgDataExtent), (imgDataOrigin==null?null:imgDataOrigin), new Range(min, max), 0, xSize, 1, ySize, xSize, zSize, xSize*ySize);
// Vector<SourceDataInfo> otherData = dataProcessingOutput.getDataGenerators().get(imgDataName);
// int timeIndex = Integer.parseInt(obj.getName().substring(SimDataConstants.DATA_PROCESSING_OUTPUT_EXTENSION_TIMEPREFIX.length()));
// otherData.add(sourceDataInfo);
// if(otherData.size()-1 != timeIndex){
// throw new Exception("Error HDF5 parse: added data index does not match timeIndex");
// }
// }
// }else if (obj instanceof H5Group && !obj.getName().equals(SimDataConstants.DATA_PROCESSING_OUTPUT_EXTENSION_POSTPROCESSING)){
// bVarStatistics = false;
// imgDataName = obj.getName();
// dataProcessingOutput.getDataGenerators().put(imgDataName, new Vector<SourceDataInfo>());
// 
// List<Metadata> metaDataL = obj.getMetadata();
// if(metaDataL != null){//assume 6 attributes defining origin and extent
// HashMap<String, String> attrHashMap = getHDF5Attributes(obj);
// if(attrHashMap.size() == 2){
// imgDataOrigin = new Origin(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINX)), 0, 0);
// imgDataExtent = new Extent(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTX)), 1, 1);//this is 1D, however the extentY, Z cannot take 0
// }
// else if(attrHashMap.size() == 4){
// imgDataOrigin = new Origin(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINX)), Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINY)), 0);
// imgDataExtent = new Extent(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTX)), Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTY)), 1);//this is 2D, however the extentZ cannot take 0
// }
// else if(attrHashMap.size() == 6){
// imgDataOrigin = new Origin(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINX)), Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINY)), Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINZ)));
// imgDataExtent = new Extent(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTX)), Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTY)), Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTZ)));
// }
// }
// 
// }
// System.out.println();
// 
// if (obj instanceof Group)
// {
// populateHDF5((Group)obj, indent,dataProcessingOutput,bVarStatistics,imgDataName,imgDataOrigin,imgDataExtent);
// }
// }
// }
/**
 * Collects the HDF5 attributes attached to the given object into a name-to-value map.
 * Each attribute's value is flattened to a single comma-separated string via
 * {@code Attribute.toString(",")}.
 *
 * @param hObject the HDF5 object whose metadata is read; its {@code getMetadata()}
 *                result may be {@code null}, in which case an empty map is returned
 * @return a map from attribute name to its comma-joined string value (never {@code null})
 * @throws Exception propagated from {@code HObject.getMetadata()}
 */
private static HashMap<String, String> getHDF5Attributes(HObject hObject) throws Exception {
    HashMap<String, String> attrHashMap = new HashMap<String, String>();
    List<Metadata> metaDataL = hObject.getMetadata();
    if (metaDataL != null) {
        for (Metadata metadata : metaDataL) {
            // NOTE(review): assumes every Metadata entry is an Attribute — the original
            // code made the same unchecked cast; confirm against the HDF-Java object model.
            Attribute attr = (Attribute) metadata;
            // Fix: the original computed attrValue and then ignored it, calling
            // attr.toString(",") a second time inside put(); compute once and reuse.
            String attrValue = attr.toString(",");
            attrHashMap.put(attr.getName(), attrValue);
        }
    }
    return attrHashMap;
}
Also used : HashMap(java.util.HashMap) Attribute(ncsa.hdf.object.Attribute) Metadata(ncsa.hdf.object.Metadata)

Aggregations

HashMap (java.util.HashMap)1 Attribute (ncsa.hdf.object.Attribute)1 Metadata (ncsa.hdf.object.Metadata)1