Use of ncsa.hdf.object.Dataset in project vcell by virtualcell.
From the class DataSetControllerImpl, the method iterateHDF5:
private static void iterateHDF5(HObject hObject, String indent, DataProcessingHelper dataProcessingHelper) throws Exception {
if (hObject instanceof Group) {
Group group = ((Group) hObject);
printInfo(group, indent);
if (group.getName().equals("/") || group.getName().equals(SimDataConstants.DATA_PROCESSING_OUTPUT_EXTENSION_POSTPROCESSING)) {
List<HObject> postProcessMembers = ((Group) hObject).getMemberList();
for (HObject nextHObject : postProcessMembers) {
iterateHDF5(nextHObject, indent + " ", dataProcessingHelper);
}
} else if (group.getName().equals(SimDataConstants.DATA_PROCESSING_OUTPUT_EXTENSION_VARIABLESTATISTICS) && dataProcessingHelper.isInfoOnly()) {
populateStatNamesAndUnits(hObject, dataProcessingHelper);
List<HObject> statDataAtEachTime = group.getMemberList();
dataProcessingHelper.statValues = new double[dataProcessingHelper.statVarNames.length][statDataAtEachTime.size()];
for (HObject nextStatData : statDataAtEachTime) {
printInfo(nextStatData, indent + " ");
// always get stats data when asked for info
processDims(nextStatData, dataProcessingHelper, false);
double[] stats = (double[]) dataProcessingHelper.tempData;
int timeIndex = Integer.parseInt(nextStatData.getName().substring("time".length()));
for (int j = 0; j < stats.length; j++) {
dataProcessingHelper.statValues[j][timeIndex] = stats[j];
}
}
} else {
// must be image data
if (dataProcessingHelper.isInfoOnly()) {
dataProcessingHelper.imageNames = new ArrayList<String>();
dataProcessingHelper.imageISize = new ArrayList<ISize>();
dataProcessingHelper.imageOrigin = new ArrayList<Origin>();
dataProcessingHelper.imageExtent = new ArrayList<Extent>();
Origin imgDataOrigin;
Extent imgDataExtent;
HashMap<String, String> attrHashMap = getHDF5Attributes(group);
if (attrHashMap.size() == 2) {
imgDataOrigin = new Origin(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINX)), 0, 0);
// this is 1D; however, extentY and extentZ cannot be 0
imgDataExtent = new Extent(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTX)), 1, 1);
} else if (attrHashMap.size() == 4) {
imgDataOrigin = new Origin(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINX)), Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINY)), 0);
// this is 2D; however, extentZ cannot be 0
imgDataExtent = new Extent(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTX)), Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTY)), 1);
} else if (attrHashMap.size() == 6) {
imgDataOrigin = new Origin(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINX)), Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINY)), Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_ORIGINZ)));
imgDataExtent = new Extent(Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTX)), Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTY)), Double.valueOf(attrHashMap.get(DATA_PROCESSING_OUTPUT_EXTENTZ)));
} else {
throw new Exception("Unexpected number of origin/extent values");
}
dataProcessingHelper.imageNames.add(hObject.getName());
dataProcessingHelper.imageOrigin.add(imgDataOrigin);
dataProcessingHelper.imageExtent.add(imgDataExtent);
// get ISize
processDims((H5ScalarDS) (((Group) hObject).getMemberList()).get(0), dataProcessingHelper, true);
long[] dims = dataProcessingHelper.tempDims;
ISize isize = new ISize((int) dims[0], (int) (dims.length > 1 ? dims[1] : 1), (int) (dims.length > 2 ? dims[2] : 1));
dataProcessingHelper.imageISize.add(isize);
} else {
int currentVarNameIndex = -1;
for (int i = 0; i < dataProcessingHelper.specificVarNames.length; i++) {
if (group.getName().equals(dataProcessingHelper.specificVarNames[i])) {
currentVarNameIndex = i;
break;
}
}
if (currentVarNameIndex == -1) {
// skip this group
return;
}
dataProcessingHelper.specificDataValues[currentVarNameIndex] = new double[(dataProcessingHelper.specificTimePointHelper.isAllTimePoints() ? dataProcessingHelper.times.length : dataProcessingHelper.specificTimePointHelper.getTimePoints().length)][];
List<HObject> imageDataAtEachTime = ((Group) hObject).getMemberList();
int foundTimePointIndex = 0;
for (HObject nextImageData : imageDataAtEachTime) {
// if(dataProcessingHelper.isInfoOnly()){
// printInfo(nextImageData,indent+" ");
// processDims(nextImageData, dataProcessingHelper,true);
// long[] dims = dataProcessingHelper.tempDims;
// ISize isize = new ISize((int)dims[0], (int)(dims.length>1?dims[1]:1), (int)(dims.length>2?dims[2]:1));
// dataProcessingHelper.imageISize.add(isize);
// break;//only need 1st one for info
// }else{
int hdf5GroupTimeIndex = Integer.parseInt(nextImageData.getName().substring(SimDataConstants.DATA_PROCESSING_OUTPUT_EXTENSION_TIMEPREFIX.length()));
if (dataProcessingHelper.specificTimePointHelper.isAllTimePoints() || dataProcessingHelper.specificTimePointHelper.getTimePoints()[foundTimePointIndex] == dataProcessingHelper.times[hdf5GroupTimeIndex]) {
int timeIndex = (dataProcessingHelper.specificTimePointHelper.isAllTimePoints() ? hdf5GroupTimeIndex : foundTimePointIndex);
processDims(nextImageData, dataProcessingHelper, false);
long[] dims = dataProcessingHelper.tempDims;
ISize isize = new ISize((int) dims[0], (int) (dims.length > 1 ? dims[1] : 1), (int) (dims.length > 2 ? dims[2] : 1));
if (dataProcessingHelper.specificDataIndexHelper.isAllDataIndexes()) {
dataProcessingHelper.specificDataValues[currentVarNameIndex][timeIndex] = (double[]) dataProcessingHelper.tempData;
} else if (dataProcessingHelper.specificDataIndexHelper.isSingleSlice()) {
dataProcessingHelper.specificDataValues[currentVarNameIndex][timeIndex] = new double[isize.getX() * isize.getY()];
System.arraycopy((double[]) dataProcessingHelper.tempData, dataProcessingHelper.specificDataIndexHelper.getSliceIndex() * (isize.getX() * isize.getY()), dataProcessingHelper.specificDataValues[currentVarNameIndex][timeIndex], 0, isize.getX() * isize.getY());
} else {
dataProcessingHelper.specificDataValues[currentVarNameIndex][timeIndex] = new double[dataProcessingHelper.specificDataIndexHelper.getDataIndexes().length];
for (int i = 0; i < dataProcessingHelper.specificDataIndexHelper.getDataIndexes().length; i++) {
dataProcessingHelper.specificDataValues[currentVarNameIndex][timeIndex][i] = ((double[]) dataProcessingHelper.tempData)[dataProcessingHelper.specificDataIndexHelper.getDataIndexes()[i]];
}
}
foundTimePointIndex++;
if (!dataProcessingHelper.specificTimePointHelper.isAllTimePoints() && foundTimePointIndex == dataProcessingHelper.specificTimePointHelper.getTimePoints().length) {
// break out after we get our data
break;
}
}
// }
}
}
}
} else if (hObject instanceof Dataset) {
Dataset dataset = (Dataset) hObject;
printInfo(dataset, indent);
if (dataset.getName().equals(SimDataConstants.DATA_PROCESSING_OUTPUT_EXTENSION_TIMES)) {
processDims(hObject, dataProcessingHelper, false);
dataProcessingHelper.times = (double[]) dataProcessingHelper.tempData;
}
} else if (hObject instanceof Datatype) {
printInfo(hObject, indent);
} else {
printInfo(hObject, indent);
}
}
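For context, a minimal sketch of how such a recursive traversal is typically driven: open the post-processing HDF5 file with the ncsa.hdf.object FileFormat API, pull the root Group off the file's tree model, and recurse from there. The file path and the simplified visit method below are hypothetical stand-ins; only the FileFormat, Group and Dataset calls mirror the ones used in iterateHDF5 above.

import java.util.List;
import javax.swing.tree.DefaultMutableTreeNode;

import ncsa.hdf.object.Dataset;
import ncsa.hdf.object.FileFormat;
import ncsa.hdf.object.Group;
import ncsa.hdf.object.HObject;

public class PostProcessingWalkSketch {
    public static void main(String[] args) throws Exception {
        // hypothetical path to a simulation post-processing output file
        String path = "/tmp/SimID_0_0_.hdf5";
        FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
        FileFormat hdf5File = fileFormat.createInstance(path, FileFormat.READ);
        hdf5File.open();
        try {
            // the root group hangs off the user object of the root tree node
            DefaultMutableTreeNode rootNode = (DefaultMutableTreeNode) hdf5File.getRootNode();
            Group rootGroup = (Group) rootNode.getUserObject();
            visit(rootGroup, "");
        } finally {
            hdf5File.close();
        }
    }

    // simplified stand-in for iterateHDF5: print every object, recurse into groups
    private static void visit(HObject hObject, String indent) throws Exception {
        System.out.println(indent + hObject.getName());
        if (hObject instanceof Group) {
            List<HObject> members = ((Group) hObject).getMemberList();
            for (HObject member : members) {
                visit(member, indent + "  ");
            }
        } else if (hObject instanceof Dataset) {
            // here Dataset.read() would return the raw array (e.g. double[])
        }
    }
}

The try/finally around the walk mirrors the close-in-finally pattern used in the readers below; the file must stay open for the duration of the traversal.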
Use of ncsa.hdf.object.Dataset in project vcell by virtualcell.
From the class SimDataReader, the method getNextDataAtCurrentTimeChombo:
private void getNextDataAtCurrentTimeChombo(double[][] returnValues) throws Exception {
if (zipFilenNames == null || zipFilenNames[masterTimeIndex] == null) {
return;
}
if (currentZipFile == null || !currentZipFileName.equals(zipFilenNames[masterTimeIndex])) {
close();
currentZipFile = new ZipFile(zipFilenNames[masterTimeIndex]);
currentZipFileName = zipFilenNames[masterTimeIndex];
}
File tempFile = null;
FileFormat solFile = null;
try {
tempFile = DataSet.createTempHdf5File(currentZipFile, simDataFileNames[masterTimeIndex]);
FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
solFile = fileFormat.createInstance(tempFile.getAbsolutePath(), FileFormat.READ);
solFile.open();
for (int k = 0; k < varNames.length; ++k) {
try {
boolean bExtrapolatedValue = false;
String varName = varNames[k];
if (varName.endsWith(InsideVariable.INSIDE_VARIABLE_SUFFIX)) {
bExtrapolatedValue = true;
varName = varName.substring(0, varName.lastIndexOf(InsideVariable.INSIDE_VARIABLE_SUFFIX));
} else if (varName.endsWith(OutsideVariable.OUTSIDE_VARIABLE_SUFFIX)) {
bExtrapolatedValue = true;
varName = varName.substring(0, varName.lastIndexOf(OutsideVariable.OUTSIDE_VARIABLE_SUFFIX));
}
double[] sol = null;
if (bExtrapolatedValue) {
sol = DataSet.readChomboExtrapolatedValues(varName, solFile);
} else {
String varPath = Hdf5Utils.getVarSolutionPath(varNames[k]);
HObject solObj = FileFormat.findObject(solFile, varPath);
if (solObj instanceof Dataset) {
Dataset dataset = (Dataset) solObj;
sol = (double[]) dataset.read();
}
}
if (sol != null) {
for (int l = 0; l < varIndexes[k].length; ++l) {
int idx = varIndexes[k][l];
double val = sol[idx];
returnValues[k][l] = val;
}
}
} catch (Exception e) {
e.printStackTrace(System.out);
throw new DataAccessException(e.getMessage(), e);
}
}
} finally {
try {
if (solFile != null) {
solFile.close();
}
if (tempFile != null) {
if (!tempFile.delete()) {
System.err.println("couldn't delete temp file " + tempFile.getAbsolutePath());
}
}
} catch (Exception e) {
// ignore
}
}
++masterTimeIndex;
if (masterTimeIndex >= times.length) {
close();
}
}
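For reference, a minimal sketch of the core read pattern used above: locate a solution dataset by its HDF5 path with FileFormat.findObject and read it as a double array. The file name and the "/solution/cell" path are hypothetical; in VCell the path comes from Hdf5Utils.getVarSolutionPath(varName).

import ncsa.hdf.object.Dataset;
import ncsa.hdf.object.FileFormat;
import ncsa.hdf.object.HObject;

public class ChomboSolutionReadSketch {
    public static void main(String[] args) throws Exception {
        FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
        // hypothetical temp file extracted from the per-time zip archive
        FileFormat solFile = fileFormat.createInstance("/tmp/SimID_0_0_000.hdf5", FileFormat.READ);
        solFile.open();
        try {
            // hypothetical solution path; VCell builds it via Hdf5Utils.getVarSolutionPath(varName)
            String varPath = "/solution/cell";
            HObject solObj = FileFormat.findObject(solFile, varPath);
            if (solObj instanceof Dataset) {
                double[] sol = (double[]) ((Dataset) solObj).read();
                System.out.println("read " + sol.length + " values");
            }
        } finally {
            solFile.close();
        }
    }
}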
Use of ncsa.hdf.object.Dataset in project vcell by virtualcell.
From the class SimulationDataSpatialHdf5, the method retrieveSimDataSet:
public SimDataSet retrieveSimDataSet(double time, String varName) throws Exception {
File tempFile = getTempSimHdf5File(time);
FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
FileFormat solFile = fileFormat.createInstance(tempFile.getAbsolutePath(), FileFormat.READ);
solFile.open();
DefaultMutableTreeNode rootNode = (DefaultMutableTreeNode) solFile.getRootNode();
Group rootGroup = (Group) rootNode.getUserObject();
Group solGroup = (Group) rootGroup.getMemberList().get(0);
SimDataSet simDataSet = new SimDataSet();
List<HObject> memberList = solGroup.getMemberList();
for (HObject member : memberList) {
if (!(member instanceof Dataset)) {
continue;
}
Dataset dataset = (Dataset) member;
String dsname = dataset.getName();
if (!dsname.equals(varName)) {
continue;
}
simDataSet.solValues = (double[]) dataset.read();
List<Attribute> attrList = dataset.getMetadata();
for (Attribute attr : attrList) {
String attrName = attr.getName();
Object val = attr.getValue();
double dval = 0;
if (val instanceof double[]) {
dval = ((double[]) val)[0];
}
if (attrName.equals(SOLUTION_DATASET_ATTR_MAX_ERROR)) {
simDataSet.maxError = dval;
} else if (attrName.equals(SOLUTION_DATASET_ATTR_MEAN)) {
simDataSet.mean = dval;
} else if (attrName.equals(SOLUTION_DATASET_ATTR_RELATIVE_L2ERROR)) {
simDataSet.l2Error = dval;
} else if (attrName.equals(SOLUTION_DATASET_ATTR_SUM_VOLFRAC)) {
simDataSet.sumVolFrac = dval;
}
}
break;
}
return simDataSet;
}
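A condensed sketch of the attribute-reading pattern above, under the assumption (as in the method) that scalar HDF5 attributes come back from Attribute.getValue() as one-element primitive arrays. The file path is hypothetical; the root-group/solution-group navigation is the same as in retrieveSimDataSet.

import java.util.List;
import javax.swing.tree.DefaultMutableTreeNode;

import ncsa.hdf.object.Attribute;
import ncsa.hdf.object.Dataset;
import ncsa.hdf.object.FileFormat;
import ncsa.hdf.object.Group;
import ncsa.hdf.object.HObject;

public class DatasetAttributeSketch {
    public static void main(String[] args) throws Exception {
        FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
        // hypothetical solution file
        FileFormat solFile = fileFormat.createInstance("/tmp/SimID_0_0_000.hdf5", FileFormat.READ);
        solFile.open();
        try {
            Group rootGroup = (Group) ((DefaultMutableTreeNode) solFile.getRootNode()).getUserObject();
            Group solGroup = (Group) rootGroup.getMemberList().get(0);
            for (HObject member : solGroup.getMemberList()) {
                if (!(member instanceof Dataset)) {
                    continue;
                }
                Dataset dataset = (Dataset) member;
                List<Attribute> attrList = dataset.getMetadata();
                for (Attribute attr : attrList) {
                    Object val = attr.getValue();
                    // scalar HDF5 attributes are returned as one-element arrays
                    if (val instanceof double[]) {
                        System.out.println(dataset.getName() + "." + attr.getName() + " = " + ((double[]) val)[0]);
                    }
                }
            }
        } finally {
            solFile.close();
        }
    }
}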
Use of ncsa.hdf.object.Dataset in project vcell by virtualcell.
From the class SimulationDataSpatialHdf5, the method readMeshFile:
public static ChomboMesh readMeshFile(File chomboMeshFile) throws Exception {
// if (chomboMesh != null)
// {
// return;
// }
ChomboMesh chomboMesh = new ChomboMesh();
// File mfile = new File(userDirectory, getMeshFileName());
if (H5.H5open() < 0) {
throw new Exception("H5.H5open() failed");
}
FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
if (fileFormat == null) {
throw new Exception("FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5) failed, returned null.");
}
FileFormat meshFile = fileFormat.createInstance(chomboMeshFile.getAbsolutePath(), FileFormat.READ);
meshFile.open();
DefaultMutableTreeNode rootNode = (DefaultMutableTreeNode) meshFile.getRootNode();
Group rootGroup = (Group) rootNode.getUserObject();
Group meshGroup = (Group) rootGroup.getMemberList().get(0);
List<Attribute> meshAttrList = meshGroup.getMetadata();
for (Attribute attr : meshAttrList) {
String attrName = attr.getName();
Object value = attr.getValue();
if (attrName.equals(MESH_ATTR_DIMENSION)) {
chomboMesh.dimension = ((int[]) value)[0];
} else {
String[] valueStrArray = (String[]) value;
String value0 = valueStrArray[0];
StringTokenizer st = new StringTokenizer(value0, "{,} ");
List<Double> valueList = new ArrayList<Double>();
while (st.hasMoreTokens()) {
String token = st.nextToken();
valueList.add(Double.parseDouble(token));
}
if (attrName.equals(MESH_ATTR_DX)) {
for (int i = 0; i < valueList.size(); ++i) {
chomboMesh.dx[i] = valueList.get(i);
}
} else if (attrName.equals(MESH_ATTR_EXTENT)) {
for (int i = 0; i < valueList.size(); ++i) {
chomboMesh.extent[i] = valueList.get(i);
}
} else if (attrName.equals(MESH_ATTR_NX)) {
for (int i = 0; i < valueList.size(); ++i) {
chomboMesh.nx[i] = valueList.get(i).intValue();
}
} else if (attrName.equals(MESH_ATTR_ORIGIN)) {
for (int i = 0; i < valueList.size(); ++i) {
chomboMesh.origin[i] = valueList.get(i);
}
}
}
}
List<HObject> memberList = meshGroup.getMemberList();
for (HObject member : memberList) {
if (member instanceof Dataset) {
Dataset dataset = (Dataset) member;
Vector vectValues = (Vector) dataset.read();
String name = dataset.getName();
if (name.equals(BOXES_DATASET)) {
// not needed right now
} else if (name.equals(METRICS_DATASET)) {
H5CompoundDS compoundDataSet = (H5CompoundDS) dataset;
chomboMesh.metricsColumnNames = compoundDataSet.getMemberNames();
int c = -1;
int[] index = (int[]) vectValues.get(++c);
int[] i = (int[]) vectValues.get(++c);
int[] j = (int[]) vectValues.get(++c);
int[] k = null;
if (chomboMesh.dimension == 3) {
k = (int[]) vectValues.get(++c);
}
double[] x = (double[]) vectValues.get(++c);
double[] y = (double[]) vectValues.get(++c);
double[] z = null;
if (chomboMesh.dimension == 3) {
z = (double[]) vectValues.get(++c);
}
double[] normalx = (double[]) vectValues.get(++c);
double[] normaly = (double[]) vectValues.get(++c);
double[] normalz = null;
if (chomboMesh.dimension == 3) {
normalz = (double[]) vectValues.get(++c);
}
double[] volFrac = (double[]) vectValues.get(++c);
double[] areaFrac = (double[]) vectValues.get(++c);
for (int n = 0; n < index.length; ++n) {
ChomboMeshMetricsEntry entry = new ChomboMeshMetricsEntry(index[n], i[n], j[n], k == null ? 0 : k[n], x[n], y[n], z == null ? 0 : z[n], normalx[n], normaly[n], normalz == null ? 0 : normalz[n], volFrac[n], areaFrac[n]);
chomboMesh.metrics.add(entry);
}
} else if (name.equals(SURFACE_DATASET)) {
// not needed right now
} else if (name.equals(SLICE_VIEW_DATASET)) {
// not needed right now
}
}
}
return chomboMesh;
}
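The METRICS_DATASET handling above relies on the compound-dataset behaviour of the legacy object API: for an H5CompoundDS, read() returns a Vector holding one primitive array per member column, in the order reported by getMemberNames(). A minimal sketch of that pattern, with a hypothetical mesh file path:

import java.util.Vector;
import javax.swing.tree.DefaultMutableTreeNode;

import ncsa.hdf.object.FileFormat;
import ncsa.hdf.object.Group;
import ncsa.hdf.object.HObject;
import ncsa.hdf.object.h5.H5CompoundDS;

public class CompoundDatasetSketch {
    public static void main(String[] args) throws Exception {
        FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
        // hypothetical Chombo mesh file
        FileFormat meshFile = fileFormat.createInstance("/tmp/SimID_0_0_.mesh.hdf5", FileFormat.READ);
        meshFile.open();
        try {
            Group rootGroup = (Group) ((DefaultMutableTreeNode) meshFile.getRootNode()).getUserObject();
            Group meshGroup = (Group) rootGroup.getMemberList().get(0);
            for (HObject member : meshGroup.getMemberList()) {
                if (member instanceof H5CompoundDS) {
                    H5CompoundDS compound = (H5CompoundDS) member;
                    // one entry per member column, in the same order as getMemberNames()
                    Vector columns = (Vector) compound.read();
                    String[] names = compound.getMemberNames();
                    for (int c = 0; c < names.length; c++) {
                        System.out.println(names[c] + " -> " + columns.get(c).getClass().getSimpleName());
                    }
                }
            }
        } finally {
            meshFile.close();
        }
    }
}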
Use of ncsa.hdf.object.Dataset in project vcell by virtualcell.
From the class H5FileStructure, the method printGroup:
/**
* Recursively print a group and its members.
*
* @throws Exception
*/
private static void printGroup(Group g, String indent) throws Exception {
if (g == null)
return;
java.util.List members = g.getMemberList();
int n = members.size();
indent += " ";
HObject obj = null;
for (int i = 0; i < n; i++) {
obj = (HObject) members.get(i);
System.out.println(indent + obj);
if (obj instanceof Group) {
printGroup((Group) obj, indent);
}
Dataset ds = BeanUtils.downcast(Dataset.class, obj);
if (ds != null && ds.getName().equals("elements")) {
// if (ds != null && ds.getName().equals("boundaries")) {
VH5Dataset vds = new VH5Dataset(ds);
vds.info();
vds.meta();
}
}
}
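BeanUtils.downcast is a VCell utility rather than part of the HDF5 API; a plain-Java sketch of an equivalent null-safe downcast helper (hypothetical name) is:

public final class DowncastSketch {
    // minimal stand-in for BeanUtils.downcast(Class, Object): returns the object
    // cast to the requested type, or null if it is not an instance of that type
    public static <T> T downcast(Class<T> clazz, Object obj) {
        return clazz.isInstance(obj) ? clazz.cast(obj) : null;
    }

    public static void main(String[] args) {
        Object value = "elements";
        String asString = downcast(String.class, value);   // "elements"
        Integer asInteger = downcast(Integer.class, value); // null
        System.out.println(asString + " / " + asInteger);
    }
}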