Use of io.jhdf.api.Attribute in project drill by apache.
The class HDF5BatchReader, method getAttributes.
/**
 * Gets the attributes of an HDF5 dataset and returns them as a HashMap.
 *
 * @param path The path for which you wish to retrieve attributes
 * @return Map The attributes for the given path, or an empty Map if no attributes are present
 */
private Map<String, HDF5Attribute> getAttributes(String path) {
  Map<String, Attribute> attributeList;
  // Remove trailing slashes
  if (path.endsWith("/")) {
    path = path.substring(0, path.length() - 1);
  }

  logger.debug("Getting attributes for {}", path);
  Map<String, HDF5Attribute> attributes = new HashMap<>();

  Node theNode;
  try {
    theNode = hdfFile.getByPath(path);
  } catch (Exception e) {
    // Couldn't find node
    logger.debug("Couldn't get attributes for path: {}", path);
    logger.debug("Error: {}", e.getMessage());
    return attributes;
  }

  try {
    attributeList = theNode.getAttributes();
  } catch (HdfException e) {
    logger.warn("Unable to get attributes for {}: Only Huge objects BTrees with 1 record are currently supported.", path);
    return attributes;
  }

  logger.debug("Found {} attributes for {}", attributeList.size(), path);
  for (Map.Entry<String, Attribute> attributeEntry : attributeList.entrySet()) {
    HDF5Attribute attribute = HDF5Utils.getAttribute(path, attributeEntry.getKey(), hdfFile);
    // Ignore compound attributes.
    if (attribute != null && attributeEntry.getValue().isScalar()) {
      logger.debug("Adding {} to attribute list for {}", attribute.getKey(), path);
      attributes.put(attribute.getKey(), attribute);
    }
  }
  return attributes;
}
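
For context, a minimal standalone sketch of the jhdf calls that getAttributes() wraps, useful for checking what a file exposes outside of Drill. The file name "example.h5" and the path "/dataset1" are hypothetical placeholders; only jhdf calls already used above (getByPath, getAttributes, isScalar, getData, getJavaType) appear here, and error handling is omitted.

import io.jhdf.HdfFile;
import io.jhdf.api.Attribute;
import io.jhdf.api.Node;

import java.nio.file.Paths;
import java.util.Map;

public class JhdfAttributeSketch {
  public static void main(String[] args) {
    // Hypothetical file and dataset path; any readable HDF5 file with attributes will do.
    try (HdfFile hdfFile = new HdfFile(Paths.get("example.h5"))) {
      Node node = hdfFile.getByPath("/dataset1");
      Map<String, Attribute> attributes = node.getAttributes();
      for (Map.Entry<String, Attribute> entry : attributes.entrySet()) {
        Attribute attribute = entry.getValue();
        // Mirror the reader above: keep only scalar attributes, skip compound ones.
        if (attribute.isScalar()) {
          System.out.printf("%s = %s (%s)%n",
              entry.getKey(), attribute.getData(), attribute.getJavaType().getSimpleName());
        }
      }
    }
  }
}

As in the reader above, non-scalar (compound) attributes are simply skipped rather than mapped.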
Use of io.jhdf.api.Attribute in project drill by apache.
The class HDF5Utils, method getAttribute.
/**
 * This function returns an HDF5Attribute object for use when Drill maps the attributes.
 *
 * @param pathName The path to retrieve attributes from
 * @param key The key for the specific attribute you are retrieving
 * @param hdf5File The HdfFile reader object for the file you are querying
 * @return HDF5Attribute The attribute from the path with the key that was requested
 */
public static HDF5Attribute getAttribute(String pathName, String key, HdfFile hdf5File) {
  if (pathName.equals("")) {
    pathName = "/";
  }

  if (hdf5File.getByPath(pathName) == null) {
    return null;
  }

  if (key.equals("dimensions")) {
    int[] dimensions = hdf5File.getDatasetByPath(pathName).getDimensions();
    ArrayUtils.reverse(dimensions);
    return new HDF5Attribute(MinorType.LIST, "dimensions", dimensions);
  }

  if (key.equals("dataType")) {
    String typeName = hdf5File.getDatasetByPath(pathName).getDataType().getJavaType().getName();
    return new HDF5Attribute(getDataType(hdf5File.getDatasetByPath(pathName).getDataType()), "DataType", typeName);
  }

  if (hdf5File.getByPath(pathName).getAttribute(key) == null) {
    return null;
  }

  Attribute attribute = hdf5File.getByPath(pathName).getAttribute(key);
  Class<?> type = hdf5File.getByPath(pathName).getAttribute(key).getJavaType();

  if (type.isAssignableFrom(long[].class)) {
    return new HDF5Attribute(MinorType.BIGINT, key, attribute.getData(), true);
  } else if (type.isAssignableFrom(int[].class)) {
    return new HDF5Attribute(MinorType.INT, key, attribute.getData(), true);
  } else if (type.isAssignableFrom(short[].class)) {
    return new HDF5Attribute(MinorType.INT, key, attribute.getData(), true);
  } else if (type.isAssignableFrom(byte[].class)) {
    return new HDF5Attribute(MinorType.INT, key, attribute.getData(), true);
  } else if (type.isAssignableFrom(double[].class)) {
    return new HDF5Attribute(MinorType.FLOAT8, key, attribute.getData(), true);
  } else if (type.isAssignableFrom(float[].class)) {
    return new HDF5Attribute(MinorType.FLOAT8, key, attribute.getData(), true);
  } else if (type.isAssignableFrom(String[].class)) {
    return new HDF5Attribute(MinorType.VARCHAR, key, attribute.getData(), true);
  } else if (type.isAssignableFrom(java.lang.Long.class)) {
    return new HDF5Attribute(MinorType.BIGINT, key, attribute.getData());
  } else if (type.isAssignableFrom(java.lang.Integer.class)) {
    return new HDF5Attribute(MinorType.INT, key, attribute.getData());
  } else if (type.isAssignableFrom(java.lang.Short.class)) {
    return new HDF5Attribute(MinorType.INT, key, attribute.getData());
  } else if (type.isAssignableFrom(java.lang.Byte.class)) {
    return new HDF5Attribute(MinorType.INT, key, attribute.getData());
  } else if (type.isAssignableFrom(java.lang.Double.class)) {
    return new HDF5Attribute(MinorType.FLOAT8, key, attribute.getData());
  } else if (type.isAssignableFrom(float.class)) {
    return new HDF5Attribute(MinorType.FLOAT4, key, attribute.getData());
  } else if (type.isAssignableFrom(String.class)) {
    return new HDF5Attribute(MinorType.VARCHAR, key, attribute.getData());
  } else if (type.isAssignableFrom(boolean.class)) {
    return new HDF5Attribute(MinorType.BIT, key, attribute.getData());
  } else /*else if (type.isAssignableFrom(HDF5EnumerationValue.class)) {
    // Convert HDF5 Enum to String
    return new HDF5Attribute(MinorType.GENERIC_OBJECT, key, attribute.getData());
  }*/ if (type.isAssignableFrom(BitSet.class)) {
    return new HDF5Attribute(MinorType.BIT, key, attribute.getData());
  }
  logger.warn("Reading attributes of type {} not yet implemented.", attribute.getJavaType());
  return null;
}
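
A hedged usage sketch of this helper. The file name, dataset path, and attribute key are hypothetical, and the Drill package in the imports (org.apache.drill.exec.store.hdf5) is an assumption; the call signature itself matches the method above. It exercises a regular named attribute plus the two synthetic keys ("dimensions" and "dataType") handled at the top of the method.

import io.jhdf.HdfFile;

import java.nio.file.Paths;

// Assumed package for the Drill HDF5 format plugin classes referenced above.
import org.apache.drill.exec.store.hdf5.HDF5Attribute;
import org.apache.drill.exec.store.hdf5.HDF5Utils;

public class HDF5UtilsAttributeSketch {
  public static void main(String[] args) {
    // Hypothetical file, dataset path, and attribute key.
    try (HdfFile hdfFile = new HdfFile(Paths.get("example.h5"))) {
      // A regular named attribute; getAttribute() returns null if the path or key is missing.
      HDF5Attribute units = HDF5Utils.getAttribute("/dataset1", "units", hdfFile);
      if (units != null) {
        System.out.println("Found attribute: " + units.getKey());
      }

      // The two synthetic keys handled above: "dimensions" returns the dataset's reversed
      // dimension array, and "dataType" reports the dataset's Java type name.
      HDF5Attribute dimensions = HDF5Utils.getAttribute("/dataset1", "dimensions", hdfFile);
      HDF5Attribute dataType = HDF5Utils.getAttribute("/dataset1", "dataType", hdfFile);
      System.out.println("Dimensions attribute: " + dimensions);
      System.out.println("Data type attribute: " + dataType);
    }
  }
}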