Use of org.apache.hadoop.io.ArrayWritable in the Apache Hive project.
Class StandardParquetHiveMapInspector, method getMapValueElement.
/**
 * Returns the value associated with {@code key} in a map represented either as a
 * Parquet {@link ArrayWritable} of (key, value) entry pairs or as a plain {@link Map}.
 *
 * @param data the map container; {@code null} yields {@code null}
 * @param key  the lookup key, compared via {@code key.equals(entryKey)}; {@code null} yields {@code null}
 * @return the matching value, or {@code null} if absent
 * @throws UnsupportedOperationException if {@code data} is neither an ArrayWritable nor a Map
 */
@Override
public Object getMapValueElement(final Object data, final Object key) {
if (data == null || key == null) {
return null;
}
if (data instanceof ArrayWritable) {
// Parquet encodes a map as an array of entries, each itself an
// ArrayWritable holding [key, value].
final Writable[] mapArray = ((ArrayWritable) data).get();
if (mapArray == null || mapArray.length == 0) {
return null;
}
for (final Writable obj : mapArray) {
final ArrayWritable mapObj = (ArrayWritable) obj;
final Writable[] arr = mapObj.get();
// Guard against a malformed entry: a null or short pair array would
// otherwise raise NPE / ArrayIndexOutOfBoundsException on arr[0]/arr[1].
if (arr == null || arr.length < 2) {
continue;
}
if (key.equals(arr[0])) {
return arr[1];
}
}
// No entry matched the key.
return null;
}
if (data instanceof Map) {
return ((Map) data).get(key);
}
throw new UnsupportedOperationException("Cannot inspect " + data.getClass().getCanonicalName());
}
Use of org.apache.hadoop.io.ArrayWritable in the Apache Hive project.
Class ArrayWritableObjectInspector, method getStructFieldData.
/**
 * Extracts the field named by {@code fieldRef} from a struct represented either as an
 * {@link ArrayWritable} of field values or as a plain {@link List}.
 *
 * @param data     the struct container; {@code null} yields {@code null}
 * @param fieldRef the field to read; must be a {@code StructFieldImpl}
 * @return the field value, or {@code null} when the struct has fewer fields than the index
 * @throws UnsupportedOperationException if {@code data} is neither an ArrayWritable nor a List
 */
@Override
public Object getStructFieldData(final Object data, final StructField fieldRef) {
if (data == null) {
return null;
}
if (data instanceof ArrayWritable) {
// Hoisted: the original called arr.get() twice; read the backing array once.
final Writable[] fields = ((ArrayWritable) data).get();
final StructFieldImpl structField = (StructFieldImpl) fieldRef;
// Non-root structs use an adjusted index — presumably to skip wrapper
// columns; TODO(review): confirm against the enclosing inspector.
final int index = isRoot ? structField.getIndex() : structField.adjustedIndex;
// A record written with fewer fields than the schema declares simply
// lacks the trailing fields; report those as null rather than failing.
if (index < fields.length) {
return fields[index];
} else {
return null;
}
}
//is something else.
if (data instanceof List) {
return ((List) data).get(((StructFieldImpl) fieldRef).getIndex());
}
throw new UnsupportedOperationException("Cannot inspect " + data.getClass().getCanonicalName());
}
Use of org.apache.hadoop.io.ArrayWritable in the Apache Hive project.
Class DeepParquetHiveMapInspector, method getMapValueElement.
/**
 * Returns the value associated with {@code key}, performing a "deep" comparison:
 * besides raw equality, the key is also compared against the primitive Java form
 * and the primitive Writable form of each stored key (via {@code keyInspector}),
 * so callers may pass either representation.
 *
 * @param data the map container: an ArrayWritable of (key, value) entry pairs, or a Map
 * @param key  the lookup key in any of the three supported representations
 * @return the matching value, or {@code null} if absent
 * @throws UnsupportedOperationException if {@code data} is neither an ArrayWritable nor a Map
 */
@Override
public Object getMapValueElement(final Object data, final Object key) {
if (data == null || key == null) {
return null;
}
if (data instanceof ArrayWritable) {
// Parquet encodes a map as an array of entries, each an ArrayWritable
// holding [key, value].
final Writable[] mapArray = ((ArrayWritable) data).get();
if (mapArray == null || mapArray.length == 0) {
return null;
}
for (final Writable obj : mapArray) {
final ArrayWritable mapObj = (ArrayWritable) obj;
final Writable[] arr = mapObj.get();
// Try raw equality first, then the key converted to its primitive Java
// object, then its primitive Writable object — accepts whichever form
// the caller happened to pass.
if (key.equals(arr[0]) || key.equals(((PrimitiveObjectInspector) keyInspector).getPrimitiveJavaObject(arr[0])) || key.equals(((PrimitiveObjectInspector) keyInspector).getPrimitiveWritableObject(arr[0]))) {
return arr[1];
}
}
return null;
}
if (data instanceof Map) {
final Map<?, ?> map = (Map<?, ?>) data;
// Fast path: direct hash lookup when the key is stored in the same form.
if (map.containsKey(key)) {
return map.get(key);
}
// Slow path: linear scan comparing against the converted representations
// of each stored key.
for (final Map.Entry<?, ?> entry : map.entrySet()) {
if (key.equals(((PrimitiveObjectInspector) keyInspector).getPrimitiveJavaObject(entry.getKey())) || key.equals(((PrimitiveObjectInspector) keyInspector).getPrimitiveWritableObject(entry.getKey()))) {
return entry.getValue();
}
}
return null;
}
throw new UnsupportedOperationException("Cannot inspect " + data.getClass().getCanonicalName());
}
Use of org.apache.hadoop.io.ArrayWritable in the Apache Hive project.
Class ParquetHiveArrayInspector, method getList.
/**
 * Materializes the given array container as a {@link List}.
 *
 * @param data an {@link ArrayWritable} (copied into a new mutable list), a
 *             {@link List} (returned as-is), or {@code null} (yields {@code null})
 * @return the element list, or {@code null} for a null container or null backing array
 * @throws UnsupportedOperationException if {@code data} is neither an ArrayWritable nor a List
 */
@Override
public List<?> getList(final Object data) {
if (data == null) {
return null;
}
if (data instanceof ArrayWritable) {
final Writable[] elements = ((ArrayWritable) data).get();
if (elements == null) {
return null;
}
// Copy the backing array into a pre-sized mutable list.
final List<Writable> result = new ArrayList<Writable>(elements.length);
for (int i = 0; i < elements.length; i++) {
result.add(elements[i]);
}
return result;
}
if (data instanceof List) {
return (List<?>) data;
}
throw new UnsupportedOperationException("Cannot inspect " + data.getClass().getCanonicalName());
}
Use of org.apache.hadoop.io.ArrayWritable in the Apache Hive project.
Class TypedBytesWritableInput, method readArray.
/**
 * Reads a typed-bytes vector from the input stream into an {@link ArrayWritable}
 * of {@code TypedBytesWritable} elements.
 *
 * @param aw a reusable container whose value class must be {@code TypedBytesWritable},
 *           or {@code null} to have a fresh one allocated
 * @return the populated container
 * @throws IOException      if reading from the underlying stream fails
 * @throws RuntimeException if the supplied container has a different value class
 */
public ArrayWritable readArray(ArrayWritable aw) throws IOException {
// Allocate a container when the caller did not supply one; otherwise
// insist that the supplied one holds TypedBytesWritable values.
if (aw == null) {
aw = new ArrayWritable(TypedBytesWritable.class);
} else if (!aw.getValueClass().equals(TypedBytesWritable.class)) {
throw new RuntimeException("value class has to be TypedBytesWritable");
}
// The vector header carries the element count; read that many raw elements.
final int length = in.readVectorHeader();
final Writable[] elements = new Writable[length];
int i = 0;
while (i < length) {
elements[i] = new TypedBytesWritable(in.readRaw());
i++;
}
aw.set(elements);
return aw;
}
Aggregations