Use of org.apache.hadoop.io.ArrayWritable in the Apache CarbonData project.
The createStruct method of the class CarbonHiveRecordReader.
public ArrayWritable createStruct(Object obj, StructObjectInspector inspector)
    throws SerDeException {
  // Resolve each field of the Hive struct and convert it to a Writable.
  List<? extends StructField> fields = inspector.getAllStructFieldRefs();
  Writable[] arr = new Writable[fields.size()];
  for (int i = 0; i < fields.size(); i++) {
    StructField field = fields.get(i);
    Object subObj = inspector.getStructFieldData(obj, field);
    ObjectInspector subInspector = field.getFieldObjectInspector();
    arr[i] = createObject(subObj, subInspector);
  }
  // Wrap the converted fields into a single ArrayWritable row.
  return new ArrayWritable(Writable.class, arr);
}
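The helper createObject called above is not shown in this snippet. A minimal sketch of a dispatcher consistent with createStruct, assuming only primitive and nested struct fields are present (the real CarbonData method may handle more categories), might look like:

// Hypothetical sketch: not the project's actual createObject, just a
// dispatcher consistent with how createStruct uses it.
private Writable createObject(Object obj, ObjectInspector inspector) throws SerDeException {
  if (obj == null) {
    return null;
  }
  switch (inspector.getCategory()) {
    case STRUCT:
      // Nested structs recurse back into createStruct.
      return createStruct(obj, (StructObjectInspector) inspector);
    case PRIMITIVE:
      // Primitive inspectors can hand back a Writable representation directly.
      return (Writable) ((PrimitiveObjectInspector) inspector).getPrimitiveWritableObject(obj);
    default:
      throw new SerDeException("Unsupported category: " + inspector.getCategory());
  }
}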
Use of org.apache.hadoop.io.ArrayWritable in the Apache CarbonData project.
The next method of the class CarbonHiveRecordReader.
@Override
public boolean next(Void aVoid, ArrayWritable value) throws IOException {
  if (carbonIterator.hasNext()) {
    // Read the next row from CarbonData and convert it to an ArrayWritable.
    Object obj = readSupport.readRow(carbonIterator.next());
    ArrayWritable tmpValue;
    try {
      tmpValue = createArrayWritable(obj);
    } catch (SerDeException se) {
      throw new IOException(se.getMessage(), se.getCause());
    }
    if (value != tmpValue) {
      final Writable[] arrValue = value.get();
      final Writable[] arrCurrent = tmpValue.get();
      // valueObj is a field of CarbonHiveRecordReader holding the reusable row object.
      if (valueObj != null && arrValue.length == arrCurrent.length) {
        // Copy the freshly read fields into the caller's reusable value object.
        System.arraycopy(arrCurrent, 0, arrValue, 0, arrCurrent.length);
      } else {
        if (arrValue.length != arrCurrent.length) {
          throw new IOException("CarbonHiveInput : size of object differs. Value size : "
              + arrValue.length + ", Current Object size : " + arrCurrent.length);
        } else {
          throw new IOException("CarbonHiveInput can not support RecordReaders that"
              + " don't return same key & value & value is null");
        }
      }
    }
    return true;
  } else {
    return false;
  }
}
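Because next reuses the caller-supplied ArrayWritable rather than allocating a new one per row, a consumer drives it the way Hadoop's mapred layer would. A usage sketch, assuming reader is an already-initialized CarbonHiveRecordReader:

// Hypothetical usage sketch: iterate all rows through the reusable value object.
ArrayWritable value = reader.createValue();
while (reader.next(null, value)) {
  // next() copied the current row's fields into value's backing array,
  // so the same ArrayWritable instance can be inspected on each iteration.
  for (Writable field : value.get()) {
    System.out.println(field);
  }
}
reader.close();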