Use of org.apache.hadoop.io.ArrayWritable in project carbondata by apache:
class CarbonObjectInspector, method getStructFieldsDataAsList.
@Override
public List<Object> getStructFieldsDataAsList(final Object data) {
  if (data == null) {
    return null;
  }
  if (data instanceof ArrayWritable) {
    // Unwrap the row container and expose its field Writables as a mutable list.
    final ArrayWritable arr = (ArrayWritable) data;
    final Object[] arrWritable = arr.get();
    return new ArrayList<Object>(Arrays.asList(arrWritable));
  }
  throw new UnsupportedOperationException(
      "Cannot inspect " + data.getClass().getCanonicalName());
}
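For context, a minimal sketch of what this unwrapping does with a concrete ArrayWritable row; the two-column layout and the values here are hypothetical, not taken from CarbonData:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

public class StructFieldsSketch {
  public static void main(String[] args) {
    // One struct row stored the way the inspector expects: a Writable per field.
    ArrayWritable row = new ArrayWritable(Writable.class,
        new Writable[] { new IntWritable(42), new Text("carbon") });
    // Mirrors the body of getStructFieldsDataAsList above.
    List<Object> fields = new ArrayList<Object>(Arrays.asList(row.get()));
    System.out.println(fields); // prints [42, carbon]
  }
}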
Use of org.apache.hadoop.io.ArrayWritable in project carbondata by apache:
class MapredCarbonInputFormat, method getRecordReader.
@Override
public RecordReader<Void, ArrayWritable> getRecordReader(InputSplit inputSplit,
    JobConf jobConf, Reporter reporter) throws IOException {
  QueryModel queryModel = getQueryModel(jobConf);
  CarbonReadSupport<ArrayWritable> readSupport = getReadSupportClass(jobConf);
  return new CarbonHiveRecordReader(queryModel, readSupport, inputSplit, jobConf);
}
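A hedged sketch of how a caller typically drives this reader; `format`, `split`, and `jobConf` are assumed to have been set up elsewhere (e.g. via getSplits) and are not shown:

// Standard mapred read loop over RecordReader<Void, ArrayWritable>.
RecordReader<Void, ArrayWritable> reader =
    format.getRecordReader(split, jobConf, Reporter.NULL);
try {
  Void key = reader.createKey();              // this format keys on Void
  ArrayWritable value = reader.createValue(); // reused across next() calls
  while (reader.next(key, value)) {
    // Each row arrives as one Writable per column.
    for (Writable column : value.get()) {
      System.out.print(column + "\t");
    }
    System.out.println();
  }
} finally {
  reader.close();
}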
Use of org.apache.hadoop.io.ArrayWritable in project carbondata by apache:
class CarbonArrayInspector, method getListLength.
@Override
public int getListLength(final Object data) {
  if (data == null) {
    return -1;
  }
  if (data instanceof ArrayWritable) {
    final Writable[] listContainer = ((ArrayWritable) data).get();
    if (listContainer == null || listContainer.length == 0) {
      return -1;
    }
    // The outer container holds a single element: the ArrayWritable that
    // carries the actual list entries. A null element means an empty list.
    final Writable subObj = listContainer[0];
    if (subObj == null) {
      return 0;
    }
    return ((ArrayWritable) subObj).get().length;
  }
  throw new UnsupportedOperationException(
      "Cannot inspect " + data.getClass().getCanonicalName());
}
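Based on the code above, the inspector appears to assume a nested layout: an outer ArrayWritable wrapping one inner ArrayWritable that carries the elements. A small sketch of that shape, with hypothetical values:

// Outer container wraps a single inner ArrayWritable holding the list items.
ArrayWritable inner = new ArrayWritable(Writable.class,
    new Writable[] { new IntWritable(1), new IntWritable(2), new IntWritable(3) });
ArrayWritable outer = new ArrayWritable(Writable.class, new Writable[] { inner });
// getListLength(outer) would return 3; getListLength(null) returns -1,
// and an outer container whose single slot is null returns 0.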
Use of org.apache.hadoop.io.ArrayWritable in project carbondata by apache:
class CarbonHiveRecordReader, method createStruct.
public ArrayWritable createStruct(Object obj, StructObjectInspector inspector)
    throws SerDeException {
  // Convert each struct field through its own inspector, then wrap the
  // results in a single ArrayWritable row.
  List<? extends StructField> fields = inspector.getAllStructFieldRefs();
  Writable[] arr = new Writable[fields.size()];
  for (int i = 0; i < fields.size(); i++) {
    StructField field = fields.get(i);
    Object subObj = inspector.getStructFieldData(obj, field);
    ObjectInspector subInspector = field.getFieldObjectInspector();
    arr[i] = createObject(subObj, subInspector);
  }
  return new ArrayWritable(Writable.class, arr);
}
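To exercise createStruct outside the read path, one could pair it with a standard struct inspector from Hive's serde2 API. Whether createObject accepts Writable field values directly is an assumption here, and `recordReader` is a hypothetical CarbonHiveRecordReader instance:

// Build a standard struct inspector for a two-column (id, name) row.
List<String> names = Arrays.asList("id", "name");
List<ObjectInspector> fieldInspectors = Arrays.asList(
    (ObjectInspector) PrimitiveObjectInspectorFactory.writableIntObjectInspector,
    PrimitiveObjectInspectorFactory.writableStringObjectInspector);
StructObjectInspector soi =
    ObjectInspectorFactory.getStandardStructObjectInspector(names, fieldInspectors);
// In the standard representation, one struct value is a List of field values.
Object row = Arrays.asList((Object) new IntWritable(7), new Text("abc"));
ArrayWritable struct = recordReader.createStruct(row, soi);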
Use of org.apache.hadoop.io.ArrayWritable in project carbondata by apache:
class CarbonHiveRecordReader, method next.
@Override
public boolean next(Void aVoid, ArrayWritable value) throws IOException {
  if (carbonIterator.hasNext()) {
    Object obj = readSupport.readRow(carbonIterator.next());
    ArrayWritable tmpValue = null;
    try {
      tmpValue = createArrayWritable(obj);
    } catch (SerDeException se) {
      throw new IOException(se.getMessage(), se.getCause());
    }
    if (value != tmpValue) {
      final Writable[] arrValue = value.get();
      final Writable[] arrCurrent = tmpValue.get();
      // valueObj is a field of CarbonHiveRecordReader; when the widths match,
      // copy into the caller's array instead of allocating a new container.
      if (valueObj != null && arrValue.length == arrCurrent.length) {
        System.arraycopy(arrCurrent, 0, arrValue, 0, arrCurrent.length);
      } else if (arrValue.length != arrCurrent.length) {
        throw new IOException("CarbonHiveInput: size of object differs. Value size: "
            + arrValue.length + ", current object size: " + arrCurrent.length);
      } else {
        throw new IOException("CarbonHiveInput cannot support RecordReaders that"
            + " don't return the same key & value, with a null value");
      }
    }
    return true;
  } else {
    return false;
  }
}
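The branching above suggests a reuse contract: the caller hands in a value whose backing array already has the expected width, and next() refreshes its contents in place. A hedged sketch, where `columnCount` and `process` are hypothetical:

// Preallocate one value object sized to the table's column count and reuse it.
ArrayWritable reusable = new ArrayWritable(Writable.class, new Writable[columnCount]);
while (reader.next(null, reusable)) {
  process(reusable.get()); // same array instance, freshly copied columns
}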