Search in sources :

Example 66 with ArrayWritable

Use of org.apache.hadoop.io.ArrayWritable in the project carbondata by apache.

The class CarbonObjectInspector, method getStructFieldsDataAsList.

@Override
public List<Object> getStructFieldsDataAsList(final Object data) {
    // A null struct propagates as null rather than an empty field list.
    if (data == null) {
        return null;
    }
    // Only ArrayWritable-backed rows can be inspected by this inspector.
    if (!(data instanceof ArrayWritable)) {
        throw new UnsupportedOperationException("Cannot inspect " + data.getClass().getCanonicalName());
    }
    final Object[] elements = ((ArrayWritable) data).get();
    // Return a mutable copy of the field values, matching the previous
    // ArrayList-of-Arrays.asList behavior.
    final List<Object> fieldData = new ArrayList<Object>(elements.length);
    for (final Object element : elements) {
        fieldData.add(element);
    }
    return fieldData;
}
Also used : ArrayWritable(org.apache.hadoop.io.ArrayWritable) ArrayList(java.util.ArrayList)

Example 67 with ArrayWritable

Use of org.apache.hadoop.io.ArrayWritable in the project carbondata by apache.

The class MapredCarbonInputFormat, method getRecordReader.

@Override
public RecordReader<Void, ArrayWritable> getRecordReader(InputSplit inputSplit, JobConf jobConf, Reporter reporter) throws IOException {
    // Build the Carbon query from the job configuration, pick the configured
    // read support, and wire both into a Hive-compatible record reader.
    final QueryModel model = getQueryModel(jobConf);
    final CarbonReadSupport<ArrayWritable> support = getReadSupportClass(jobConf);
    return new CarbonHiveRecordReader(model, support, inputSplit, jobConf);
}
Also used : ArrayWritable(org.apache.hadoop.io.ArrayWritable) QueryModel(org.apache.carbondata.core.scan.model.QueryModel)

Example 68 with ArrayWritable

Use of org.apache.hadoop.io.ArrayWritable in the project carbondata by apache.

The class CarbonArrayInspector, method getListLength.

/**
 * Returns the length of the list held by {@code data}.
 *
 * <p>Carbon wraps a list as an outer {@code ArrayWritable} whose first element is the
 * inner {@code ArrayWritable} containing the actual list items; the inner array's
 * length is the list length.</p>
 *
 * @param data the list value to inspect; expected to be an {@code ArrayWritable}
 * @return the list length; -1 when {@code data} is null or the outer container is
 *         null/empty; 0 when the inner container slot is null
 * @throws UnsupportedOperationException when {@code data} (or its first element)
 *         is not an {@code ArrayWritable}
 */
@Override
public int getListLength(final Object data) {
    if (data == null) {
        return -1;
    }
    if (data instanceof ArrayWritable) {
        final Writable[] listContainer = ((ArrayWritable) data).get();
        if (listContainer == null || listContainer.length == 0) {
            return -1;
        }
        final Writable subObj = listContainer[0];
        if (subObj == null) {
            return 0;
        }
        // Fix: guard the cast. Previously a first element that was not a nested
        // ArrayWritable surfaced as a raw ClassCastException; report it the same
        // way as any other uninspectable type.
        if (!(subObj instanceof ArrayWritable)) {
            throw new UnsupportedOperationException("Cannot inspect " + subObj.getClass().getCanonicalName());
        }
        return ((ArrayWritable) subObj).get().length;
    }
    throw new UnsupportedOperationException("Cannot inspect " + data.getClass().getCanonicalName());
}
Also used : ArrayWritable(org.apache.hadoop.io.ArrayWritable) Writable(org.apache.hadoop.io.Writable) ArrayWritable(org.apache.hadoop.io.ArrayWritable)

Example 69 with ArrayWritable

Use of org.apache.hadoop.io.ArrayWritable in the project carbondata by apache.

The class CarbonHiveRecordReader, method createStruct.

/**
 * Converts a Hive struct value into an {@code ArrayWritable} with one
 * {@code Writable} slot per struct field, in field-reference order.
 *
 * @param obj       the struct value to convert (interpreted by {@code inspector})
 * @param inspector the inspector describing the struct's fields
 * @return an {@code ArrayWritable} holding each field converted via {@code createObject}
 * @throws SerDeException if converting any field value fails
 */
public ArrayWritable createStruct(Object obj, StructObjectInspector inspector) throws SerDeException {
    // Fix: use the generic field list directly instead of a raw List with a
    // per-element (StructField) cast.
    final List<? extends StructField> fields = inspector.getAllStructFieldRefs();
    final Writable[] arr = new Writable[fields.size()];
    for (int i = 0; i < fields.size(); i++) {
        final StructField field = fields.get(i);
        final Object fieldData = inspector.getStructFieldData(obj, field);
        // Each field is converted with its own sub-inspector.
        arr[i] = createObject(fieldData, field.getFieldObjectInspector());
    }
    return new ArrayWritable(Writable.class, arr);
}
Also used : ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) StructField(org.apache.hadoop.hive.serde2.objectinspector.StructField) ArrayWritable(org.apache.hadoop.io.ArrayWritable) DateWritable(org.apache.hadoop.hive.serde2.io.DateWritable) Writable(org.apache.hadoop.io.Writable) LongWritable(org.apache.hadoop.io.LongWritable) TimestampWritable(org.apache.hadoop.hive.serde2.io.TimestampWritable) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) ArrayWritable(org.apache.hadoop.io.ArrayWritable) IntWritable(org.apache.hadoop.io.IntWritable) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) ArrayList(java.util.ArrayList) List(java.util.List)

Example 70 with ArrayWritable

Use of org.apache.hadoop.io.ArrayWritable in the project carbondata by apache.

The class CarbonHiveRecordReader, method next.

// Advances to the next row, copying its field values into the caller-supplied
// `value` buffer. Returns true if a row was produced, false at end of data.
@Override
public boolean next(Void aVoid, ArrayWritable value) throws IOException {
    if (carbonIterator.hasNext()) {
        // Pull the next raw row from Carbon and let the read support translate it.
        Object obj = readSupport.readRow(carbonIterator.next());
        ArrayWritable tmpValue = null;
        try {
            tmpValue = createArrayWritable(obj);
        } catch (SerDeException se) {
            // NOTE(review): se.getCause() may be null, which drops the
            // SerDeException itself from the chain — consider passing `se`
            // directly as the cause.
            throw new IOException(se.getMessage(), se.getCause());
        }
        // Hive reuses `value` across calls; copy the freshly built row into it
        // in place rather than replacing the reference (skip when they are
        // already the same object).
        if (value != tmpValue) {
            final Writable[] arrValue = value.get();
            final Writable[] arrCurrent = tmpValue.get();
            // `valueObj` is a field declared elsewhere in this class —
            // presumably the reusable row buffer; TODO confirm why it (and not
            // `value`) gates the copy here.
            if (valueObj != null && arrValue.length == arrCurrent.length) {
                System.arraycopy(arrCurrent, 0, arrValue, 0, arrCurrent.length);
            } else {
                // Distinguish a field-count mismatch from the null-buffer case
                // in the error message.
                if (arrValue.length != arrCurrent.length) {
                    throw new IOException("CarbonHiveInput : size of object differs. Value" + " size :  " + arrValue.length + ", Current Object size : " + arrCurrent.length);
                } else {
                    throw new IOException("CarbonHiveInput can not support RecordReaders that" + " don't return same key & value & value is null");
                }
            }
        }
        return true;
    } else {
        return false;
    }
}
Also used : ArrayWritable(org.apache.hadoop.io.ArrayWritable) DateWritable(org.apache.hadoop.hive.serde2.io.DateWritable) Writable(org.apache.hadoop.io.Writable) LongWritable(org.apache.hadoop.io.LongWritable) TimestampWritable(org.apache.hadoop.hive.serde2.io.TimestampWritable) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) ArrayWritable(org.apache.hadoop.io.ArrayWritable) IntWritable(org.apache.hadoop.io.IntWritable) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) IOException(java.io.IOException) SerDeException(org.apache.hadoop.hive.serde2.SerDeException)

Aggregations

ArrayWritable (org.apache.hadoop.io.ArrayWritable)72 Test (org.junit.Test)41 IntWritable (org.apache.hadoop.io.IntWritable)31 Writable (org.apache.hadoop.io.Writable)29 Path (org.apache.hadoop.fs.Path)18 DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable)18 LongWritable (org.apache.hadoop.io.LongWritable)18 RecordConsumer (org.apache.parquet.io.api.RecordConsumer)18 ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable)15 ArrayList (java.util.ArrayList)13 BytesWritable (org.apache.hadoop.io.BytesWritable)10 List (java.util.List)9 BooleanWritable (org.apache.hadoop.io.BooleanWritable)8 FloatWritable (org.apache.hadoop.io.FloatWritable)8 StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)6 NullWritable (org.apache.hadoop.io.NullWritable)6 Text (org.apache.hadoop.io.Text)6 ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable)5 PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector)5 MapWritable (org.apache.hadoop.io.MapWritable)5