Search in sources :

Example 26 with ArrayWritable

use of org.apache.hadoop.io.ArrayWritable in project hive by apache.

The method getMapValueElement of the class StandardParquetHiveMapInspector.

/**
 * Looks up {@code key} in a map represented either as a Parquet-style
 * {@link ArrayWritable} of key/value pairs or as a plain {@link Map}.
 *
 * @param data the map container to search; may be null
 * @param key  the key to look for; may be null
 * @return the matching value, or null when data/key is null, the map is
 *         empty, or no entry matches
 * @throws UnsupportedOperationException if data is neither an
 *         ArrayWritable nor a Map
 */
@Override
public Object getMapValueElement(final Object data, final Object key) {
    if (data == null || key == null) {
        return null;
    }
    if (data instanceof ArrayWritable) {
        final Writable[] entries = ((ArrayWritable) data).get();
        if (entries == null || entries.length == 0) {
            return null;
        }
        // Each entry is itself a two-element ArrayWritable: [key, value].
        for (final Writable entry : entries) {
            final Writable[] pair = ((ArrayWritable) entry).get();
            if (key.equals(pair[0])) {
                return pair[1];
            }
        }
        return null;
    }
    if (data instanceof Map) {
        return ((Map) data).get(key);
    }
    throw new UnsupportedOperationException("Cannot inspect " + data.getClass().getCanonicalName());
}
Also used : ArrayWritable(org.apache.hadoop.io.ArrayWritable) Writable(org.apache.hadoop.io.Writable) ArrayWritable(org.apache.hadoop.io.ArrayWritable) Map(java.util.Map)

Example 27 with ArrayWritable

use of org.apache.hadoop.io.ArrayWritable in project hive by apache.

The method getStructFieldData of the class ArrayWritableObjectInspector.

@Override
public Object getStructFieldData(final Object data, final StructField fieldRef) {
    if (data == null) {
        return null;
    }
    if (data instanceof ArrayWritable) {
        final ArrayWritable arr = (ArrayWritable) data;
        final StructFieldImpl structField = (StructFieldImpl) fieldRef;
        int index = isRoot ? structField.getIndex() : structField.adjustedIndex;
        if (index < arr.get().length) {
            return arr.get()[index];
        } else {
            return null;
        }
    }
    //is something else.
    if (data instanceof List) {
        return ((List) data).get(((StructFieldImpl) fieldRef).getIndex());
    }
    throw new UnsupportedOperationException("Cannot inspect " + data.getClass().getCanonicalName());
}
Also used : ArrayWritable(org.apache.hadoop.io.ArrayWritable) ArrayList(java.util.ArrayList) List(java.util.List)

Example 28 with ArrayWritable

use of org.apache.hadoop.io.ArrayWritable in project hive by apache.

The method getMapValueElement of the class DeepParquetHiveMapInspector.

@Override
public Object getMapValueElement(final Object data, final Object key) {
    if (data == null || key == null) {
        return null;
    }
    if (data instanceof ArrayWritable) {
        final Writable[] mapArray = ((ArrayWritable) data).get();
        if (mapArray == null || mapArray.length == 0) {
            return null;
        }
        for (final Writable obj : mapArray) {
            final ArrayWritable mapObj = (ArrayWritable) obj;
            final Writable[] arr = mapObj.get();
            if (key.equals(arr[0]) || key.equals(((PrimitiveObjectInspector) keyInspector).getPrimitiveJavaObject(arr[0])) || key.equals(((PrimitiveObjectInspector) keyInspector).getPrimitiveWritableObject(arr[0]))) {
                return arr[1];
            }
        }
        return null;
    }
    if (data instanceof Map) {
        final Map<?, ?> map = (Map<?, ?>) data;
        if (map.containsKey(key)) {
            return map.get(key);
        }
        for (final Map.Entry<?, ?> entry : map.entrySet()) {
            if (key.equals(((PrimitiveObjectInspector) keyInspector).getPrimitiveJavaObject(entry.getKey())) || key.equals(((PrimitiveObjectInspector) keyInspector).getPrimitiveWritableObject(entry.getKey()))) {
                return entry.getValue();
            }
        }
        return null;
    }
    throw new UnsupportedOperationException("Cannot inspect " + data.getClass().getCanonicalName());
}
Also used : ArrayWritable(org.apache.hadoop.io.ArrayWritable) Writable(org.apache.hadoop.io.Writable) ArrayWritable(org.apache.hadoop.io.ArrayWritable) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) Map(java.util.Map)

Example 29 with ArrayWritable

use of org.apache.hadoop.io.ArrayWritable in project hive by apache.

The method getList of the class ParquetHiveArrayInspector.

@Override
public List<?> getList(final Object data) {
    if (data == null) {
        return null;
    }
    if (data instanceof ArrayWritable) {
        final Writable[] array = ((ArrayWritable) data).get();
        if (array == null) {
            return null;
        }
        final List<Writable> list = new ArrayList<Writable>(array.length);
        for (final Writable obj : array) {
            list.add(obj);
        }
        return list;
    }
    if (data instanceof List) {
        return (List<?>) data;
    }
    throw new UnsupportedOperationException("Cannot inspect " + data.getClass().getCanonicalName());
}
Also used : ArrayWritable(org.apache.hadoop.io.ArrayWritable) ArrayList(java.util.ArrayList) Writable(org.apache.hadoop.io.Writable) ArrayWritable(org.apache.hadoop.io.ArrayWritable) List(java.util.List) ArrayList(java.util.ArrayList)

Example 30 with ArrayWritable

use of org.apache.hadoop.io.ArrayWritable in project hive by apache.

The method readArray of the class TypedBytesWritableInput.

/**
 * Reads a typed-bytes vector from the underlying input into an
 * {@link ArrayWritable} of {@code TypedBytesWritable} elements.
 *
 * @param aw the array to fill, or null to allocate a fresh one; when
 *           non-null its value class must be TypedBytesWritable
 * @return the populated array (the same instance when one was passed in)
 * @throws IOException      on read failure from the input
 * @throws RuntimeException when a non-null {@code aw} has the wrong value class
 */
public ArrayWritable readArray(ArrayWritable aw) throws IOException {
    if (aw == null) {
        aw = new ArrayWritable(TypedBytesWritable.class);
    } else if (!aw.getValueClass().equals(TypedBytesWritable.class)) {
        throw new RuntimeException("value class has to be TypedBytesWritable");
    }
    // The vector header carries the element count for the payload that follows.
    final int length = in.readVectorHeader();
    final Writable[] elements = new Writable[length];
    for (int idx = 0; idx < length; idx++) {
        elements[idx] = new TypedBytesWritable(in.readRaw());
    }
    aw.set(elements);
    return aw;
}
Also used : ArrayWritable(org.apache.hadoop.io.ArrayWritable) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) NullWritable(org.apache.hadoop.io.NullWritable) VLongWritable(org.apache.hadoop.io.VLongWritable) Writable(org.apache.hadoop.io.Writable) MapWritable(org.apache.hadoop.io.MapWritable) LongWritable(org.apache.hadoop.io.LongWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) ArrayWritable(org.apache.hadoop.io.ArrayWritable) IntWritable(org.apache.hadoop.io.IntWritable) SortedMapWritable(org.apache.hadoop.io.SortedMapWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) VIntWritable(org.apache.hadoop.io.VIntWritable) FloatWritable(org.apache.hadoop.io.FloatWritable)

Aggregations

ArrayWritable (org.apache.hadoop.io.ArrayWritable)72 Test (org.junit.Test)41 IntWritable (org.apache.hadoop.io.IntWritable)31 Writable (org.apache.hadoop.io.Writable)29 Path (org.apache.hadoop.fs.Path)18 DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable)18 LongWritable (org.apache.hadoop.io.LongWritable)18 RecordConsumer (org.apache.parquet.io.api.RecordConsumer)18 ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable)15 ArrayList (java.util.ArrayList)13 BytesWritable (org.apache.hadoop.io.BytesWritable)10 List (java.util.List)9 BooleanWritable (org.apache.hadoop.io.BooleanWritable)8 FloatWritable (org.apache.hadoop.io.FloatWritable)8 StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)6 NullWritable (org.apache.hadoop.io.NullWritable)6 Text (org.apache.hadoop.io.Text)6 ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable)5 TimestampWritable (org.apache.hadoop.hive.serde2.io.TimestampWritable)5 PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector)5