Search in sources :

Example 56 with Writable

Use of org.apache.hadoop.io.Writable in the Apache Hive project.

From the class SparkPlanGenerator, method generateMapInput:

/**
 * Builds the {@code MapInput} for the given map-side work: clones the job
 * configuration, resolves the input format, and wraps the resulting Hadoop
 * RDD. Caching of the RDD is intentionally disabled (see HIVE-8920).
 */
@SuppressWarnings("unchecked")
private MapInput generateMapInput(SparkPlan sparkPlan, MapWork mapWork) throws Exception {
    JobConf conf = cloneJobConf(mapWork);
    Class inputFormatClass = getInputFormat(conf, mapWork);
    Integer numMapTasks = mapWork.getNumMapTasks();
    final JavaPairRDD<WritableComparable, Writable> rdd;
    if (numMapTasks == null) {
        rdd = sc.hadoopRDD(conf, inputFormatClass, WritableComparable.class, Writable.class);
    } else {
        conf.setNumMapTasks(numMapTasks);
        rdd = sc.hadoopRDD(conf, inputFormatClass, WritableComparable.class, Writable.class, numMapTasks);
    }
    // Caching is disabled for MapInput due to HIVE-8920.
    return new MapInput(sparkPlan, rdd, false);
}
Also used : WritableComparable(org.apache.hadoop.io.WritableComparable) Writable(org.apache.hadoop.io.Writable) JobConf(org.apache.hadoop.mapred.JobConf)

Example 57 with Writable

Use of org.apache.hadoop.io.Writable in the Apache Hive project.

From the class StandardParquetHiveMapInspector, method getMapValueElement:

/**
 * Looks up {@code key} in a map-typed value, which may arrive either as a
 * Parquet {@link ArrayWritable} of (key, value) pair entries or as a plain
 * {@link Map}. Returns {@code null} when either argument is null or the key
 * is absent; throws for any other representation.
 */
@Override
public Object getMapValueElement(final Object data, final Object key) {
    if (data == null || key == null) {
        return null;
    }
    // Plain java.util.Map: delegate directly.
    if (data instanceof Map) {
        return ((Map) data).get(key);
    }
    if (!(data instanceof ArrayWritable)) {
        throw new UnsupportedOperationException("Cannot inspect " + data.getClass().getCanonicalName());
    }
    // Parquet layout: an array of two-element (key, value) ArrayWritables.
    final Writable[] entries = ((ArrayWritable) data).get();
    if (entries == null || entries.length == 0) {
        return null;
    }
    for (final Writable entry : entries) {
        final Writable[] pair = ((ArrayWritable) entry).get();
        if (key.equals(pair[0])) {
            return pair[1];
        }
    }
    return null;
}
Also used : ArrayWritable(org.apache.hadoop.io.ArrayWritable) Writable(org.apache.hadoop.io.Writable) ArrayWritable(org.apache.hadoop.io.ArrayWritable) Map(java.util.Map)

Example 58 with Writable

Use of org.apache.hadoop.io.Writable in the Apache Hive project.

From the class DeepParquetHiveMapInspector, method getMapValueElement:

/**
 * Looks up {@code key} in a map-typed value, accepting either a Parquet
 * {@link ArrayWritable} of (key, value) pair entries or a plain {@link Map}.
 * Unlike the standard inspector, keys are also matched against their
 * primitive Java and primitive Writable forms via {@code keyInspector}.
 * Returns {@code null} when either argument is null or the key is absent.
 */
@Override
public Object getMapValueElement(final Object data, final Object key) {
    if (data == null || key == null) {
        return null;
    }
    if (data instanceof ArrayWritable) {
        final Writable[] entries = ((ArrayWritable) data).get();
        if (entries == null || entries.length == 0) {
            return null;
        }
        for (final Writable entry : entries) {
            final Writable[] pair = ((ArrayWritable) entry).get();
            // Direct equality first, then the inspector-converted forms.
            if (key.equals(pair[0]) || matchesInspectedKey(key, pair[0])) {
                return pair[1];
            }
        }
        return null;
    }
    if (data instanceof Map) {
        final Map<?, ?> map = (Map<?, ?>) data;
        // Fast path: direct containment check.
        if (map.containsKey(key)) {
            return map.get(key);
        }
        // Slow path: compare against inspector-converted key forms.
        for (final Map.Entry<?, ?> entry : map.entrySet()) {
            if (matchesInspectedKey(key, entry.getKey())) {
                return entry.getValue();
            }
        }
        return null;
    }
    throw new UnsupportedOperationException("Cannot inspect " + data.getClass().getCanonicalName());
}

/**
 * True when {@code key} equals {@code candidate} after converting the
 * candidate to either its primitive Java or primitive Writable form.
 */
private boolean matchesInspectedKey(final Object key, final Object candidate) {
    final PrimitiveObjectInspector poi = (PrimitiveObjectInspector) keyInspector;
    return key.equals(poi.getPrimitiveJavaObject(candidate))
        || key.equals(poi.getPrimitiveWritableObject(candidate));
}
Also used : ArrayWritable(org.apache.hadoop.io.ArrayWritable) Writable(org.apache.hadoop.io.Writable) ArrayWritable(org.apache.hadoop.io.ArrayWritable) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) Map(java.util.Map)

Example 59 with Writable

Use of org.apache.hadoop.io.Writable in the Apache Hive project.

From the class ParquetHiveArrayInspector, method getList:

/**
 * Materializes the inspected value as a {@link List}. An
 * {@link ArrayWritable} is copied element-by-element into a new mutable
 * list; an existing {@link List} is returned as-is. Returns {@code null}
 * for null data or a null backing array; throws for any other type.
 */
@Override
public List<?> getList(final Object data) {
    if (data == null) {
        return null;
    }
    // Already a java.util.List: hand it back unchanged.
    if (data instanceof List) {
        return (List<?>) data;
    }
    if (!(data instanceof ArrayWritable)) {
        throw new UnsupportedOperationException("Cannot inspect " + data.getClass().getCanonicalName());
    }
    final Writable[] elements = ((ArrayWritable) data).get();
    if (elements == null) {
        return null;
    }
    // Presize to avoid resizing while copying.
    final List<Writable> result = new ArrayList<Writable>(elements.length);
    for (int i = 0; i < elements.length; i++) {
        result.add(elements[i]);
    }
    return result;
}
Also used : ArrayWritable(org.apache.hadoop.io.ArrayWritable) ArrayList(java.util.ArrayList) Writable(org.apache.hadoop.io.Writable) ArrayWritable(org.apache.hadoop.io.ArrayWritable) List(java.util.List) ArrayList(java.util.ArrayList)

Example 60 with Writable

Use of org.apache.hadoop.io.Writable in the Apache Hive project.

From the class SparkPartitionPruningSinkOperator, method process:

/**
 * Serializes one input row with the operator's serializer and appends the
 * resulting Writable to the output buffer. Any failure — serialization or
 * write — is wrapped in a {@link HiveException} with its cause preserved.
 */
@Override
public void process(Object row, int tag) throws HiveException {
    final ObjectInspector inspector = inputObjInspectors[0];
    try {
        serializer.serialize(row, inspector).write(buffer);
    } catch (Exception e) {
        // Preserve the original exception as the cause.
        throw new HiveException(e);
    }
}
Also used : ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) Writable(org.apache.hadoop.io.Writable) IOException(java.io.IOException) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException)

Aggregations

Writable (org.apache.hadoop.io.Writable)221 IntWritable (org.apache.hadoop.io.IntWritable)103 LongWritable (org.apache.hadoop.io.LongWritable)91 BooleanWritable (org.apache.hadoop.io.BooleanWritable)75 BytesWritable (org.apache.hadoop.io.BytesWritable)74 FloatWritable (org.apache.hadoop.io.FloatWritable)73 Test (org.junit.Test)68 IOException (java.io.IOException)43 Path (org.apache.hadoop.fs.Path)43 Text (org.apache.hadoop.io.Text)40 ArrayWritable (org.apache.hadoop.io.ArrayWritable)37 ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable)34 SequenceFile (org.apache.hadoop.io.SequenceFile)32 Configuration (org.apache.hadoop.conf.Configuration)31 DoubleWritable (org.apache.hadoop.io.DoubleWritable)30 DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable)29 ByteWritable (org.apache.hadoop.io.ByteWritable)28 ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable)25 FileSystem (org.apache.hadoop.fs.FileSystem)24 ArrayList (java.util.ArrayList)23