Use of org.apache.hadoop.io.Writable in project hive by apache.
The class SparkPlanGenerator, method generateMapInput.
@SuppressWarnings("unchecked")
private MapInput generateMapInput(SparkPlan sparkPlan, MapWork mapWork) throws Exception {
  JobConf jobConf = cloneJobConf(mapWork);
  Class ifClass = getInputFormat(jobConf, mapWork);
  JavaPairRDD<WritableComparable, Writable> hadoopRDD;
  if (mapWork.getNumMapTasks() != null) {
    jobConf.setNumMapTasks(mapWork.getNumMapTasks());
    hadoopRDD = sc.hadoopRDD(jobConf, ifClass, WritableComparable.class, Writable.class,
        mapWork.getNumMapTasks());
  } else {
    hadoopRDD = sc.hadoopRDD(jobConf, ifClass, WritableComparable.class, Writable.class);
  }
  // Caching is disabled for MapInput due to HIVE-8920.
  MapInput result = new MapInput(sparkPlan, hadoopRDD, false);
  return result;
}
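The hadoopRDD call above is the standard JavaSparkContext API. Below is a minimal, self-contained sketch of the same call with concrete Writable key/value types (TextInputFormat, LongWritable, Text) in place of Hive's generated JobConf and input format; the class name, input path, and local master are illustrative assumptions, not part of the Hive source.

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;

public class HadoopRddSketch {
  public static void main(String[] args) {
    SparkConf conf = new SparkConf().setAppName("hadoopRDD-sketch").setMaster("local[*]");
    try (JavaSparkContext sc = new JavaSparkContext(conf)) {
      JobConf jobConf = new JobConf();
      // "input.txt" is a placeholder path, not something taken from the Hive source above.
      FileInputFormat.setInputPaths(jobConf, args.length > 0 ? args[0] : "input.txt");
      // Same hadoopRDD overload the generator uses, here with concrete Writable types.
      JavaPairRDD<LongWritable, Text> rdd =
          sc.hadoopRDD(jobConf, TextInputFormat.class, LongWritable.class, Text.class);
      System.out.println("records: " + rdd.count());
    }
  }
}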
Use of org.apache.hadoop.io.Writable in project hive by apache.
The class StandardParquetHiveMapInspector, method getMapValueElement.
@Override
public Object getMapValueElement(final Object data, final Object key) {
  if (data == null || key == null) {
    return null;
  }
  if (data instanceof ArrayWritable) {
    final Writable[] mapArray = ((ArrayWritable) data).get();
    if (mapArray == null || mapArray.length == 0) {
      return null;
    }
    // Each map entry is itself a two-element ArrayWritable holding [key, value].
    for (final Writable obj : mapArray) {
      final ArrayWritable mapObj = (ArrayWritable) obj;
      final Writable[] arr = mapObj.get();
      if (key.equals(arr[0])) {
        return arr[1];
      }
    }
    return null;
  }
  if (data instanceof Map) {
    return ((Map) data).get(key);
  }
  throw new UnsupportedOperationException("Cannot inspect " + data.getClass().getCanonicalName());
}
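A minimal, self-contained sketch of the data layout this inspector handles, where each map entry is a two-element ArrayWritable of [key, value]; the class and helper names are hypothetical and only mirror the lookup loop above.

import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

public class MapLookupSketch {
  // Mirrors the inspector's loop: each entry is a two-element ArrayWritable of [key, value].
  static Writable lookup(ArrayWritable map, Writable key) {
    Writable[] entries = map.get();
    if (entries == null) {
      return null;
    }
    for (Writable entry : entries) {
      Writable[] kv = ((ArrayWritable) entry).get();
      if (key.equals(kv[0])) {
        return kv[1];
      }
    }
    return null;
  }

  public static void main(String[] args) {
    ArrayWritable entry1 = new ArrayWritable(Writable.class,
        new Writable[] { new Text("a"), new IntWritable(1) });
    ArrayWritable entry2 = new ArrayWritable(Writable.class,
        new Writable[] { new Text("b"), new IntWritable(2) });
    ArrayWritable mapData = new ArrayWritable(ArrayWritable.class,
        new Writable[] { entry1, entry2 });
    System.out.println(lookup(mapData, new Text("b"))); // 2
  }
}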
Use of org.apache.hadoop.io.Writable in project hive by apache.
The class DeepParquetHiveMapInspector, method getMapValueElement.
@Override
public Object getMapValueElement(final Object data, final Object key) {
  if (data == null || key == null) {
    return null;
  }
  if (data instanceof ArrayWritable) {
    final Writable[] mapArray = ((ArrayWritable) data).get();
    if (mapArray == null || mapArray.length == 0) {
      return null;
    }
    for (final Writable obj : mapArray) {
      final ArrayWritable mapObj = (ArrayWritable) obj;
      final Writable[] arr = mapObj.get();
      // "Deep" lookup: match the stored key against the raw Writable as well as its
      // primitive Java and primitive Writable forms.
      if (key.equals(arr[0])
          || key.equals(((PrimitiveObjectInspector) keyInspector).getPrimitiveJavaObject(arr[0]))
          || key.equals(((PrimitiveObjectInspector) keyInspector).getPrimitiveWritableObject(arr[0]))) {
        return arr[1];
      }
    }
    return null;
  }
  if (data instanceof Map) {
    final Map<?, ?> map = (Map<?, ?>) data;
    if (map.containsKey(key)) {
      return map.get(key);
    }
    for (final Map.Entry<?, ?> entry : map.entrySet()) {
      if (key.equals(((PrimitiveObjectInspector) keyInspector).getPrimitiveJavaObject(entry.getKey()))
          || key.equals(((PrimitiveObjectInspector) keyInspector).getPrimitiveWritableObject(entry.getKey()))) {
        return entry.getValue();
      }
    }
    return null;
  }
  throw new UnsupportedOperationException("Cannot inspect " + data.getClass().getCanonicalName());
}
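The extra comparisons are what distinguish the deep inspector from the standard one: a java.lang.String key never equals a stored Text key directly, but it can equal the key's primitive Java form. A minimal sketch of that distinction, assuming Hive's PrimitiveObjectInspectorFactory (the class name here is made up):

import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;

public class DeepKeyMatchSketch {
  public static void main(String[] args) {
    // Hypothetical setup: keys are stored as Text and described by a writable string inspector.
    PrimitiveObjectInspector keyInspector =
        PrimitiveObjectInspectorFactory.writableStringObjectInspector;
    Text storedKey = new Text("region");

    // The standard comparison fails: a java.lang.String is never equal to a Text.
    System.out.println("region".equals(storedKey)); // false
    // The deep comparison also tries the primitive Java form of the stored key.
    System.out.println("region".equals(keyInspector.getPrimitiveJavaObject(storedKey))); // true
  }
}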
Use of org.apache.hadoop.io.Writable in project hive by apache.
The class ParquetHiveArrayInspector, method getList.
@Override
public List<?> getList(final Object data) {
  if (data == null) {
    return null;
  }
  if (data instanceof ArrayWritable) {
    final Writable[] array = ((ArrayWritable) data).get();
    if (array == null) {
      return null;
    }
    final List<Writable> list = new ArrayList<Writable>(array.length);
    for (final Writable obj : array) {
      list.add(obj);
    }
    return list;
  }
  if (data instanceof List) {
    return (List<?>) data;
  }
  throw new UnsupportedOperationException("Cannot inspect " + data.getClass().getCanonicalName());
}
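A minimal, self-contained sketch of the same unwrapping (ArrayWritable to java.util.List); the class and helper names are hypothetical.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Writable;

public class ArrayInspectorSketch {
  // Same conversion the inspector performs: unwrap the ArrayWritable into a new List.
  static List<Writable> toList(ArrayWritable data) {
    Writable[] array = data.get();
    return array == null ? null : new ArrayList<Writable>(Arrays.asList(array));
  }

  public static void main(String[] args) {
    ArrayWritable arr = new ArrayWritable(IntWritable.class,
        new Writable[] { new IntWritable(10), new IntWritable(20) });
    System.out.println(toList(arr)); // [10, 20]
  }
}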
Use of org.apache.hadoop.io.Writable in project hive by apache.
The class SparkPartitionPruningSinkOperator, method process.
@Override
public void process(Object row, int tag) throws HiveException {
  ObjectInspector rowInspector = inputObjInspectors[0];
  try {
    // Serialize the row and append it to the operator's in-memory output buffer.
    Writable writableRow = serializer.serialize(row, rowInspector);
    writableRow.write(buffer);
  } catch (Exception e) {
    throw new HiveException(e);
  }
}
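Writable.write takes any java.io.DataOutput, and in this operator the target is an in-memory buffer. The sketch below shows the same pattern with Hadoop's DataOutputBuffer and a plain Text value, including a read-back to verify the round trip; the class name and value are illustrative assumptions.

import java.io.IOException;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;

public class WritableBufferSketch {
  public static void main(String[] args) throws IOException {
    // Any Writable can be written to an in-memory DataOutputBuffer, as the operator
    // does with the serialized row.
    Text row = new Text("2024-01-01");
    DataOutputBuffer out = new DataOutputBuffer();
    row.write(out);

    // Read it back to verify the round trip.
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), out.getLength());
    Text copy = new Text();
    copy.readFields(in);
    System.out.println(copy); // 2024-01-01
  }
}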