Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector in project flink by apache.
The class HiveInspectors, method toFlinkObject.
/**
 * Converts a Hive object to a Flink object using the given ObjectInspector.
 *
 * @param inspector describes the Hive-side type of {@code data}
 * @param data      the Hive value to convert; may be null
 * @param hiveShim  version-specific shim used for date/timestamp conversion
 * @return the equivalent Flink value, or null for null/void input
 * @throws FlinkHiveUDFException if the inspector category is not supported
 */
public static Object toFlinkObject(ObjectInspector inspector, Object data, HiveShim hiveShim) {
    if (data == null || inspector instanceof VoidObjectInspector) {
        return null;
    }
    if (inspector instanceof PrimitiveObjectInspector) {
        PrimitiveObjectInspector primitiveInspector = (PrimitiveObjectInspector) inspector;
        // Types whose Hive Java representation already matches the Flink representation.
        boolean directMapping = inspector instanceof BooleanObjectInspector
                || inspector instanceof StringObjectInspector
                || inspector instanceof ByteObjectInspector
                || inspector instanceof ShortObjectInspector
                || inspector instanceof IntObjectInspector
                || inspector instanceof LongObjectInspector
                || inspector instanceof FloatObjectInspector
                || inspector instanceof DoubleObjectInspector
                || inspector instanceof BinaryObjectInspector;
        if (directMapping) {
            return primitiveInspector.getPrimitiveJavaObject(data);
        }
        if (inspector instanceof DateObjectInspector) {
            // Date/timestamp representations differ across Hive versions; delegate to the shim.
            return hiveShim.toFlinkDate(primitiveInspector.getPrimitiveJavaObject(data));
        }
        if (inspector instanceof TimestampObjectInspector) {
            return hiveShim.toFlinkTimestamp(primitiveInspector.getPrimitiveJavaObject(data));
        }
        if (inspector instanceof HiveCharObjectInspector) {
            return ((HiveCharObjectInspector) inspector).getPrimitiveJavaObject(data).getValue();
        }
        if (inspector instanceof HiveVarcharObjectInspector) {
            return ((HiveVarcharObjectInspector) inspector).getPrimitiveJavaObject(data).getValue();
        }
        if (inspector instanceof HiveDecimalObjectInspector) {
            return ((HiveDecimalObjectInspector) inspector).getPrimitiveJavaObject(data).bigDecimalValue();
        }
        // Any other primitive inspector falls through to the exception below.
    }
    if (inspector instanceof ListObjectInspector) {
        ListObjectInspector listInspector = (ListObjectInspector) inspector;
        List<?> hiveList = listInspector.getList(data);
        if (hiveList == null) {
            return null;
        }
        // Flink expects a concretely typed array (e.g. Integer[] rather than Object[]),
        // so allocate one from the element type's Flink conversion class.
        ObjectInspector elementInspector = listInspector.getListElementObjectInspector();
        Class<?> elementClass = HiveTypeUtil.toFlinkType(elementInspector).getConversionClass();
        Object[] converted = (Object[]) Array.newInstance(elementClass, hiveList.size());
        int index = 0;
        for (Object element : hiveList) {
            converted[index++] = toFlinkObject(elementInspector, element, hiveShim);
        }
        return converted;
    }
    if (inspector instanceof MapObjectInspector) {
        MapObjectInspector mapInspector = (MapObjectInspector) inspector;
        Map<?, ?> hiveMap = mapInspector.getMap(data);
        if (hiveMap == null) {
            return null;
        }
        ObjectInspector keyInspector = mapInspector.getMapKeyObjectInspector();
        ObjectInspector valueInspector = mapInspector.getMapValueObjectInspector();
        Map<Object, Object> converted = new HashMap<>(hiveMap.size());
        for (Map.Entry<?, ?> entry : hiveMap.entrySet()) {
            Object key = toFlinkObject(keyInspector, entry.getKey(), hiveShim);
            Object value = toFlinkObject(valueInspector, entry.getValue(), hiveShim);
            converted.put(key, value);
        }
        return converted;
    }
    if (inspector instanceof StructObjectInspector) {
        StructObjectInspector structInspector = (StructObjectInspector) inspector;
        List<? extends StructField> fields = structInspector.getAllStructFieldRefs();
        Row row = new Row(fields.size());
        // NOTE(review): a standard struct may apparently hand us a bare value rather than
        // an array/list wrapper; wrap it so getStructFieldData can address it — original
        // comment said only "list as data", confirm against callers.
        if (!data.getClass().isArray() && !(data instanceof List) && (inspector instanceof StandardStructObjectInspector)) {
            data = new Object[] { data };
        }
        for (int i = 0; i < row.getArity(); i++) {
            StructField field = fields.get(i);
            Object fieldData = structInspector.getStructFieldData(data, field);
            row.setField(i, toFlinkObject(field.getFieldObjectInspector(), fieldData, hiveShim));
        }
        return row;
    }
    throw new FlinkHiveUDFException(String.format("Unwrap does not support ObjectInspector '%s' yet", inspector));
}
Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector in project hive by apache.
The class ColumnStatisticsObjTranslator, method unpackDateStats.
/**
 * Copies one DATE column-statistics field out of the raw aggregation value {@code o}
 * into the date-stats struct of {@code statsObj}.
 *
 * @param oi       inspector matching the Hive type of {@code o}
 * @param o        the raw stats value for the field
 * @param csf      which statistics field {@code o} carries
 * @param statsObj target stats object, updated in place
 */
private static void unpackDateStats(ObjectInspector oi, Object o, ColumnStatsField csf, ColumnStatisticsObj statsObj) {
    switch (csf) {
        case COUNT_NULLS:
            statsObj.getStatsData().getDateStats().setNumNulls(((LongObjectInspector) oi).get(o));
            break;
        case NDV:
            statsObj.getStatsData().getDateStats().setNumDVs(((LongObjectInspector) oi).get(o));
            break;
        case MIN:
            // Low value arrives as a DateWritableV2; store it as a metastore Date (days since epoch).
            DateWritableV2 lowValue = ((DateObjectInspector) oi).getPrimitiveWritableObject(o);
            statsObj.getStatsData().getDateStats().setLowValue(new Date(lowValue.getDays()));
            break;
        case MAX:
            DateWritableV2 highValue = ((DateObjectInspector) oi).getPrimitiveWritableObject(o);
            statsObj.getStatsData().getDateStats().setHighValue(new Date(highValue.getDays()));
            break;
        case BITVECTOR:
            // The NDV bit vector is serialized as a binary blob.
            byte[] bitVector = ((BinaryObjectInspector) oi).getPrimitiveJavaObject(o);
            statsObj.getStatsData().getDateStats().setBitVectors(bitVector);
            break;
        default:
            throw new RuntimeException("Unsupported column stat for DATE : " + csf);
    }
}
Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector in project hive by apache.
The class ColumnStatisticsObjTranslator, method unpackLongStats.
/**
 * Copies one LONG column-statistics field out of the raw aggregation value {@code o}
 * into the long-stats struct of {@code statsObj}.
 *
 * @param oi       inspector matching the Hive type of {@code o}
 * @param o        the raw stats value for the field
 * @param csf      which statistics field {@code o} carries
 * @param statsObj target stats object, updated in place
 */
private static void unpackLongStats(ObjectInspector oi, Object o, ColumnStatsField csf, ColumnStatisticsObj statsObj) {
    switch (csf) {
        case COUNT_NULLS:
            statsObj.getStatsData().getLongStats().setNumNulls(((LongObjectInspector) oi).get(o));
            break;
        case NDV:
            statsObj.getStatsData().getLongStats().setNumDVs(((LongObjectInspector) oi).get(o));
            break;
        case MIN:
            statsObj.getStatsData().getLongStats().setLowValue(((LongObjectInspector) oi).get(o));
            break;
        case MAX:
            statsObj.getStatsData().getLongStats().setHighValue(((LongObjectInspector) oi).get(o));
            break;
        case BITVECTOR:
            // The NDV bit vector is serialized as a binary blob.
            byte[] bitVector = ((BinaryObjectInspector) oi).getPrimitiveJavaObject(o);
            statsObj.getStatsData().getLongStats().setBitVectors(bitVector);
            break;
        default:
            throw new RuntimeException("Unsupported column stat for LONG : " + csf);
    }
}
Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector in project hive by apache.
The class ColumnStatisticsObjTranslator, method unpackDoubleStats.
/**
 * Copies one DOUBLE column-statistics field out of the raw aggregation value {@code o}
 * into the double-stats struct of {@code statsObj}.
 *
 * @param oi       inspector matching the Hive type of {@code o}
 * @param o        the raw stats value for the field
 * @param csf      which statistics field {@code o} carries
 * @param statsObj target stats object, updated in place
 * @throws UnsupportedDoubleException if a min/max value is NaN or infinite,
 *         which the stats representation cannot hold
 */
private static void unpackDoubleStats(ObjectInspector oi, Object o, ColumnStatsField csf, ColumnStatisticsObj statsObj) throws UnsupportedDoubleException {
    switch (csf) {
        case COUNT_NULLS:
            statsObj.getStatsData().getDoubleStats().setNumNulls(((LongObjectInspector) oi).get(o));
            break;
        case NDV:
            statsObj.getStatsData().getDoubleStats().setNumDVs(((LongObjectInspector) oi).get(o));
            break;
        case MIN:
            double lowValue = ((DoubleObjectInspector) oi).get(o);
            // Non-finite extremes are rejected rather than stored.
            if (Double.isNaN(lowValue) || Double.isInfinite(lowValue)) {
                throw new UnsupportedDoubleException();
            }
            statsObj.getStatsData().getDoubleStats().setLowValue(lowValue);
            break;
        case MAX:
            double highValue = ((DoubleObjectInspector) oi).get(o);
            if (Double.isNaN(highValue) || Double.isInfinite(highValue)) {
                throw new UnsupportedDoubleException();
            }
            statsObj.getStatsData().getDoubleStats().setHighValue(highValue);
            break;
        case BITVECTOR:
            // The NDV bit vector is serialized as a binary blob.
            byte[] bitVector = ((BinaryObjectInspector) oi).getPrimitiveJavaObject(o);
            statsObj.getStatsData().getDoubleStats().setBitVectors(bitVector);
            break;
        default:
            throw new RuntimeException("Unsupported column stat for DOUBLE : " + csf);
    }
}
Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector in project hive by apache.
The class ColumnStatisticsObjTranslator, method unpackStringStats.
/**
 * Copies one STRING column-statistics field out of the raw aggregation value {@code o}
 * into the string-stats struct of {@code statsObj}.
 *
 * @param oi       inspector matching the Hive type of {@code o}
 * @param o        the raw stats value for the field
 * @param csf      which statistics field {@code o} carries
 * @param statsObj target stats object, updated in place
 */
private static void unpackStringStats(ObjectInspector oi, Object o, ColumnStatsField csf, ColumnStatisticsObj statsObj) {
    switch (csf) {
        case COUNT_NULLS:
            statsObj.getStatsData().getStringStats().setNumNulls(((LongObjectInspector) oi).get(o));
            break;
        case NDV:
            statsObj.getStatsData().getStringStats().setNumDVs(((LongObjectInspector) oi).get(o));
            break;
        case MAX_LENGTH:
            statsObj.getStatsData().getStringStats().setMaxColLen(((LongObjectInspector) oi).get(o));
            break;
        case AVG_LENGTH:
            statsObj.getStatsData().getStringStats().setAvgColLen(((DoubleObjectInspector) oi).get(o));
            break;
        case BITVECTOR:
            // The NDV bit vector is serialized as a binary blob.
            byte[] bitVector = ((BinaryObjectInspector) oi).getPrimitiveJavaObject(o);
            statsObj.getStatsData().getStringStats().setBitVectors(bitVector);
            break;
        default:
            throw new RuntimeException("Unsupported column stat for STRING : " + csf);
    }
}
Aggregations