Use of org.apache.hadoop.hive.common.type.HiveChar in project flink by apache.
The class HiveInspectors, method getConversion.
/**
 * Get a conversion for converting a Flink object to a Hive object, given an ObjectInspector
 * and the corresponding Flink LogicalType.
 */
public static HiveObjectConversion getConversion(
        ObjectInspector inspector, LogicalType dataType, HiveShim hiveShim) {
    if (inspector instanceof PrimitiveObjectInspector) {
        HiveObjectConversion conversion;
        if (inspector instanceof BooleanObjectInspector
                || inspector instanceof StringObjectInspector
                || inspector instanceof ByteObjectInspector
                || inspector instanceof ShortObjectInspector
                || inspector instanceof IntObjectInspector
                || inspector instanceof LongObjectInspector
                || inspector instanceof FloatObjectInspector
                || inspector instanceof DoubleObjectInspector
                || inspector instanceof BinaryObjectInspector
                || inspector instanceof VoidObjectInspector) {
            conversion = IdentityConversion.INSTANCE;
        } else if (inspector instanceof DateObjectInspector) {
            conversion = hiveShim::toHiveDate;
        } else if (inspector instanceof TimestampObjectInspector) {
            conversion = hiveShim::toHiveTimestamp;
        } else if (inspector instanceof HiveCharObjectInspector) {
            conversion = o -> o == null
                    ? null
                    : new HiveChar((String) o, ((CharType) dataType).getLength());
        } else if (inspector instanceof HiveVarcharObjectInspector) {
            conversion = o -> o == null
                    ? null
                    : new HiveVarchar((String) o, ((VarCharType) dataType).getLength());
        } else if (inspector instanceof HiveDecimalObjectInspector) {
            conversion = o -> o == null ? null : HiveDecimal.create((BigDecimal) o);
        } else {
            throw new FlinkHiveUDFException(
                    "Unsupported primitive object inspector " + inspector.getClass().getName());
        }
        // currently this happens for constant arguments for UDFs
        if (((PrimitiveObjectInspector) inspector).preferWritable()) {
            conversion = new WritableHiveObjectConversion(conversion, hiveShim);
        }
        return conversion;
    }
    if (inspector instanceof ListObjectInspector) {
        HiveObjectConversion eleConvert = getConversion(
                ((ListObjectInspector) inspector).getListElementObjectInspector(),
                ((ArrayType) dataType).getElementType(),
                hiveShim);
        return o -> {
            if (o == null) {
                return null;
            }
            Object[] array = (Object[]) o;
            List<Object> result = new ArrayList<>();
            for (Object ele : array) {
                result.add(eleConvert.toHiveObject(ele));
            }
            return result;
        };
    }
    if (inspector instanceof MapObjectInspector) {
        MapObjectInspector mapInspector = (MapObjectInspector) inspector;
        MapType kvType = (MapType) dataType;
        HiveObjectConversion keyConversion = getConversion(
                mapInspector.getMapKeyObjectInspector(), kvType.getKeyType(), hiveShim);
        HiveObjectConversion valueConversion = getConversion(
                mapInspector.getMapValueObjectInspector(), kvType.getValueType(), hiveShim);
        return o -> {
            if (o == null) {
                return null;
            }
            Map<Object, Object> map = (Map) o;
            Map<Object, Object> result = new HashMap<>(map.size());
            for (Map.Entry<Object, Object> entry : map.entrySet()) {
                result.put(
                        keyConversion.toHiveObject(entry.getKey()),
                        valueConversion.toHiveObject(entry.getValue()));
            }
            return result;
        };
    }
    if (inspector instanceof StructObjectInspector) {
        StructObjectInspector structInspector = (StructObjectInspector) inspector;
        List<? extends StructField> structFields = structInspector.getAllStructFieldRefs();
        List<RowType.RowField> rowFields = ((RowType) dataType).getFields();
        HiveObjectConversion[] conversions = new HiveObjectConversion[structFields.size()];
        for (int i = 0; i < structFields.size(); i++) {
            conversions[i] = getConversion(
                    structFields.get(i).getFieldObjectInspector(),
                    rowFields.get(i).getType(),
                    hiveShim);
        }
        return o -> {
            if (o == null) {
                return null;
            }
            Row row = (Row) o;
            List<Object> result = new ArrayList<>(row.getArity());
            for (int i = 0; i < row.getArity(); i++) {
                result.add(conversions[i].toHiveObject(row.getField(i)));
            }
            return result;
        };
    }
    throw new FlinkHiveUDFException(
            String.format("Flink doesn't support object conversion for %s yet", inspector));
}
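
For context, here is a minimal sketch of how a caller could obtain and apply such a conversion for a CHAR(10) argument. The inspector factory and shim loader are standard Hive and Flink APIs, but the wrapper class and everything outside getConversion itself are assumptions about the surrounding setup, not part of the snippet above.

import org.apache.flink.table.catalog.hive.client.HiveShim;
import org.apache.flink.table.catalog.hive.client.HiveShimLoader;
import org.apache.flink.table.functions.hive.conversion.HiveInspectors;
import org.apache.flink.table.functions.hive.conversion.HiveObjectConversion;
import org.apache.flink.table.types.logical.CharType;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;

// Hypothetical wrapper class, for illustration only.
public class CharConversionSketch {
    public static void main(String[] args) {
        // A Java (non-writable) inspector for CHAR(10); preferWritable() is false for it,
        // so getConversion returns the plain HiveChar-producing lambda.
        ObjectInspector inspector =
                PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(
                        new CharTypeInfo(10));
        HiveShim shim = HiveShimLoader.loadHiveShim(HiveShimLoader.getHiveVersion());
        HiveObjectConversion conversion =
                HiveInspectors.getConversion(inspector, new CharType(10), shim);
        // Wraps the Flink string into a HiveChar carrying the declared length.
        Object hiveValue = conversion.toHiveObject("flink");
        System.out.println(hiveValue); // HiveChar value, padded to length 10 when printed
    }
}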
Use of org.apache.hadoop.hive.common.type.HiveChar in project flink by apache.
The class HiveShimV100, method javaToWritable.
Optional<Writable> javaToWritable(@Nonnull Object value) {
    Writable writable = null;
    // in case value is already a Writable
    if (value instanceof Writable) {
        writable = (Writable) value;
    } else if (value instanceof Boolean) {
        writable = new BooleanWritable((Boolean) value);
    } else if (value instanceof Byte) {
        writable = new ByteWritable((Byte) value);
    } else if (value instanceof Short) {
        writable = new ShortWritable((Short) value);
    } else if (value instanceof Integer) {
        writable = new IntWritable((Integer) value);
    } else if (value instanceof Long) {
        writable = new LongWritable((Long) value);
    } else if (value instanceof Float) {
        writable = new FloatWritable((Float) value);
    } else if (value instanceof Double) {
        writable = new DoubleWritable((Double) value);
    } else if (value instanceof String) {
        writable = new Text((String) value);
    } else if (value instanceof HiveChar) {
        writable = new HiveCharWritable((HiveChar) value);
    } else if (value instanceof HiveVarchar) {
        writable = new HiveVarcharWritable((HiveVarchar) value);
    } else if (value instanceof HiveDecimal) {
        writable = new HiveDecimalWritable((HiveDecimal) value);
    } else if (value instanceof Date) {
        writable = new DateWritable((Date) value);
    } else if (value instanceof Timestamp) {
        writable = new TimestampWritable((Timestamp) value);
    } else if (value instanceof BigDecimal) {
        HiveDecimal hiveDecimal = HiveDecimal.create((BigDecimal) value);
        writable = new HiveDecimalWritable(hiveDecimal);
    } else if (value instanceof byte[]) {
        writable = new BytesWritable((byte[]) value);
    }
    return Optional.ofNullable(writable);
}
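
Similarly, a minimal sketch of the shim method above in use. Since javaToWritable is package-private, the sketch is assumed to live in org.apache.flink.table.catalog.hive.client; the wrapper class and its name are illustrative only.

import java.util.Optional;

import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.io.Writable;

// Hypothetical class, assumed to sit in the shim's package for visibility.
public class JavaToWritableSketch {
    public static void main(String[] args) {
        HiveShimV100 shim = new HiveShimV100();
        // Already-typed Hive values are wrapped in their dedicated writables.
        Optional<Writable> charWritable =
                shim.javaToWritable(new HiveChar("abc", 5)); // Optional[HiveCharWritable]
        // Plain Java values are boxed into the matching Hadoop writables.
        Optional<Writable> intWritable = shim.javaToWritable(42); // Optional[IntWritable]
        // Anything without a mapping falls through to Optional.empty().
        Optional<Writable> unmapped = shim.javaToWritable(new Object());
        System.out.println(charWritable + " " + intWritable + " " + unmapped);
    }
}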