Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector in project flink by splunk.
The class HiveInspectors, method getConversion.
/**
 * Get a conversion for converting a Flink object to a Hive object, given an ObjectInspector
 * and the corresponding Flink LogicalType.
 */
public static HiveObjectConversion getConversion(
        ObjectInspector inspector, LogicalType dataType, HiveShim hiveShim) {
    if (inspector instanceof PrimitiveObjectInspector) {
        HiveObjectConversion conversion;
        if (inspector instanceof BooleanObjectInspector
                || inspector instanceof StringObjectInspector
                || inspector instanceof ByteObjectInspector
                || inspector instanceof ShortObjectInspector
                || inspector instanceof IntObjectInspector
                || inspector instanceof LongObjectInspector
                || inspector instanceof FloatObjectInspector
                || inspector instanceof DoubleObjectInspector
                || inspector instanceof BinaryObjectInspector
                || inspector instanceof VoidObjectInspector) {
            conversion = IdentityConversion.INSTANCE;
        } else if (inspector instanceof DateObjectInspector) {
            conversion = hiveShim::toHiveDate;
        } else if (inspector instanceof TimestampObjectInspector) {
            conversion = hiveShim::toHiveTimestamp;
        } else if (inspector instanceof HiveCharObjectInspector) {
            conversion =
                    o -> o == null ? null : new HiveChar((String) o, ((CharType) dataType).getLength());
        } else if (inspector instanceof HiveVarcharObjectInspector) {
            conversion =
                    o -> o == null ? null : new HiveVarchar((String) o, ((VarCharType) dataType).getLength());
        } else if (inspector instanceof HiveDecimalObjectInspector) {
            conversion = o -> o == null ? null : HiveDecimal.create((BigDecimal) o);
        } else {
            throw new FlinkHiveUDFException(
                    "Unsupported primitive object inspector " + inspector.getClass().getName());
        }
        // currently this happens for constant arguments for UDFs
        if (((PrimitiveObjectInspector) inspector).preferWritable()) {
            conversion = new WritableHiveObjectConversion(conversion, hiveShim);
        }
        return conversion;
    }
    if (inspector instanceof ListObjectInspector) {
        HiveObjectConversion eleConvert =
                getConversion(
                        ((ListObjectInspector) inspector).getListElementObjectInspector(),
                        ((ArrayType) dataType).getElementType(),
                        hiveShim);
        return o -> {
            if (o == null) {
                return null;
            }
            Object[] array = (Object[]) o;
            List<Object> result = new ArrayList<>();
            for (Object ele : array) {
                result.add(eleConvert.toHiveObject(ele));
            }
            return result;
        };
    }
    if (inspector instanceof MapObjectInspector) {
        MapObjectInspector mapInspector = (MapObjectInspector) inspector;
        MapType kvType = (MapType) dataType;
        HiveObjectConversion keyConversion =
                getConversion(mapInspector.getMapKeyObjectInspector(), kvType.getKeyType(), hiveShim);
        HiveObjectConversion valueConversion =
                getConversion(mapInspector.getMapValueObjectInspector(), kvType.getValueType(), hiveShim);
        return o -> {
            if (o == null) {
                return null;
            }
            Map<Object, Object> map = (Map) o;
            Map<Object, Object> result = new HashMap<>(map.size());
            for (Map.Entry<Object, Object> entry : map.entrySet()) {
                result.put(
                        keyConversion.toHiveObject(entry.getKey()),
                        valueConversion.toHiveObject(entry.getValue()));
            }
            return result;
        };
    }
    if (inspector instanceof StructObjectInspector) {
        StructObjectInspector structInspector = (StructObjectInspector) inspector;
        List<? extends StructField> structFields = structInspector.getAllStructFieldRefs();
        List<RowType.RowField> rowFields = ((RowType) dataType).getFields();
        HiveObjectConversion[] conversions = new HiveObjectConversion[structFields.size()];
        for (int i = 0; i < structFields.size(); i++) {
            conversions[i] =
                    getConversion(
                            structFields.get(i).getFieldObjectInspector(),
                            rowFields.get(i).getType(),
                            hiveShim);
        }
        return o -> {
            if (o == null) {
                return null;
            }
            Row row = (Row) o;
            List<Object> result = new ArrayList<>(row.getArity());
            for (int i = 0; i < row.getArity(); i++) {
                result.add(conversions[i].toHiveObject(row.getField(i)));
            }
            return result;
        };
    }
    throw new FlinkHiveUDFException(
            String.format("Flink doesn't support object conversion for %s yet", inspector));
}
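A minimal usage sketch (not from the Flink sources above; the wiring is an assumption for illustration): obtain a conversion for a VARCHAR(10) argument and apply it to a Flink string value. HiveShimLoader and PrimitiveObjectInspectorFactory are the usual entry points for the shim and the inspector, though real UDF wrappers derive both from the function's argument metadata.
// Hedged sketch: conversion for a VARCHAR(10) argument. The shim lookup and
// inspector construction here are assumptions for the example.
ObjectInspector varcharOI =
        PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(
                TypeInfoFactory.getVarcharTypeInfo(10));
HiveShim shim = HiveShimLoader.loadHiveShim(HiveShimLoader.getHiveVersion());
HiveObjectConversion conversion =
        HiveInspectors.getConversion(varcharOI, new VarCharType(10), shim);
// The HiveVarcharObjectInspector branch above wraps the string in a HiveVarchar:
Object hiveValue = conversion.toHiveObject("hello"); // -> HiveVarchar("hello", 10)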
Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector in project ion-hive-serde by amzn.
The class AbstractTextSerializer, method serialize.
@Override
public final void serialize(final IonWriter writer, final Object data, final ObjectInspector objectInspector)
        throws IOException {
    String text = null;
    final PrimitiveCategory category =
            ((PrimitiveObjectInspector) objectInspector).getPrimitiveCategory();
    switch (category) {
        case CHAR:
            final HiveChar hiveChar =
                    ((HiveCharObjectInspector) objectInspector).getPrimitiveJavaObject(data);
            text = hiveChar.getStrippedValue();
            break;
        case STRING:
            text = ((StringObjectInspector) objectInspector).getPrimitiveJavaObject(data);
            break;
        case VARCHAR:
            final HiveVarchar hiveVarchar =
                    ((HiveVarcharObjectInspector) objectInspector).getPrimitiveJavaObject(data);
            text = hiveVarchar.getValue();
            break;
        default:
            throw new IllegalArgumentException("Invalid object category for text serializer: " + category);
    }
    writeText(writer, text);
}
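The method above is final and funnels CHAR, STRING, and VARCHAR values into a single writeText call. A hedged sketch of a concrete subclass, assuming the hook has the (IonWriter, String) shape implied by that call (the visibility and the actual subclasses in ion-hive-serde may differ):
// Hypothetical subclass for illustration; writeText(IonWriter, String) is
// assumed to be the abstract hook invoked by serialize(...) above.
public class IonStringSerializer extends AbstractTextSerializer {
    @Override
    protected void writeText(final IonWriter writer, final String text) throws IOException {
        writer.writeString(text); // emit CHAR/VARCHAR/STRING values as an Ion string
    }
}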
Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector in project trino by trinodb.
The class SerDeUtils, method serializePrimitive.
private static void serializePrimitive(Type type, BlockBuilder builder, Object object, PrimitiveObjectInspector inspector) {
    requireNonNull(builder, "builder is null");
    if (object == null) {
        builder.appendNull();
        return;
    }
    switch (inspector.getPrimitiveCategory()) {
        case BOOLEAN:
            type.writeBoolean(builder, ((BooleanObjectInspector) inspector).get(object));
            return;
        case BYTE:
            type.writeLong(builder, ((ByteObjectInspector) inspector).get(object));
            return;
        case SHORT:
            type.writeLong(builder, ((ShortObjectInspector) inspector).get(object));
            return;
        case INT:
            type.writeLong(builder, ((IntObjectInspector) inspector).get(object));
            return;
        case LONG:
            type.writeLong(builder, ((LongObjectInspector) inspector).get(object));
            return;
        case FLOAT:
            type.writeLong(builder, floatToRawIntBits(((FloatObjectInspector) inspector).get(object)));
            return;
        case DOUBLE:
            type.writeDouble(builder, ((DoubleObjectInspector) inspector).get(object));
            return;
        case STRING:
            type.writeSlice(builder, Slices.utf8Slice(((StringObjectInspector) inspector).getPrimitiveJavaObject(object)));
            return;
        case VARCHAR:
            type.writeSlice(builder, Slices.utf8Slice(((HiveVarcharObjectInspector) inspector).getPrimitiveJavaObject(object).getValue()));
            return;
        case CHAR:
            HiveChar hiveChar = ((HiveCharObjectInspector) inspector).getPrimitiveJavaObject(object);
            type.writeSlice(builder, truncateToLengthAndTrimSpaces(Slices.utf8Slice(hiveChar.getValue()), ((CharType) type).getLength()));
            return;
        case DATE:
            type.writeLong(builder, formatDateAsLong(object, (DateObjectInspector) inspector));
            return;
        case TIMESTAMP:
            TimestampType timestampType = (TimestampType) type;
            DecodedTimestamp timestamp = formatTimestamp(timestampType, object, (TimestampObjectInspector) inspector);
            createTimestampEncoder(timestampType, DateTimeZone.UTC).write(timestamp, builder);
            return;
        case BINARY:
            type.writeSlice(builder, Slices.wrappedBuffer(((BinaryObjectInspector) inspector).getPrimitiveJavaObject(object)));
            return;
        case DECIMAL:
            DecimalType decimalType = (DecimalType) type;
            HiveDecimalWritable hiveDecimal = ((HiveDecimalObjectInspector) inspector).getPrimitiveWritableObject(object);
            if (decimalType.isShort()) {
                type.writeLong(builder, DecimalUtils.getShortDecimalValue(hiveDecimal, decimalType.getScale()));
            }
            else {
                type.writeObject(builder, DecimalUtils.getLongDecimalValue(hiveDecimal, decimalType.getScale()));
            }
            return;
        case VOID:
        case TIMESTAMPLOCALTZ:
        case INTERVAL_YEAR_MONTH:
        case INTERVAL_DAY_TIME:
        case UNKNOWN:
    }
    throw new RuntimeException("Unknown primitive type: " + inspector.getPrimitiveCategory());
}
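For the VARCHAR branch specifically, the effect is equivalent to this sketch built from public Trino SPI calls; the surrounding setup is illustrative and not part of SerDeUtils:
// Hedged sketch: a HiveVarchar's value is written into a VARCHAR block as a
// UTF-8 slice, matching the VARCHAR case above.
Type varcharType = VarcharType.createVarcharType(10);
BlockBuilder builder = varcharType.createBlockBuilder(null, 1);
HiveVarchar value = new HiveVarchar("hello", 10);
varcharType.writeSlice(builder, Slices.utf8Slice(value.getValue()));
Block block = builder.build(); // single-position VARCHAR block holding "hello"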
Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector in project hive by apache.
The class StatsUtils, method getAvgColLenOf.
/**
 * Get the raw data size of variable length data types
 * @param conf
 *          - hive conf
 * @param oi
 *          - object inspector
 * @param colType
 *          - column type
 * @return raw data size
 */
public static long getAvgColLenOf(HiveConf conf, ObjectInspector oi, String colType) {
    long configVarLen = HiveConf.getIntVar(conf, HiveConf.ConfVars.HIVE_STATS_MAX_VARIABLE_LENGTH);
    String colTypeLowCase = colType.toLowerCase();
    if (colTypeLowCase.equals(serdeConstants.STRING_TYPE_NAME)) {
        // constant string projection Ex: select "hello" from table
        if (oi instanceof ConstantObjectInspector) {
            ConstantObjectInspector coi = (ConstantObjectInspector) oi;
            // if writable constant is null then return size 0
            Object constantValue = coi.getWritableConstantValue();
            return constantValue == null ? 0 : constantValue.toString().length();
        } else if (oi instanceof StringObjectInspector) {
            // return the variable length from config
            return configVarLen;
        }
    } else if (colTypeLowCase.startsWith(serdeConstants.VARCHAR_TYPE_NAME)) {
        // constant varchar projection
        if (oi instanceof ConstantObjectInspector) {
            ConstantObjectInspector coi = (ConstantObjectInspector) oi;
            // if writable constant is null then return size 0
            Object constantValue = coi.getWritableConstantValue();
            return constantValue == null ? 0 : constantValue.toString().length();
        } else if (oi instanceof HiveVarcharObjectInspector) {
            VarcharTypeInfo type = (VarcharTypeInfo) ((HiveVarcharObjectInspector) oi).getTypeInfo();
            return type.getLength();
        }
    } else if (colTypeLowCase.startsWith(serdeConstants.CHAR_TYPE_NAME)) {
        // constant char projection
        if (oi instanceof ConstantObjectInspector) {
            ConstantObjectInspector coi = (ConstantObjectInspector) oi;
            // if writable constant is null then return size 0
            Object constantValue = coi.getWritableConstantValue();
            return constantValue == null ? 0 : constantValue.toString().length();
        } else if (oi instanceof HiveCharObjectInspector) {
            CharTypeInfo type = (CharTypeInfo) ((HiveCharObjectInspector) oi).getTypeInfo();
            return type.getLength();
        }
    } else if (colTypeLowCase.equals(serdeConstants.BINARY_TYPE_NAME)) {
        // constant byte arrays
        if (oi instanceof ConstantObjectInspector) {
            ConstantObjectInspector coi = (ConstantObjectInspector) oi;
            // if writable constant is null then return size 0
            BytesWritable constantValue = (BytesWritable) coi.getWritableConstantValue();
            return constantValue == null ? 0 : constantValue.getLength();
        } else if (oi instanceof BinaryObjectInspector) {
            // return the variable length from config
            return configVarLen;
        }
    } else {
        // complex types (map, list, struct, union)
        return getSizeOfComplexTypes(conf, oi);
    }
    throw new IllegalArgumentException("Size requested for unknown type: " + colType + " OI: " + oi.getTypeName());
}
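A short usage sketch, assuming a default HiveConf: for varchar and char columns the declared maximum length is returned, while plain string and binary fall back to the HIVE_STATS_MAX_VARIABLE_LENGTH setting.
// Hedged sketch: the average column length reported for a varchar(50) column
// is its declared maximum length.
HiveConf conf = new HiveConf();
ObjectInspector varcharOI =
        PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(
                TypeInfoFactory.getVarcharTypeInfo(50));
long avgLen = StatsUtils.getAvgColLenOf(conf, varcharOI, "varchar(50)"); // -> 50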
Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector in project hive by apache.
The class BinarySortableSerDe, method serialize.
static void serialize(ByteStream.Output buffer, Object o, ObjectInspector oi, boolean invert,
        byte nullMarker, byte notNullMarker) throws SerDeException {
    // Is this field a null?
    if (o == null) {
        writeByte(buffer, nullMarker, invert);
        return;
    }
    // This field is not a null.
    writeByte(buffer, notNullMarker, invert);
    switch (oi.getCategory()) {
        case PRIMITIVE: {
            PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi;
            switch (poi.getPrimitiveCategory()) {
                case VOID: {
                    return;
                }
                case BOOLEAN: {
                    boolean v = ((BooleanObjectInspector) poi).get(o);
                    writeByte(buffer, (byte) (v ? 2 : 1), invert);
                    return;
                }
                case BYTE: {
                    ByteObjectInspector boi = (ByteObjectInspector) poi;
                    byte v = boi.get(o);
                    writeByte(buffer, (byte) (v ^ 0x80), invert);
                    return;
                }
                case SHORT: {
                    ShortObjectInspector spoi = (ShortObjectInspector) poi;
                    short v = spoi.get(o);
                    serializeShort(buffer, v, invert);
                    return;
                }
                case INT: {
                    IntObjectInspector ioi = (IntObjectInspector) poi;
                    int v = ioi.get(o);
                    serializeInt(buffer, v, invert);
                    return;
                }
                case LONG: {
                    LongObjectInspector loi = (LongObjectInspector) poi;
                    long v = loi.get(o);
                    serializeLong(buffer, v, invert);
                    return;
                }
                case FLOAT: {
                    FloatObjectInspector foi = (FloatObjectInspector) poi;
                    serializeFloat(buffer, foi.get(o), invert);
                    return;
                }
                case DOUBLE: {
                    DoubleObjectInspector doi = (DoubleObjectInspector) poi;
                    serializeDouble(buffer, doi.get(o), invert);
                    return;
                }
                case STRING: {
                    StringObjectInspector soi = (StringObjectInspector) poi;
                    Text t = soi.getPrimitiveWritableObject(o);
                    serializeBytes(buffer, t.getBytes(), t.getLength(), invert);
                    return;
                }
                case CHAR: {
                    HiveCharObjectInspector hcoi = (HiveCharObjectInspector) poi;
                    HiveCharWritable hc = hcoi.getPrimitiveWritableObject(o);
                    // Trailing space should be ignored for char comparisons.
                    // So write stripped values for this SerDe.
                    Text t = hc.getStrippedValue();
                    serializeBytes(buffer, t.getBytes(), t.getLength(), invert);
                    return;
                }
                case VARCHAR: {
                    HiveVarcharObjectInspector hcoi = (HiveVarcharObjectInspector) poi;
                    HiveVarcharWritable hc = hcoi.getPrimitiveWritableObject(o);
                    // use varchar's text field directly
                    Text t = hc.getTextValue();
                    serializeBytes(buffer, t.getBytes(), t.getLength(), invert);
                    return;
                }
                case BINARY: {
                    BinaryObjectInspector baoi = (BinaryObjectInspector) poi;
                    BytesWritable ba = baoi.getPrimitiveWritableObject(o);
                    byte[] toSer = new byte[ba.getLength()];
                    System.arraycopy(ba.getBytes(), 0, toSer, 0, ba.getLength());
                    serializeBytes(buffer, toSer, ba.getLength(), invert);
                    return;
                }
                case DATE: {
                    DateObjectInspector doi = (DateObjectInspector) poi;
                    int v = doi.getPrimitiveWritableObject(o).getDays();
                    serializeInt(buffer, v, invert);
                    return;
                }
                case TIMESTAMP: {
                    TimestampObjectInspector toi = (TimestampObjectInspector) poi;
                    TimestampWritableV2 t = toi.getPrimitiveWritableObject(o);
                    serializeTimestampWritable(buffer, t, invert);
                    return;
                }
                case TIMESTAMPLOCALTZ: {
                    TimestampLocalTZObjectInspector toi = (TimestampLocalTZObjectInspector) poi;
                    TimestampLocalTZWritable t = toi.getPrimitiveWritableObject(o);
                    serializeTimestampTZWritable(buffer, t, invert);
                    return;
                }
                case INTERVAL_YEAR_MONTH: {
                    HiveIntervalYearMonthObjectInspector ioi = (HiveIntervalYearMonthObjectInspector) poi;
                    HiveIntervalYearMonth intervalYearMonth = ioi.getPrimitiveJavaObject(o);
                    serializeHiveIntervalYearMonth(buffer, intervalYearMonth, invert);
                    return;
                }
                case INTERVAL_DAY_TIME: {
                    HiveIntervalDayTimeObjectInspector ioi = (HiveIntervalDayTimeObjectInspector) poi;
                    HiveIntervalDayTime intervalDayTime = ioi.getPrimitiveJavaObject(o);
                    serializeHiveIntervalDayTime(buffer, intervalDayTime, invert);
                    return;
                }
                case DECIMAL: {
                    HiveDecimalObjectInspector boi = (HiveDecimalObjectInspector) poi;
                    HiveDecimal dec = boi.getPrimitiveJavaObject(o);
                    serializeHiveDecimal(buffer, dec, invert);
                    return;
                }
                default: {
                    throw new RuntimeException("Unrecognized type: " + poi.getPrimitiveCategory());
                }
            }
        }
        case LIST: {
            ListObjectInspector loi = (ListObjectInspector) oi;
            ObjectInspector eoi = loi.getListElementObjectInspector();
            // \1 followed by each element
            int size = loi.getListLength(o);
            for (int eid = 0; eid < size; eid++) {
                writeByte(buffer, (byte) 1, invert);
                serialize(buffer, loi.getListElement(o, eid), eoi, invert, nullMarker, notNullMarker);
            }
            // and \0 to terminate
            writeByte(buffer, (byte) 0, invert);
            return;
        }
        case MAP: {
            MapObjectInspector moi = (MapObjectInspector) oi;
            ObjectInspector koi = moi.getMapKeyObjectInspector();
            ObjectInspector voi = moi.getMapValueObjectInspector();
            // \1 followed by each key and then each value
            Map<?, ?> map = moi.getMap(o);
            for (Map.Entry<?, ?> entry : map.entrySet()) {
                writeByte(buffer, (byte) 1, invert);
                serialize(buffer, entry.getKey(), koi, invert, nullMarker, notNullMarker);
                serialize(buffer, entry.getValue(), voi, invert, nullMarker, notNullMarker);
            }
            // and \0 to terminate
            writeByte(buffer, (byte) 0, invert);
            return;
        }
        case STRUCT: {
            StructObjectInspector soi = (StructObjectInspector) oi;
            List<? extends StructField> fields = soi.getAllStructFieldRefs();
            for (int i = 0; i < fields.size(); i++) {
                serialize(buffer, soi.getStructFieldData(o, fields.get(i)),
                        fields.get(i).getFieldObjectInspector(), invert, nullMarker, notNullMarker);
            }
            return;
        }
        case UNION: {
            UnionObjectInspector uoi = (UnionObjectInspector) oi;
            byte tag = uoi.getTag(o);
            writeByte(buffer, tag, invert);
            serialize(buffer, uoi.getField(o), uoi.getObjectInspectors().get(tag), invert, nullMarker, notNullMarker);
            return;
        }
        default: {
            throw new RuntimeException("Unrecognized type: " + oi.getCategory());
        }
    }
}
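Every branch writes bytes whose unsigned order mirrors the order of the original values, which is what makes the output binary-sortable. A hedged sketch of that property (serialize is package-private, so this would live in the same package or a test; the 0/1 marker bytes mirror the SerDe's defaults):
// Hedged sketch: serialized byte order matches value order for strings.
ByteStream.Output a = new ByteStream.Output();
ByteStream.Output b = new ByteStream.Output();
ObjectInspector soi = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
BinarySortableSerDe.serialize(a, "apple", soi, false, (byte) 0, (byte) 1);
BinarySortableSerDe.serialize(b, "banana", soi, false, (byte) 0, (byte) 1);
int cmp = WritableComparator.compareBytes(
        a.getData(), 0, a.getLength(), b.getData(), 0, b.getLength());
// cmp < 0, agreeing with "apple".compareTo("banana") < 0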