
Example 16 with HiveVarcharWritable

use of org.apache.hadoop.hive.serde2.io.HiveVarcharWritable in project hive by apache.

the class VectorizedBatchUtil method setVector.

private static void setVector(Object row, StructObjectInspector oi, StructField field, VectorizedRowBatch batch, DataOutputBuffer buffer, int rowIndex, int colIndex, int offset) throws HiveException {
    Object fieldData = oi.getStructFieldData(row, field);
    ObjectInspector foi = field.getFieldObjectInspector();
    // Vectorization only supports PRIMITIVE data types; assert that here.
    assert (foi.getCategory() == Category.PRIMITIVE);
    // Get writable object
    PrimitiveObjectInspector poi = (PrimitiveObjectInspector) foi;
    Object writableCol = poi.getPrimitiveWritableObject(fieldData);
    // NOTE: The default value for null fields in vectorization is 1 for int types,
    // NaN for float/double. String types have no default value for null.
    switch(poi.getPrimitiveCategory()) {
        case BOOLEAN:
            {
                LongColumnVector lcv = (LongColumnVector) batch.cols[offset + colIndex];
                if (writableCol != null) {
                    lcv.vector[rowIndex] = ((BooleanWritable) writableCol).get() ? 1 : 0;
                    lcv.isNull[rowIndex] = false;
                } else {
                    lcv.vector[rowIndex] = 1;
                    setNullColIsNullValue(lcv, rowIndex);
                }
            }
            break;
        case BYTE:
            {
                LongColumnVector lcv = (LongColumnVector) batch.cols[offset + colIndex];
                if (writableCol != null) {
                    lcv.vector[rowIndex] = ((ByteWritable) writableCol).get();
                    lcv.isNull[rowIndex] = false;
                } else {
                    lcv.vector[rowIndex] = 1;
                    setNullColIsNullValue(lcv, rowIndex);
                }
            }
            break;
        case SHORT:
            {
                LongColumnVector lcv = (LongColumnVector) batch.cols[offset + colIndex];
                if (writableCol != null) {
                    lcv.vector[rowIndex] = ((ShortWritable) writableCol).get();
                    lcv.isNull[rowIndex] = false;
                } else {
                    lcv.vector[rowIndex] = 1;
                    setNullColIsNullValue(lcv, rowIndex);
                }
            }
            break;
        case INT:
            {
                LongColumnVector lcv = (LongColumnVector) batch.cols[offset + colIndex];
                if (writableCol != null) {
                    lcv.vector[rowIndex] = ((IntWritable) writableCol).get();
                    lcv.isNull[rowIndex] = false;
                } else {
                    lcv.vector[rowIndex] = 1;
                    setNullColIsNullValue(lcv, rowIndex);
                }
            }
            break;
        case LONG:
            {
                LongColumnVector lcv = (LongColumnVector) batch.cols[offset + colIndex];
                if (writableCol != null) {
                    lcv.vector[rowIndex] = ((LongWritable) writableCol).get();
                    lcv.isNull[rowIndex] = false;
                } else {
                    lcv.vector[rowIndex] = 1;
                    setNullColIsNullValue(lcv, rowIndex);
                }
            }
            break;
        case DATE:
            {
                LongColumnVector lcv = (LongColumnVector) batch.cols[offset + colIndex];
                if (writableCol != null) {
                    lcv.vector[rowIndex] = ((DateWritable) writableCol).getDays();
                    lcv.isNull[rowIndex] = false;
                } else {
                    lcv.vector[rowIndex] = 1;
                    setNullColIsNullValue(lcv, rowIndex);
                }
            }
            break;
        case FLOAT:
            {
                DoubleColumnVector dcv = (DoubleColumnVector) batch.cols[offset + colIndex];
                if (writableCol != null) {
                    dcv.vector[rowIndex] = ((FloatWritable) writableCol).get();
                    dcv.isNull[rowIndex] = false;
                } else {
                    dcv.vector[rowIndex] = Double.NaN;
                    setNullColIsNullValue(dcv, rowIndex);
                }
            }
            break;
        case DOUBLE:
            {
                DoubleColumnVector dcv = (DoubleColumnVector) batch.cols[offset + colIndex];
                if (writableCol != null) {
                    dcv.vector[rowIndex] = ((DoubleWritable) writableCol).get();
                    dcv.isNull[rowIndex] = false;
                } else {
                    dcv.vector[rowIndex] = Double.NaN;
                    setNullColIsNullValue(dcv, rowIndex);
                }
            }
            break;
        case TIMESTAMP:
            {
                TimestampColumnVector tcv = (TimestampColumnVector) batch.cols[offset + colIndex];
                if (writableCol != null) {
                    tcv.set(rowIndex, ((TimestampWritable) writableCol).getTimestamp());
                    tcv.isNull[rowIndex] = false;
                } else {
                    tcv.setNullValue(rowIndex);
                    setNullColIsNullValue(tcv, rowIndex);
                }
            }
            break;
        case INTERVAL_YEAR_MONTH:
            {
                LongColumnVector lcv = (LongColumnVector) batch.cols[offset + colIndex];
                if (writableCol != null) {
                    HiveIntervalYearMonth i = ((HiveIntervalYearMonthWritable) writableCol).getHiveIntervalYearMonth();
                    lcv.vector[rowIndex] = i.getTotalMonths();
                    lcv.isNull[rowIndex] = false;
                } else {
                    lcv.vector[rowIndex] = 1;
                    setNullColIsNullValue(lcv, rowIndex);
                }
            }
            break;
        case INTERVAL_DAY_TIME:
            {
                IntervalDayTimeColumnVector icv = (IntervalDayTimeColumnVector) batch.cols[offset + colIndex];
                if (writableCol != null) {
                    HiveIntervalDayTime idt = ((HiveIntervalDayTimeWritable) writableCol).getHiveIntervalDayTime();
                    icv.set(rowIndex, idt);
                    icv.isNull[rowIndex] = false;
                } else {
                    icv.setNullValue(rowIndex);
                    setNullColIsNullValue(icv, rowIndex);
                }
            }
            break;
        case BINARY:
            {
                BytesColumnVector bcv = (BytesColumnVector) batch.cols[offset + colIndex];
                if (writableCol != null) {
                    bcv.isNull[rowIndex] = false;
                    BytesWritable bw = (BytesWritable) writableCol;
                    byte[] bytes = bw.getBytes();
                    int start = buffer.getLength();
                    int length = bw.getLength();
                    try {
                        buffer.write(bytes, 0, length);
                    } catch (IOException ioe) {
                        throw new IllegalStateException("bad write", ioe);
                    }
                    bcv.setRef(rowIndex, buffer.getData(), start, length);
                } else {
                    setNullColIsNullValue(bcv, rowIndex);
                }
            }
            break;
        case STRING:
            {
                BytesColumnVector bcv = (BytesColumnVector) batch.cols[offset + colIndex];
                if (writableCol != null) {
                    bcv.isNull[rowIndex] = false;
                    Text colText = (Text) writableCol;
                    int start = buffer.getLength();
                    int length = colText.getLength();
                    try {
                        buffer.write(colText.getBytes(), 0, length);
                    } catch (IOException ioe) {
                        throw new IllegalStateException("bad write", ioe);
                    }
                    bcv.setRef(rowIndex, buffer.getData(), start, length);
                } else {
                    setNullColIsNullValue(bcv, rowIndex);
                }
            }
            break;
        case CHAR:
            {
                BytesColumnVector bcv = (BytesColumnVector) batch.cols[offset + colIndex];
                if (writableCol != null) {
                    bcv.isNull[rowIndex] = false;
                    HiveChar colHiveChar = ((HiveCharWritable) writableCol).getHiveChar();
                    byte[] bytes = colHiveChar.getStrippedValue().getBytes();
                    // We assume the CHAR maximum length was enforced when the object was created.
                    int length = bytes.length;
                    int start = buffer.getLength();
                    try {
                        // In vector mode, we store CHAR as unpadded.
                        buffer.write(bytes, 0, length);
                    } catch (IOException ioe) {
                        throw new IllegalStateException("bad write", ioe);
                    }
                    bcv.setRef(rowIndex, buffer.getData(), start, length);
                } else {
                    setNullColIsNullValue(bcv, rowIndex);
                }
            }
            break;
        case VARCHAR:
            {
                BytesColumnVector bcv = (BytesColumnVector) batch.cols[offset + colIndex];
                if (writableCol != null) {
                    bcv.isNull[rowIndex] = false;
                    HiveVarchar colHiveVarchar = ((HiveVarcharWritable) writableCol).getHiveVarchar();
                    byte[] bytes = colHiveVarchar.getValue().getBytes();
                    // We assume the VARCHAR maximum length was enforced when the object was created.
                    int length = bytes.length;
                    int start = buffer.getLength();
                    try {
                        buffer.write(bytes, 0, length);
                    } catch (IOException ioe) {
                        throw new IllegalStateException("bad write", ioe);
                    }
                    bcv.setRef(rowIndex, buffer.getData(), start, length);
                } else {
                    setNullColIsNullValue(bcv, rowIndex);
                }
            }
            break;
        case DECIMAL:
            DecimalColumnVector dcv = (DecimalColumnVector) batch.cols[offset + colIndex];
            if (writableCol != null) {
                dcv.isNull[rowIndex] = false;
                HiveDecimalWritable wobj = (HiveDecimalWritable) writableCol;
                dcv.set(rowIndex, wobj);
            } else {
                setNullColIsNullValue(dcv, rowIndex);
            }
            break;
        default:
            throw new HiveException("Vectorizaton is not supported for datatype:" + poi.getPrimitiveCategory());
    }
}
Also used : HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) TimestampWritable(org.apache.hadoop.hive.serde2.io.TimestampWritable) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) LongWritable(org.apache.hadoop.io.LongWritable) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) IntWritable(org.apache.hadoop.io.IntWritable) HiveIntervalDayTime(org.apache.hadoop.hive.common.type.HiveIntervalDayTime) UnionObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector) StandardStructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) MapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) DateWritable(org.apache.hadoop.hive.serde2.io.DateWritable) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) Text(org.apache.hadoop.io.Text) IOException(java.io.IOException) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) FloatWritable(org.apache.hadoop.io.FloatWritable) HiveIntervalYearMonth(org.apache.hadoop.hive.common.type.HiveIntervalYearMonth)
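The null branches above write a placeholder into the vector (1 for the integer-backed types, NaN for float/double, per the NOTE in the code) and then call setNullColIsNullValue, which the snippet does not show. A minimal sketch of what that helper plausibly does, consistent with how Hive's ColumnVector tracks nulls (the body is an assumption, not verbatim Hive source):

private static void setNullColIsNullValue(ColumnVector cv, int rowIndex) {
    // Flag this row as null and record that the column now contains nulls.
    cv.isNull[rowIndex] = true;
    cv.noNulls = false;
}

The placeholder values matter because vectorized expressions may process every slot regardless of the null flags, so each null row still needs a well-defined dummy value.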

Example 17 with HiveVarcharWritable

use of org.apache.hadoop.hive.serde2.io.HiveVarcharWritable in project hive by apache.

the class VectorUDFAdaptor method setOutputCol.

private void setOutputCol(ColumnVector colVec, int i, Object value) {
    /* Depending on the output type, get the value, cast the result to the
     * correct type if needed, and assign the result into the output vector.
     */
    if (outputOI instanceof WritableStringObjectInspector) {
        BytesColumnVector bv = (BytesColumnVector) colVec;
        Text t;
        if (value instanceof String) {
            t = new Text((String) value);
        } else {
            t = ((WritableStringObjectInspector) outputOI).getPrimitiveWritableObject(value);
        }
        bv.setVal(i, t.getBytes(), 0, t.getLength());
    } else if (outputOI instanceof WritableHiveCharObjectInspector) {
        WritableHiveCharObjectInspector writableHiveCharObjectOI = (WritableHiveCharObjectInspector) outputOI;
        int maxLength = ((CharTypeInfo) writableHiveCharObjectOI.getTypeInfo()).getLength();
        BytesColumnVector bv = (BytesColumnVector) colVec;
        HiveCharWritable hiveCharWritable;
        if (value instanceof HiveCharWritable) {
            hiveCharWritable = ((HiveCharWritable) value);
        } else {
            hiveCharWritable = writableHiveCharObjectOI.getPrimitiveWritableObject(value);
        }
        Text t = hiveCharWritable.getTextValue();
        // In vector mode, we store CHAR as unpadded.
        StringExpr.rightTrimAndTruncate(bv, i, t.getBytes(), 0, t.getLength(), maxLength);
    } else if (outputOI instanceof WritableHiveVarcharObjectInspector) {
        WritableHiveVarcharObjectInspector writableHiveVarcharObjectOI = (WritableHiveVarcharObjectInspector) outputOI;
        int maxLength = ((VarcharTypeInfo) writableHiveVarcharObjectOI.getTypeInfo()).getLength();
        BytesColumnVector bv = (BytesColumnVector) colVec;
        HiveVarcharWritable hiveVarcharWritable;
        if (value instanceof HiveVarcharWritable) {
            hiveVarcharWritable = ((HiveVarcharWritable) value);
        } else {
            hiveVarcharWritable = writableHiveVarcharObjectOI.getPrimitiveWritableObject(value);
        }
        Text t = hiveVarcharWritable.getTextValue();
        StringExpr.truncate(bv, i, t.getBytes(), 0, t.getLength(), maxLength);
    } else if (outputOI instanceof WritableIntObjectInspector) {
        LongColumnVector lv = (LongColumnVector) colVec;
        if (value instanceof Integer) {
            lv.vector[i] = (Integer) value;
        } else {
            lv.vector[i] = ((WritableIntObjectInspector) outputOI).get(value);
        }
    } else if (outputOI instanceof WritableLongObjectInspector) {
        LongColumnVector lv = (LongColumnVector) colVec;
        if (value instanceof Long) {
            lv.vector[i] = (Long) value;
        } else {
            lv.vector[i] = ((WritableLongObjectInspector) outputOI).get(value);
        }
    } else if (outputOI instanceof WritableDoubleObjectInspector) {
        DoubleColumnVector dv = (DoubleColumnVector) colVec;
        if (value instanceof Double) {
            dv.vector[i] = (Double) value;
        } else {
            dv.vector[i] = ((WritableDoubleObjectInspector) outputOI).get(value);
        }
    } else if (outputOI instanceof WritableFloatObjectInspector) {
        DoubleColumnVector dv = (DoubleColumnVector) colVec;
        if (value instanceof Float) {
            dv.vector[i] = (Float) value;
        } else {
            dv.vector[i] = ((WritableFloatObjectInspector) outputOI).get(value);
        }
    } else if (outputOI instanceof WritableShortObjectInspector) {
        LongColumnVector lv = (LongColumnVector) colVec;
        if (value instanceof Short) {
            lv.vector[i] = (Short) value;
        } else {
            lv.vector[i] = ((WritableShortObjectInspector) outputOI).get(value);
        }
    } else if (outputOI instanceof WritableByteObjectInspector) {
        LongColumnVector lv = (LongColumnVector) colVec;
        if (value instanceof Byte) {
            lv.vector[i] = (Byte) value;
        } else {
            lv.vector[i] = ((WritableByteObjectInspector) outputOI).get(value);
        }
    } else if (outputOI instanceof WritableTimestampObjectInspector) {
        TimestampColumnVector tv = (TimestampColumnVector) colVec;
        Timestamp ts;
        if (value instanceof Timestamp) {
            ts = (Timestamp) value;
        } else {
            ts = ((WritableTimestampObjectInspector) outputOI).getPrimitiveJavaObject(value);
        }
        tv.set(i, ts);
    } else if (outputOI instanceof WritableDateObjectInspector) {
        LongColumnVector lv = (LongColumnVector) colVec;
        Date d;
        if (value instanceof Date) {
            d = (Date) value;
        } else {
            d = ((WritableDateObjectInspector) outputOI).getPrimitiveJavaObject(value);
        }
        lv.vector[i] = DateWritable.dateToDays(d);
    } else if (outputOI instanceof WritableBooleanObjectInspector) {
        LongColumnVector lv = (LongColumnVector) colVec;
        if (value instanceof Boolean) {
            lv.vector[i] = (Boolean) value ? 1 : 0;
        } else {
            lv.vector[i] = ((WritableBooleanObjectInspector) outputOI).get(value) ? 1 : 0;
        }
    } else if (outputOI instanceof WritableHiveDecimalObjectInspector) {
        DecimalColumnVector dcv = (DecimalColumnVector) colVec;
        if (value instanceof HiveDecimal) {
            dcv.set(i, (HiveDecimal) value);
        } else {
            HiveDecimal hd = ((WritableHiveDecimalObjectInspector) outputOI).getPrimitiveJavaObject(value);
            dcv.set(i, hd);
        }
    } else if (outputOI instanceof WritableBinaryObjectInspector) {
        BytesWritable bw = (BytesWritable) value;
        BytesColumnVector bv = (BytesColumnVector) colVec;
        bv.setVal(i, bw.getBytes(), 0, bw.getLength());
    } else {
        throw new RuntimeException("Unhandled object type " + outputOI.getTypeName() + " inspector class " + outputOI.getClass().getName() + " value class " + value.getClass().getName());
    }
}
Also used : VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) Timestamp(java.sql.Timestamp) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) HiveCharWritable(org.apache.hadoop.hive.serde2.io.HiveCharWritable) HiveVarcharWritable(org.apache.hadoop.hive.serde2.io.HiveVarcharWritable) Text(org.apache.hadoop.io.Text) BytesWritable(org.apache.hadoop.io.BytesWritable) Date(java.sql.Date) WritableBinaryObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableBinaryObjectInspector)
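The CHAR branch above right-trims before writing because HiveChar values are space-padded to their declared length, while the vectorized representation is unpadded (matching the CHAR case in Example 16). A small standalone illustration of that padding behavior, using only the HiveChar API:

import org.apache.hadoop.hive.common.type.HiveChar;

HiveChar hc = new HiveChar("ab", 5);
String padded = hc.getValue();            // "ab   " (padded to length 5)
String stripped = hc.getStrippedValue();  // "ab" (trailing spaces removed)

VARCHAR carries no padding, which is why the VARCHAR branch only needs StringExpr.truncate rather than rightTrimAndTruncate.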

Example 18 with HiveVarcharWritable

use of org.apache.hadoop.hive.serde2.io.HiveVarcharWritable in project hive by apache.

the class ReaderWriter method writeDatum.

public static void writeDatum(DataOutput out, Object val) throws IOException {
    // write the data type
    byte type = DataType.findType(val);
    out.write(type);
    switch(type) {
        case DataType.LIST:
            List<?> list = (List<?>) val;
            int sz = list.size();
            out.writeInt(sz);
            for (int i = 0; i < sz; i++) {
                writeDatum(out, list.get(i));
            }
            return;
        case DataType.MAP:
            Map<?, ?> m = (Map<?, ?>) val;
            out.writeInt(m.size());
            Iterator<?> i = m.entrySet().iterator();
            while (i.hasNext()) {
                Entry<?, ?> entry = (Entry<?, ?>) i.next();
                writeDatum(out, entry.getKey());
                writeDatum(out, entry.getValue());
            }
            return;
        case DataType.INTEGER:
            new VIntWritable((Integer) val).write(out);
            return;
        case DataType.LONG:
            new VLongWritable((Long) val).write(out);
            return;
        case DataType.FLOAT:
            out.writeFloat((Float) val);
            return;
        case DataType.DOUBLE:
            out.writeDouble((Double) val);
            return;
        case DataType.BOOLEAN:
            out.writeBoolean((Boolean) val);
            return;
        case DataType.BYTE:
            out.writeByte((Byte) val);
            return;
        case DataType.SHORT:
            out.writeShort((Short) val);
            return;
        case DataType.STRING:
            String s = (String) val;
            byte[] utfBytes = s.getBytes(ReaderWriter.UTF8);
            out.writeInt(utfBytes.length);
            out.write(utfBytes);
            return;
        case DataType.BINARY:
            byte[] ba = (byte[]) val;
            out.writeInt(ba.length);
            out.write(ba);
            return;
        case DataType.NULL:
            // for NULL we just write out the type
            return;
        case DataType.CHAR:
            new HiveCharWritable((HiveChar) val).write(out);
            return;
        case DataType.VARCHAR:
            new HiveVarcharWritable((HiveVarchar) val).write(out);
            return;
        case DataType.DECIMAL:
            new HiveDecimalWritable((HiveDecimal) val).write(out);
            return;
        case DataType.DATE:
            new DateWritable((Date) val).write(out);
            return;
        case DataType.TIMESTAMP:
            new TimestampWritable((java.sql.Timestamp) val).write(out);
            return;
        default:
            throw new IOException("Unexpected data type " + type + " found in stream.");
    }
}
Also used : VIntWritable(org.apache.hadoop.io.VIntWritable) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) TimestampWritable(org.apache.hadoop.hive.serde2.io.TimestampWritable) Entry(java.util.Map.Entry) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) ArrayList(java.util.ArrayList) List(java.util.List) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) DateWritable(org.apache.hadoop.hive.serde2.io.DateWritable) HiveCharWritable(org.apache.hadoop.hive.serde2.io.HiveCharWritable) HiveVarcharWritable(org.apache.hadoop.hive.serde2.io.HiveVarcharWritable) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) IOException(java.io.IOException) Date(java.sql.Date) VLongWritable(org.apache.hadoop.io.VLongWritable) HashMap(java.util.HashMap) Map(java.util.Map)
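Because writeDatum prefixes every value with its DataType tag, the stream is self-describing and a reader can dispatch on that same byte. A minimal round-trip sketch for the VARCHAR case, assuming the symmetric ReaderWriter.readDatum in the same class (error handling omitted; the fragment throws IOException):

import java.io.*;
import org.apache.hadoop.hive.common.type.HiveVarchar;

ByteArrayOutputStream bytes = new ByteArrayOutputStream();
ReaderWriter.writeDatum(new DataOutputStream(bytes), new HiveVarchar("hello", 10));

DataInput in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
// The reader sees DataType.VARCHAR, reconstructs the value via
// new HiveVarcharWritable().readFields(in), and returns it.
Object back = ReaderWriter.readDatum(in);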

Example 19 with HiveVarcharWritable

use of org.apache.hadoop.hive.serde2.io.HiveVarcharWritable in project hive by apache.

the class WritableHiveVarcharObjectInspector method copyObject.

@Override
public Object copyObject(Object o) {
    if (o == null) {
        return null;
    }
    if (o instanceof Text) {
        String str = ((Text) o).toString();
        HiveVarcharWritable hcw = new HiveVarcharWritable();
        hcw.set(str, ((VarcharTypeInfo) typeInfo).getLength());
        return hcw;
    }
    HiveVarcharWritable writable = (HiveVarcharWritable) o;
    if (doesWritableMatchTypeParams(writable)) {
        return new HiveVarcharWritable(writable);
    }
    return getWritableWithParams(writable);
}
Also used : HiveVarcharWritable(org.apache.hadoop.hive.serde2.io.HiveVarcharWritable) Text(org.apache.hadoop.io.Text)
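copyObject exists so a caller can retain a value while the serde reuses the underlying object. A short usage sketch; the inspector construction below is illustrative, not taken from the snippet:

VarcharTypeInfo typeInfo = new VarcharTypeInfo(10);
WritableHiveVarcharObjectInspector oi = new WritableHiveVarcharObjectInspector(typeInfo);

HiveVarcharWritable original = new HiveVarcharWritable();
original.set("abc", 10);

HiveVarcharWritable copy = (HiveVarcharWritable) oi.copyObject(original);
original.set("xyz", 10);  // mutating the original...
// ...does not affect the copy, which still holds "abc"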

Example 20 with HiveVarcharWritable

use of org.apache.hadoop.hive.serde2.io.HiveVarcharWritable in project hive by apache.

the class WritableHiveVarcharObjectInspector method getPrimitiveWritableObject.

@Override
public HiveVarcharWritable getPrimitiveWritableObject(Object o) {
    // Check the input object's length; if it doesn't match the type params,
    // then output a new writable with the correct params.
    if (o == null) {
        return null;
    }
    if (o instanceof Text) {
        String str = ((Text) o).toString();
        HiveVarcharWritable hcw = new HiveVarcharWritable();
        hcw.set(str, ((VarcharTypeInfo) typeInfo).getLength());
        return hcw;
    }
    HiveVarcharWritable writable = (HiveVarcharWritable) o;
    if (doesWritableMatchTypeParams(writable)) {
        return writable;
    }
    return getWritableWithParams(writable);
}
Also used : HiveVarcharWritable(org.apache.hadoop.hive.serde2.io.HiveVarcharWritable) Text(org.apache.hadoop.io.Text)
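Contrast this with copyObject in Example 19: when the writable already matches the declared length, getPrimitiveWritableObject hands back the same instance, while copyObject always allocates. A sketch of the observable difference, reusing the illustrative inspector from the previous example:

HiveVarcharWritable w = new HiveVarcharWritable();
w.set("abc", 10);

assert oi.getPrimitiveWritableObject(w) == w;  // same instance when params match
assert oi.copyObject(w) != w;                  // always a distinct copy

Callers that need to hold a value across rows should therefore copy; the writable returned here is only safe for immediate consumption.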

Aggregations

HiveVarcharWritable (org.apache.hadoop.hive.serde2.io.HiveVarcharWritable): 37 uses
Text (org.apache.hadoop.io.Text): 22 uses
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 20 uses
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 18 uses
HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar): 17 uses
IntWritable (org.apache.hadoop.io.IntWritable): 16 uses
Test (org.junit.Test): 16 uses
HiveCharWritable (org.apache.hadoop.hive.serde2.io.HiveCharWritable): 15 uses
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector): 15 uses
BytesWritable (org.apache.hadoop.io.BytesWritable): 14 uses
LongWritable (org.apache.hadoop.io.LongWritable): 13 uses
DeferredJavaObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject): 12 uses
DeferredObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject): 12 uses
TimestampWritable (org.apache.hadoop.hive.serde2.io.TimestampWritable): 12 uses
ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable): 11 uses
DateWritable (org.apache.hadoop.hive.serde2.io.DateWritable): 11 uses
ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable): 11 uses
FloatWritable (org.apache.hadoop.io.FloatWritable): 11 uses
ArrayList (java.util.ArrayList): 10 uses
HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal): 10 uses