Search in sources:

Example 26 with DateWritableV2

Use of org.apache.hadoop.hive.serde2.io.DateWritableV2 in project hive by apache.

The class TestGenericUDFDateFormat defines the method runAndVerifyDate.

/**
 * Runs the given date_format() UDF on a date literal and asserts the formatted result.
 *
 * @param str       date string in yyyy-MM-dd form, or null to pass a NULL date argument
 * @param fmtText   format pattern supplied as the UDF's second argument
 * @param expResult expected formatted output, or null when a NULL result is expected
 * @param udf       an already-initialized date_format UDF instance
 * @throws HiveException if UDF evaluation fails
 */
private void runAndVerifyDate(String str, Text fmtText, String expResult, GenericUDF udf) throws HiveException {
    DateWritableV2 dateArg = (str == null) ? null : new DateWritableV2(Date.valueOf(str));
    DeferredObject[] args = new DeferredObject[] { new DeferredJavaObject(dateArg), new DeferredJavaObject(fmtText) };
    Text output = (Text) udf.evaluate(args);
    String actual = (output == null) ? null : output.toString();
    assertEquals("date_format() test ", expResult, actual);
}
Also used: DeferredJavaObject(org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject) DeferredObject(org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject) DateWritableV2(org.apache.hadoop.hive.serde2.io.DateWritableV2) Text(org.apache.hadoop.io.Text)

Example 27 with DateWritableV2

Use of org.apache.hadoop.hive.serde2.io.DateWritableV2 in project hive by apache.

The class TestGenericUDFDatetimeLegacyHybridCalendar defines the method testDateLegacyHybridCalendar.

@Test
public void testDateLegacyHybridCalendar() throws Exception {
    // The UDF takes and returns DATE-typed values, so both argument inspectors are date inspectors.
    ObjectInspector valueOI = PrimitiveObjectInspectorFactory.writableDateObjectInspector;
    GenericUDFDatetimeLegacyHybridCalendar udf = new GenericUDFDatetimeLegacyHybridCalendar();
    udf.initialize(new ObjectInspector[] { valueOI, valueOI });
    // Pairs of { input date, expected legacy hybrid-calendar date }.
    String[][] cases = {
        { "0000-12-30", "0001-01-01" },
        { "0601-03-07", "0601-03-04" },
        { "1582-10-14", "1582-10-04" },
        { "1582-10-15", "1582-10-15" },
        { "2015-03-07", "2015-03-07" }
    };
    for (String[] c : cases) {
        runAndVerify(udf, new DateWritableV2(Date.valueOf(c[0])), new DateWritableV2(Date.valueOf(c[1])));
    }
}
Also used: ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) DateWritableV2(org.apache.hadoop.hive.serde2.io.DateWritableV2) Test(org.junit.Test)

Example 28 with DateWritableV2

Use of org.apache.hadoop.hive.serde2.io.DateWritableV2 in project hive by apache.

The class LazySimpleSerializeWrite defines the method writeDate.

// Fast path: serialize a date directly from its days-since-epoch value, without
// constructing a Date object.
@Override
public void writeDate(int dateAsDays) throws IOException {
    beginPrimitive();
    DateWritableV2 writable = dateWritable;
    if (writable == null) {
        // Lazily allocate the scratch writable once and cache it for reuse across rows.
        writable = new DateWritableV2();
        dateWritable = writable;
    }
    writable.set(dateAsDays);
    LazyDate.writeUTF8(output, writable);
    finishPrimitive();
}
Also used: DateWritableV2(org.apache.hadoop.hive.serde2.io.DateWritableV2)

Example 29 with DateWritableV2

Use of org.apache.hadoop.hive.serde2.io.DateWritableV2 in project hive by apache.

The class DeserializeRead defines the method allocateCurrentWritable.

/*
 * This class reads one field at a time. Simple fields (long, double, int, ...)
 * are read into primitive current* members and need no allocation here; the
 * object-backed field types (Date, Timestamp, intervals, decimal) are read into
 * a reusable current* writable that this method lazily allocates.
 *
 * Complex type fields are handled by recursing into their element/field types.
 */
private void allocateCurrentWritable(TypeInfo typeInfo) {
    switch(typeInfo.getCategory()) {
        case PRIMITIVE:
            allocatePrimitiveCurrentWritable((PrimitiveTypeInfo) typeInfo);
            break;
        case LIST:
            allocateCurrentWritable(((ListTypeInfo) typeInfo).getListElementTypeInfo());
            break;
        case MAP: {
            MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
            allocateCurrentWritable(mapTypeInfo.getMapKeyTypeInfo());
            allocateCurrentWritable(mapTypeInfo.getMapValueTypeInfo());
            break;
        }
        case STRUCT:
            for (TypeInfo fieldTypeInfo : ((StructTypeInfo) typeInfo).getAllStructFieldTypeInfos()) {
                allocateCurrentWritable(fieldTypeInfo);
            }
            break;
        case UNION:
            for (TypeInfo objectTypeInfo : ((UnionTypeInfo) typeInfo).getAllUnionObjectTypeInfos()) {
                allocateCurrentWritable(objectTypeInfo);
            }
            break;
        default:
            throw new RuntimeException("Unexpected category " + typeInfo.getCategory());
    }
}

// Lazily allocates the scratch writable for an object-backed primitive category.
// Primitive categories read into simple current* members (int, long, ...) need
// no allocation and fall through the default case.
private void allocatePrimitiveCurrentWritable(PrimitiveTypeInfo primitiveTypeInfo) {
    switch(primitiveTypeInfo.getPrimitiveCategory()) {
        case DATE:
            if (currentDateWritable == null) {
                currentDateWritable = new DateWritableV2();
            }
            break;
        case TIMESTAMP:
            if (currentTimestampWritable == null) {
                currentTimestampWritable = new TimestampWritableV2();
            }
            break;
        case INTERVAL_YEAR_MONTH:
            if (currentHiveIntervalYearMonthWritable == null) {
                currentHiveIntervalYearMonthWritable = new HiveIntervalYearMonthWritable();
            }
            break;
        case INTERVAL_DAY_TIME:
            if (currentHiveIntervalDayTimeWritable == null) {
                currentHiveIntervalDayTimeWritable = new HiveIntervalDayTimeWritable();
            }
            break;
        case DECIMAL:
            if (currentHiveDecimalWritable == null) {
                currentHiveDecimalWritable = new HiveDecimalWritable();
            }
            break;
        default:
    }
}
Also used : HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) DateWritableV2(org.apache.hadoop.hive.serde2.io.DateWritableV2) HiveIntervalDayTimeWritable(org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) HiveIntervalYearMonthWritable(org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable) TimestampWritableV2(org.apache.hadoop.hive.serde2.io.TimestampWritableV2) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) MapTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo) UnionTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo) ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) UnionTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo)

Example 30 with DateWritableV2

Use of org.apache.hadoop.hive.serde2.io.DateWritableV2 in project hive by apache.

The class TeradataBinarySerde defines the method serializeField.

/**
 * Serializes a single field value into the Teradata binary record stream {@code out}.
 *
 * Null handling differs by type: numeric primitives (byte/short/int/long/double)
 * are written as 0 when the value is null, while the writable-backed types
 * (varchar, timestamp, date, char, decimal, binary) pass a possibly-null writable
 * down to the writer — presumably the writer encodes null itself; TODO confirm
 * against the output stream implementation.
 *
 * NOTE(review): the length/precision checks below use {@code assert}, so they are
 * only active when the JVM runs with -ea; in production they are no-ops.
 *
 * @param objectForField the field value to serialize, possibly null
 * @param oi             object inspector describing the field's category and type
 * @param ti             type info carrying length/precision/scale metadata
 * @throws IOException    if writing to the output stream fails
 * @throws SerDeException for complex or unrecognized types, which are unsupported
 */
private void serializeField(Object objectForField, ObjectInspector oi, TypeInfo ti) throws IOException, SerDeException {
    switch(oi.getCategory()) {
        case PRIMITIVE:
            PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi;
            // Every branch below returns (or throws), so control never falls
            // through to the complex-type cases.
            switch(poi.getPrimitiveCategory()) {
                // Teradata Type: BYTEINT
                case BYTE:
                    ByteObjectInspector boi = (ByteObjectInspector) poi;
                    byte b = 0;
                    if (objectForField != null) {
                        b = boi.get(objectForField);
                    }
                    out.write(b);
                    return;
                // Teradata Type: SMALLINT
                case SHORT:
                    ShortObjectInspector spoi = (ShortObjectInspector) poi;
                    short s = 0;
                    if (objectForField != null) {
                        s = spoi.get(objectForField);
                    }
                    out.writeShort(s);
                    return;
                // Teradata Type: INT
                case INT:
                    IntObjectInspector ioi = (IntObjectInspector) poi;
                    int i = 0;
                    if (objectForField != null) {
                        i = ioi.get(objectForField);
                    }
                    out.writeInt(i);
                    return;
                // Teradata Type: BIGINT
                case LONG:
                    LongObjectInspector loi = (LongObjectInspector) poi;
                    long l = 0;
                    if (objectForField != null) {
                        l = loi.get(objectForField);
                    }
                    out.writeLong(l);
                    return;
                // Teradata Type: FLOAT
                case DOUBLE:
                    DoubleObjectInspector doi = (DoubleObjectInspector) poi;
                    double d = 0;
                    if (objectForField != null) {
                        d = doi.get(objectForField);
                    }
                    out.writeDouble(d);
                    return;
                // Teradata Type: VARCHAR
                case VARCHAR:
                    HiveVarcharObjectInspector hvoi = (HiveVarcharObjectInspector) poi;
                    HiveVarcharWritable hv = hvoi.getPrimitiveWritableObject(objectForField);
                    // assert the length of varchar record fits into the table definition
                    if (hv != null) {
                        assert ((VarcharTypeInfo) ti).getLength() >= hv.getHiveVarchar().getCharacterLength();
                    }
                    out.writeVarChar(hv);
                    return;
                // Teradata Type: TIMESTAMP
                case TIMESTAMP:
                    TimestampObjectInspector tsoi = (TimestampObjectInspector) poi;
                    TimestampWritableV2 ts = tsoi.getPrimitiveWritableObject(objectForField);
                    // Byte width of the encoded timestamp depends on the configured precision.
                    out.writeTimestamp(ts, getTimeStampByteNum(timestampPrecision));
                    return;
                // Teradata Type: DATE
                case DATE:
                    DateObjectInspector dtoi = (DateObjectInspector) poi;
                    DateWritableV2 dw = dtoi.getPrimitiveWritableObject(objectForField);
                    out.writeDate(dw);
                    return;
                // Teradata Type: CHAR
                case CHAR:
                    HiveCharObjectInspector coi = (HiveCharObjectInspector) poi;
                    HiveCharWritable hc = coi.getPrimitiveWritableObject(objectForField);
                    // assert the length of char record fits into the table definition
                    if (hc != null) {
                        assert ((CharTypeInfo) ti).getLength() >= hc.getHiveChar().getCharacterLength();
                    }
                    // CHAR is fixed-width: declared length times bytes-per-char for the charset.
                    out.writeChar(hc, getCharByteNum(charCharset) * ((CharTypeInfo) ti).getLength());
                    return;
                // Teradata Type: DECIMAL
                case DECIMAL:
                    DecimalTypeInfo dtype = (DecimalTypeInfo) ti;
                    int precision = dtype.precision();
                    int scale = dtype.scale();
                    HiveDecimalObjectInspector hdoi = (HiveDecimalObjectInspector) poi;
                    HiveDecimalWritable hd = hdoi.getPrimitiveWritableObject(objectForField);
                    // assert the precision of decimal record fits into the table definition
                    if (hd != null) {
                        assert (dtype.getPrecision() >= hd.precision());
                    }
                    out.writeDecimal(hd, getDecimalByteNum(precision), scale);
                    return;
                // Teradata Type: VARBYTE
                case BINARY:
                    BinaryObjectInspector bnoi = (BinaryObjectInspector) poi;
                    BytesWritable byw = bnoi.getPrimitiveWritableObject(objectForField);
                    out.writeVarByte(byw);
                    return;
                default:
                    throw new SerDeException("Unrecognized type: " + poi.getPrimitiveCategory());
            }
        // Currently, serialization of complex types is not supported
        case LIST:
        case MAP:
        case STRUCT:
        default:
            throw new SerDeException("Unrecognized type: " + oi.getCategory());
    }
}
Also used: LongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector) DateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector) IntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector) BinaryObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector) ByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector) TimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector) ShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector) HiveDecimalObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector) HiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector) SerDeException(org.apache.hadoop.hive.serde2.SerDeException) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) HiveVarcharWritable(org.apache.hadoop.hive.serde2.io.HiveVarcharWritable) DateWritableV2(org.apache.hadoop.hive.serde2.io.DateWritableV2) HiveCharWritable(org.apache.hadoop.hive.serde2.io.HiveCharWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) TimestampWritableV2(org.apache.hadoop.hive.serde2.io.TimestampWritableV2) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) DoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector) HiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector)

Aggregations

DateWritableV2 (org.apache.hadoop.hive.serde2.io.DateWritableV2)73 Test (org.junit.Test)36 ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector)34 DeferredJavaObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject)29 DeferredObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject)29 Text (org.apache.hadoop.io.Text)29 TimestampWritableV2 (org.apache.hadoop.hive.serde2.io.TimestampWritableV2)28 Date (org.apache.hadoop.hive.common.type.Date)24 HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable)23 Timestamp (org.apache.hadoop.hive.common.type.Timestamp)21 PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)21 BytesWritable (org.apache.hadoop.io.BytesWritable)20 IntWritable (org.apache.hadoop.io.IntWritable)17 HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal)16 HiveIntervalDayTimeWritable (org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable)16 ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable)15 HiveIntervalYearMonthWritable (org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable)15 HiveChar (org.apache.hadoop.hive.common.type.HiveChar)14 HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar)14 DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable)14