Use of org.apache.hadoop.hive.serde2.io.DateWritableV2 in project hive by apache.
Class TestGenericUDFDateFormat, method runAndVerifyDate:
private void runAndVerifyDate(String str, Text fmtText, String expResult, GenericUDF udf) throws HiveException {
  DeferredObject valueObj0 = new DeferredJavaObject(str != null ? new DateWritableV2(Date.valueOf(str)) : null);
  DeferredObject valueObj1 = new DeferredJavaObject(fmtText);
  DeferredObject[] args = { valueObj0, valueObj1 };
  Text output = (Text) udf.evaluate(args);
  assertEquals("date_format() test ", expResult, output != null ? output.toString() : null);
}
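For context, a minimal sketch of how this helper might be driven; the initialization below is an assumption, not part of the snippet above. The format argument must be a constant string inspector so that date_format can resolve the pattern at initialize time:

// Hypothetical driver for runAndVerifyDate (assumed setup, mirroring Hive's UDF test conventions).
GenericUDFDateFormat udf = new GenericUDFDateFormat();
ObjectInspector dateOI = PrimitiveObjectInspectorFactory.writableDateObjectInspector;
Text fmtText = new Text("EEEE");
ObjectInspector fmtOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
    TypeInfoFactory.stringTypeInfo, fmtText);
udf.initialize(new ObjectInspector[] { dateOI, fmtOI });
runAndVerifyDate("2015-04-05", fmtText, "Sunday", udf); // 2015-04-05 was a Sunday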
Use of org.apache.hadoop.hive.serde2.io.DateWritableV2 in project hive by apache.
Class TestGenericUDFDatetimeLegacyHybridCalendar, method testDateLegacyHybridCalendar. The test checks that dates before the Gregorian cutover (1582-10-15) shift between the proleptic Gregorian calendar and the legacy hybrid Julian/Gregorian calendar, while dates on or after the cutover pass through unchanged:
@Test
public void testDateLegacyHybridCalendar() throws Exception {
  ObjectInspector valueOI = PrimitiveObjectInspectorFactory.writableDateObjectInspector;
  GenericUDFDatetimeLegacyHybridCalendar udf = new GenericUDFDatetimeLegacyHybridCalendar();
  ObjectInspector[] args2 = { valueOI, valueOI };
  udf.initialize(args2);
  runAndVerify(udf, new DateWritableV2(Date.valueOf("0000-12-30")), new DateWritableV2(Date.valueOf("0001-01-01")));
  runAndVerify(udf, new DateWritableV2(Date.valueOf("0601-03-07")), new DateWritableV2(Date.valueOf("0601-03-04")));
  runAndVerify(udf, new DateWritableV2(Date.valueOf("1582-10-14")), new DateWritableV2(Date.valueOf("1582-10-04")));
  runAndVerify(udf, new DateWritableV2(Date.valueOf("1582-10-15")), new DateWritableV2(Date.valueOf("1582-10-15")));
  runAndVerify(udf, new DateWritableV2(Date.valueOf("2015-03-07")), new DateWritableV2(Date.valueOf("2015-03-07")));
}
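The gap the UDF compensates for can be reproduced with plain JDK classes. A hedged illustration (not from the Hive source; variable names are invented): java.util.GregorianCalendar uses the hybrid calendar by default, and pushing its cutover to Long.MIN_VALUE makes it behave as a proleptic Gregorian calendar.

// Illustration of the hybrid/proleptic gap around the 1582 cutover.
// Imports: java.text.SimpleDateFormat, java.util.GregorianCalendar, java.util.TimeZone.
TimeZone utc = TimeZone.getTimeZone("UTC");
SimpleDateFormat hybrid = new SimpleDateFormat("yyyy-MM-dd");
hybrid.setTimeZone(utc);
SimpleDateFormat proleptic = new SimpleDateFormat("yyyy-MM-dd");
GregorianCalendar gc = new GregorianCalendar(utc);
gc.setGregorianChange(new java.util.Date(Long.MIN_VALUE)); // proleptic Gregorian
proleptic.setCalendar(gc);
java.util.Date d = hybrid.parse("1582-10-04"); // last Julian day before the cutover (throws ParseException)
System.out.println(proleptic.format(d)); // prints 1582-10-14, matching the test pair above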
Use of org.apache.hadoop.hive.serde2.io.DateWritableV2 in project hive by apache.
Class LazySimpleSerializeWrite, method writeDate:
// We provide a faster way to write a date without a Date object.
@Override
public void writeDate(int dateAsDays) throws IOException {
  beginPrimitive();
  if (dateWritable == null) {
    dateWritable = new DateWritableV2();
  }
  dateWritable.set(dateAsDays);
  LazyDate.writeUTF8(output, dateWritable);
  finishPrimitive();
}
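DateWritableV2 stores a date as the number of days since the Unix epoch (1970-01-01), which is what the dateAsDays parameter carries. A minimal usage sketch, assuming serializeWrite is an already-initialized LazySimpleSerializeWrite (the variable name is invented):

// Write 2015-03-07 without materializing a Date object.
int daysSinceEpoch = (int) java.time.LocalDate.of(2015, 3, 7).toEpochDay();
serializeWrite.writeDate(daysSinceEpoch);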
Use of org.apache.hadoop.hive.serde2.io.DateWritableV2 in project hive by apache.
Class DeserializeRead, method allocateCurrentWritable:
/*
 * This class is used to read one field at a time. Simple fields like long, double, int are read
 * into primitive current* members; non-simple field types like Date, Timestamp, etc. are
 * read into a current object that this method allocates.
 *
 * This method handles complex type fields by recursively calling itself.
 */
private void allocateCurrentWritable(TypeInfo typeInfo) {
  switch (typeInfo.getCategory()) {
  case PRIMITIVE:
    switch (((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory()) {
    case DATE:
      if (currentDateWritable == null) {
        currentDateWritable = new DateWritableV2();
      }
      break;
    case TIMESTAMP:
      if (currentTimestampWritable == null) {
        currentTimestampWritable = new TimestampWritableV2();
      }
      break;
    case INTERVAL_YEAR_MONTH:
      if (currentHiveIntervalYearMonthWritable == null) {
        currentHiveIntervalYearMonthWritable = new HiveIntervalYearMonthWritable();
      }
      break;
    case INTERVAL_DAY_TIME:
      if (currentHiveIntervalDayTimeWritable == null) {
        currentHiveIntervalDayTimeWritable = new HiveIntervalDayTimeWritable();
      }
      break;
    case DECIMAL:
      if (currentHiveDecimalWritable == null) {
        currentHiveDecimalWritable = new HiveDecimalWritable();
      }
      break;
    default:
      // Other primitive categories are read into simple current* members and need no writable.
    }
    break;
  case LIST:
    allocateCurrentWritable(((ListTypeInfo) typeInfo).getListElementTypeInfo());
    break;
  case MAP:
    allocateCurrentWritable(((MapTypeInfo) typeInfo).getMapKeyTypeInfo());
    allocateCurrentWritable(((MapTypeInfo) typeInfo).getMapValueTypeInfo());
    break;
  case STRUCT:
    for (TypeInfo fieldTypeInfo : ((StructTypeInfo) typeInfo).getAllStructFieldTypeInfos()) {
      allocateCurrentWritable(fieldTypeInfo);
    }
    break;
  case UNION:
    for (TypeInfo fieldTypeInfo : ((UnionTypeInfo) typeInfo).getAllUnionObjectTypeInfos()) {
      allocateCurrentWritable(fieldTypeInfo);
    }
    break;
  default:
    throw new RuntimeException("Unexpected category " + typeInfo.getCategory());
  }
}
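For a nested type, the recursion touches every leaf. A hypothetical call from inside the class (the map type below is an invented example):

// map<date,decimal(10,2)>: allocates currentDateWritable for the key type
// and currentHiveDecimalWritable for the value type.
TypeInfo mapType = TypeInfoFactory.getMapTypeInfo(
    TypeInfoFactory.dateTypeInfo,
    TypeInfoFactory.getDecimalTypeInfo(10, 2));
allocateCurrentWritable(mapType);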
Use of org.apache.hadoop.hive.serde2.io.DateWritableV2 in project hive by apache.
Class TeradataBinarySerde, method serializeField:
private void serializeField(Object objectForField, ObjectInspector oi, TypeInfo ti)
    throws IOException, SerDeException {
  switch (oi.getCategory()) {
  case PRIMITIVE:
    PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi;
    switch (poi.getPrimitiveCategory()) {
    // Teradata Type: BYTEINT
    case BYTE:
      ByteObjectInspector boi = (ByteObjectInspector) poi;
      byte b = 0;
      if (objectForField != null) {
        b = boi.get(objectForField);
      }
      out.write(b);
      return;
    // Teradata Type: SMALLINT
    case SHORT:
      ShortObjectInspector spoi = (ShortObjectInspector) poi;
      short s = 0;
      if (objectForField != null) {
        s = spoi.get(objectForField);
      }
      out.writeShort(s);
      return;
    // Teradata Type: INT
    case INT:
      IntObjectInspector ioi = (IntObjectInspector) poi;
      int i = 0;
      if (objectForField != null) {
        i = ioi.get(objectForField);
      }
      out.writeInt(i);
      return;
    // Teradata Type: BIGINT
    case LONG:
      LongObjectInspector loi = (LongObjectInspector) poi;
      long l = 0;
      if (objectForField != null) {
        l = loi.get(objectForField);
      }
      out.writeLong(l);
      return;
    // Teradata Type: FLOAT
    case DOUBLE:
      DoubleObjectInspector doi = (DoubleObjectInspector) poi;
      double d = 0;
      if (objectForField != null) {
        d = doi.get(objectForField);
      }
      out.writeDouble(d);
      return;
    // Teradata Type: VARCHAR
    case VARCHAR:
      HiveVarcharObjectInspector hvoi = (HiveVarcharObjectInspector) poi;
      HiveVarcharWritable hv = hvoi.getPrimitiveWritableObject(objectForField);
      // assert the length of varchar record fits into the table definition
      if (hv != null) {
        assert ((VarcharTypeInfo) ti).getLength() >= hv.getHiveVarchar().getCharacterLength();
      }
      out.writeVarChar(hv);
      return;
    // Teradata Type: TIMESTAMP
    case TIMESTAMP:
      TimestampObjectInspector tsoi = (TimestampObjectInspector) poi;
      TimestampWritableV2 ts = tsoi.getPrimitiveWritableObject(objectForField);
      out.writeTimestamp(ts, getTimeStampByteNum(timestampPrecision));
      return;
    // Teradata Type: DATE
    case DATE:
      DateObjectInspector dtoi = (DateObjectInspector) poi;
      DateWritableV2 dw = dtoi.getPrimitiveWritableObject(objectForField);
      out.writeDate(dw);
      return;
    // Teradata Type: CHAR
    case CHAR:
      HiveCharObjectInspector coi = (HiveCharObjectInspector) poi;
      HiveCharWritable hc = coi.getPrimitiveWritableObject(objectForField);
      // assert the length of char record fits into the table definition
      if (hc != null) {
        assert ((CharTypeInfo) ti).getLength() >= hc.getHiveChar().getCharacterLength();
      }
      out.writeChar(hc, getCharByteNum(charCharset) * ((CharTypeInfo) ti).getLength());
      return;
    // Teradata Type: DECIMAL
    case DECIMAL:
      DecimalTypeInfo dtype = (DecimalTypeInfo) ti;
      int precision = dtype.precision();
      int scale = dtype.scale();
      HiveDecimalObjectInspector hdoi = (HiveDecimalObjectInspector) poi;
      HiveDecimalWritable hd = hdoi.getPrimitiveWritableObject(objectForField);
      // assert the precision of decimal record fits into the table definition
      if (hd != null) {
        assert (dtype.getPrecision() >= hd.precision());
      }
      out.writeDecimal(hd, getDecimalByteNum(precision), scale);
      return;
    // Teradata Type: VARBYTE
    case BINARY:
      BinaryObjectInspector bnoi = (BinaryObjectInspector) poi;
      BytesWritable byw = bnoi.getPrimitiveWritableObject(objectForField);
      out.writeVarByte(byw);
      return;
    default:
      throw new SerDeException("Unrecognized type: " + poi.getPrimitiveCategory());
    }
    // Currently, serialization of complex types is not supported
  case LIST:
  case MAP:
  case STRUCT:
  default:
    throw new SerDeException("Unrecognized type: " + oi.getCategory());
  }
}
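For the DATE branch, Teradata's binary layout packs the date into a single 4-byte integer. A hedged sketch of that encoding (an assumption about the Teradata wire format, not code from the SerDe; the helper name is invented):

// Teradata DATE encoding: (year - 1900) * 10000 + month * 100 + day,
// e.g. 2015-03-07 -> 1150307, written as a 4-byte integer.
static int teradataDateInt(java.time.LocalDate d) {
  return (d.getYear() - 1900) * 10000 + d.getMonthValue() * 100 + d.getDayOfMonth();
}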