Example usage of org.apache.hadoop.hive.serde2.io.TimestampWritableV2 in the Apache Hive project:
the writeTimestamp method of the LazySimpleSerializeWrite class.
/*
 * TIMESTAMP.
 */
@Override
public void writeTimestamp(Timestamp v) throws IOException {
  beginPrimitive();
  // Reuse a single writable across calls; allocate it on first use.
  TimestampWritableV2 writable = timestampWritable;
  if (writable == null) {
    writable = new TimestampWritableV2();
    timestampWritable = writable;
  }
  writable.set(v);
  LazyTimestamp.writeUTF8(output, writable);
  finishPrimitive();
}
Example usage of org.apache.hadoop.hive.serde2.io.TimestampWritableV2 in the Apache Hive project:
the allocateCurrentWritable method of the DeserializeRead class.
/*
 * This class reads one field at a time. Simple fields (long, double, int, ...)
 * go straight into primitive current* members; non-simple primitive types
 * (Date, Timestamp, intervals, Decimal) need a reusable writable, which this
 * method lazily allocates. Complex types are handled by recursing into their
 * element / key / value / member types.
 */
private void allocateCurrentWritable(TypeInfo typeInfo) {
  switch (typeInfo.getCategory()) {
  case PRIMITIVE:
    allocateCurrentPrimitiveWritable((PrimitiveTypeInfo) typeInfo);
    break;
  case LIST:
    allocateCurrentWritable(((ListTypeInfo) typeInfo).getListElementTypeInfo());
    break;
  case MAP:
    MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
    allocateCurrentWritable(mapTypeInfo.getMapKeyTypeInfo());
    allocateCurrentWritable(mapTypeInfo.getMapValueTypeInfo());
    break;
  case STRUCT:
    for (TypeInfo memberTypeInfo : ((StructTypeInfo) typeInfo).getAllStructFieldTypeInfos()) {
      allocateCurrentWritable(memberTypeInfo);
    }
    break;
  case UNION:
    for (TypeInfo memberTypeInfo : ((UnionTypeInfo) typeInfo).getAllUnionObjectTypeInfos()) {
      allocateCurrentWritable(memberTypeInfo);
    }
    break;
  default:
    throw new RuntimeException("Unexpected category " + typeInfo.getCategory());
  }
}

/*
 * Lazily allocates the reusable current* writable for one primitive category.
 * Categories read directly into primitive members need no allocation and fall
 * through the default arm.
 */
private void allocateCurrentPrimitiveWritable(PrimitiveTypeInfo primitiveTypeInfo) {
  switch (primitiveTypeInfo.getPrimitiveCategory()) {
  case DATE:
    if (currentDateWritable == null) {
      currentDateWritable = new DateWritableV2();
    }
    break;
  case TIMESTAMP:
    if (currentTimestampWritable == null) {
      currentTimestampWritable = new TimestampWritableV2();
    }
    break;
  case INTERVAL_YEAR_MONTH:
    if (currentHiveIntervalYearMonthWritable == null) {
      currentHiveIntervalYearMonthWritable = new HiveIntervalYearMonthWritable();
    }
    break;
  case INTERVAL_DAY_TIME:
    if (currentHiveIntervalDayTimeWritable == null) {
      currentHiveIntervalDayTimeWritable = new HiveIntervalDayTimeWritable();
    }
    break;
  case DECIMAL:
    if (currentHiveDecimalWritable == null) {
      currentHiveDecimalWritable = new HiveDecimalWritable();
    }
    break;
  default:
    break;
  }
}
Example usage of org.apache.hadoop.hive.serde2.io.TimestampWritableV2 in the Apache Hive project:
the serializeField method of the TeradataBinarySerde class.
/*
 * Serializes a single field into the Teradata binary record layout, dispatching
 * on the field's primitive category. For fixed-width numeric types a null field
 * is written as the type's zero value -- presumably nullness is recorded
 * separately in the record's null-indicator bits by the caller; TODO confirm.
 * Complex categories (LIST/MAP/STRUCT) are not supported and raise
 * SerDeException, as does any unrecognized primitive category.
 */
private void serializeField(Object objectForField, ObjectInspector oi, TypeInfo ti) throws IOException, SerDeException {
switch(oi.getCategory()) {
case PRIMITIVE:
PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi;
switch(poi.getPrimitiveCategory()) {
// Teradata Type: BYTEINT
case BYTE:
ByteObjectInspector boi = (ByteObjectInspector) poi;
byte b = 0;
if (objectForField != null) {
b = boi.get(objectForField);
}
out.write(b);
return;
// Teradata Type: SMALLINT
case SHORT:
ShortObjectInspector spoi = (ShortObjectInspector) poi;
short s = 0;
if (objectForField != null) {
s = spoi.get(objectForField);
}
out.writeShort(s);
return;
// Teradata Type: INT
case INT:
IntObjectInspector ioi = (IntObjectInspector) poi;
int i = 0;
if (objectForField != null) {
i = ioi.get(objectForField);
}
out.writeInt(i);
return;
// Teradata Type: BIGINT
case LONG:
LongObjectInspector loi = (LongObjectInspector) poi;
long l = 0;
if (objectForField != null) {
l = loi.get(objectForField);
}
out.writeLong(l);
return;
// Teradata Type: FLOAT
case DOUBLE:
DoubleObjectInspector doi = (DoubleObjectInspector) poi;
double d = 0;
if (objectForField != null) {
d = doi.get(objectForField);
}
out.writeDouble(d);
return;
// Teradata Type: VARCHAR
case VARCHAR:
HiveVarcharObjectInspector hvoi = (HiveVarcharObjectInspector) poi;
// hv may be null here; writeVarChar is presumably null-tolerant -- TODO confirm.
HiveVarcharWritable hv = hvoi.getPrimitiveWritableObject(objectForField);
// assert the length of varchar record fits into the table definition
if (hv != null) {
assert ((VarcharTypeInfo) ti).getLength() >= hv.getHiveVarchar().getCharacterLength();
}
out.writeVarChar(hv);
return;
// Teradata Type: TIMESTAMP
case TIMESTAMP:
TimestampObjectInspector tsoi = (TimestampObjectInspector) poi;
TimestampWritableV2 ts = tsoi.getPrimitiveWritableObject(objectForField);
// Byte width of the on-disk timestamp depends on the configured precision.
out.writeTimestamp(ts, getTimeStampByteNum(timestampPrecision));
return;
// Teradata Type: DATE
case DATE:
DateObjectInspector dtoi = (DateObjectInspector) poi;
DateWritableV2 dw = dtoi.getPrimitiveWritableObject(objectForField);
out.writeDate(dw);
return;
// Teradata Type: CHAR
case CHAR:
HiveCharObjectInspector coi = (HiveCharObjectInspector) poi;
HiveCharWritable hc = coi.getPrimitiveWritableObject(objectForField);
// assert the length of char record fits into the table definition
if (hc != null) {
assert ((CharTypeInfo) ti).getLength() >= hc.getHiveChar().getCharacterLength();
}
// Fixed-width CHAR: total byte length = bytes-per-char (charset-dependent) * declared length.
out.writeChar(hc, getCharByteNum(charCharset) * ((CharTypeInfo) ti).getLength());
return;
// Teradata Type: DECIMAL
case DECIMAL:
DecimalTypeInfo dtype = (DecimalTypeInfo) ti;
int precision = dtype.precision();
int scale = dtype.scale();
HiveDecimalObjectInspector hdoi = (HiveDecimalObjectInspector) poi;
HiveDecimalWritable hd = hdoi.getPrimitiveWritableObject(objectForField);
// assert the precision of decimal record fits into the table definition
if (hd != null) {
assert (dtype.getPrecision() >= hd.precision());
}
out.writeDecimal(hd, getDecimalByteNum(precision), scale);
return;
// Teradata Type: VARBYTE
case BINARY:
BinaryObjectInspector bnoi = (BinaryObjectInspector) poi;
BytesWritable byw = bnoi.getPrimitiveWritableObject(objectForField);
out.writeVarByte(byw);
return;
default:
throw new SerDeException("Unrecognized type: " + poi.getPrimitiveCategory());
}
// Currently, serialization of complex types is not supported
case LIST:
case MAP:
case STRUCT:
default:
throw new SerDeException("Unrecognized type: " + oi.getCategory());
}
}
Example usage of org.apache.hadoop.hive.serde2.io.TimestampWritableV2 in the Apache Hive project:
the getPrimitiveWritableObject method of the WritableHiveCharObjectInspector class.
/**
 * Converts {@code o} into a {@link HiveCharWritable} that conforms to this
 * inspector's declared char length.
 *
 * <p>Returns {@code null} for a {@code null} input. Writables of other
 * primitive types (Text, timestamp, decimal, numeric, boolean) are converted
 * via {@code toString()} and set with the declared length. A
 * {@code HiveCharWritable} that already matches the type parameters is
 * returned as-is; otherwise a length-adjusted copy is returned.
 */
@Override
public HiveCharWritable getPrimitiveWritableObject(Object o) {
  // then output new writable with correct params.
  if (o == null) {
    return null;
  }
  if ((o instanceof Text) || (o instanceof TimestampWritableV2) || (o instanceof HiveDecimalWritable) || (o instanceof DoubleWritable) || (o instanceof FloatWritable) || (o instanceof LongWritable) || (o instanceof IntWritable) || (o instanceof BooleanWritable)) {
    String str = o.toString();
    HiveCharWritable hcw = new HiveCharWritable();
    hcw.set(str, ((CharTypeInfo) typeInfo).getLength());
    return hcw;
  }
  HiveCharWritable writable = (HiveCharWritable) o;
  // Reuse the local instead of re-casting o a second time.
  if (doesWritableMatchTypeParams(writable)) {
    return writable;
  }
  return getWritableWithParams(writable);
}
Example usage of org.apache.hadoop.hive.serde2.io.TimestampWritableV2 in the Apache Hive project:
the runAndVerify method of the TestGenericUDFAddMonths class.
// Evaluates add_months(ts, months, dateFormat) through the UDF and checks the
// textual result against the expected value (null output maps to null).
private void runAndVerify(Timestamp ts, int months, Text dateFormat, String expResult, GenericUDF udf) throws HiveException {
  DeferredObject[] args = {
      new DeferredJavaObject(new TimestampWritableV2(ts)),
      new DeferredJavaObject(new IntWritable(months)),
      new DeferredJavaObject(dateFormat)
  };
  Text output = (Text) udf.evaluate(args);
  String actual = (output != null) ? output.toString() : null;
  assertEquals("add_months() test for timestamp", expResult, actual);
}
Aggregations