Use of org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable in project hive by apache.
From class TestGenericUDFOPMinus, method testDateMinusIntervalYearMonth: subtracting a year-month interval from a date.
@Test
public void testDateMinusIntervalYearMonth() throws Exception {
  GenericUDFOPMinus udf = new GenericUDFOPMinus();

  // 2004-02-15 minus 2 years 8 months.
  DateWritable left = new DateWritable(Date.valueOf("2004-02-15"));
  HiveIntervalYearMonthWritable right =
      new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-8"));
  ObjectInspector[] inputOIs = {
      PrimitiveObjectInspectorFactory.writableDateObjectInspector,
      PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector
  };
  DeferredObject[] args = { new DeferredJavaObject(left), new DeferredJavaObject(right) };

  // date - interval_year_month resolves to a date result type.
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  Assert.assertEquals(TypeInfoFactory.dateTypeInfo, oi.getTypeInfo());
  DateWritable res = (DateWritable) udf.evaluate(args);
  Assert.assertEquals(Date.valueOf("2001-06-15"), res.get());
}
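For context, HiveIntervalYearMonth normalizes its "y-m" string form to a single total-month count, which is what the writable carries internally. A minimal sketch using its public accessors:

HiveIntervalYearMonth iv = HiveIntervalYearMonth.valueOf("2-8");
// "2-8" parses to 2 years and 8 months, i.e. 32 total months.
assert iv.getYears() == 2;
assert iv.getMonths() == 8;
assert iv.getTotalMonths() == 32;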
Use of org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable in project hive by apache.
From class TestGenericUDFOPMinus, method testTimestampMinusIntervalYearMonth: subtracting a year-month interval from a timestamp.
@Test
public void testTimestampMinusIntervalYearMonth() throws Exception {
  GenericUDFOPMinus udf = new GenericUDFOPMinus();

  // 2004-01-15 01:02:03.123456789 minus 2 years 2 months.
  TimestampWritable left = new TimestampWritable(Timestamp.valueOf("2004-01-15 01:02:03.123456789"));
  HiveIntervalYearMonthWritable right =
      new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-2"));
  ObjectInspector[] inputOIs = {
      PrimitiveObjectInspectorFactory.writableTimestampObjectInspector,
      PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector
  };
  DeferredObject[] args = { new DeferredJavaObject(left), new DeferredJavaObject(right) };

  // timestamp - interval_year_month resolves to a timestamp result type.
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo());
  TimestampWritable res = (TimestampWritable) udf.evaluate(args);
  Assert.assertEquals(Timestamp.valueOf("2001-11-15 01:02:03.123456789"), res.getTimestamp());
}
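Year-month arithmetic only shifts the year and month fields; the day, time of day, and nanosecond fraction carry through unchanged, which is why the expected result keeps the full .123456789 fraction. A quick sketch with java.sql.Timestamp, which the test above already uses:

Timestamp ts = Timestamp.valueOf("2004-01-15 01:02:03.123456789");
// Subtracting 2-2 moves 2004-01 back to 2001-11; everything else is preserved.
assert ts.getNanos() == 123456789;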
Use of org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable in project hive by apache.
From class TestGenericUDFOPPlus, method testIntervalYearMonthPlusIntervalYearMonth: adding two year-month intervals.
@Test
public void testIntervalYearMonthPlusIntervalYearMonth() throws Exception {
  GenericUDFOPPlus udf = new GenericUDFOPPlus();

  // 1 year 2 months plus 1 year 11 months.
  HiveIntervalYearMonthWritable left =
      new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("1-2"));
  HiveIntervalYearMonthWritable right =
      new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("1-11"));
  ObjectInspector[] inputOIs = {
      PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector,
      PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector
  };
  DeferredObject[] args = { new DeferredJavaObject(left), new DeferredJavaObject(right) };

  // interval + interval resolves to an interval result type; months carry into years.
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  Assert.assertEquals(TypeInfoFactory.intervalYearMonthTypeInfo, oi.getTypeInfo());
  HiveIntervalYearMonthWritable res = (HiveIntervalYearMonthWritable) udf.evaluate(args);
  Assert.assertEquals(HiveIntervalYearMonth.valueOf("3-1"), res.getHiveIntervalYearMonth());
}
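The expected "3-1" follows from plain month arithmetic with a carry; a small sketch (the int constructor taking total months is the same one HiveIntervalYearMonthWritable uses internally):

int totalMonths = HiveIntervalYearMonth.valueOf("1-2").getTotalMonths()
    + HiveIntervalYearMonth.valueOf("1-11").getTotalMonths();
// 14 + 23 = 37 months, i.e. 3 years and 1 month.
assert totalMonths == 37;
assert new HiveIntervalYearMonth(totalMonths).equals(HiveIntervalYearMonth.valueOf("3-1"));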
Use of org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable in project hive by apache.
From class TestGenericUDFOPPlus, method testIntervalYearMonthPlusTimestamp: adding a year-month interval to a timestamp.
@Test
public void testIntervalYearMonthPlusTimestamp() throws Exception {
  GenericUDFOPPlus udf = new GenericUDFOPPlus();

  // 2 years 2 months plus 2001-11-15 01:02:03.123456789.
  HiveIntervalYearMonthWritable left =
      new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-2"));
  TimestampWritable right = new TimestampWritable(Timestamp.valueOf("2001-11-15 01:02:03.123456789"));
  ObjectInspector[] inputOIs = {
      PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector,
      PrimitiveObjectInspectorFactory.writableTimestampObjectInspector
  };
  DeferredObject[] args = { new DeferredJavaObject(left), new DeferredJavaObject(right) };

  // interval + timestamp resolves to a timestamp result type.
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo());
  TimestampWritable res = (TimestampWritable) udf.evaluate(args);
  Assert.assertEquals(Timestamp.valueOf("2004-01-15 01:02:03.123456789"), res.getTimestamp());
}
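This is the earlier minus test with the operands reversed: adding 2-2 back onto the minus result recovers the original timestamp. Note also that the writable is a thin wrapper around the total-month count, so intervals built from a string and from a raw month count compare equal; a minimal sketch:

HiveIntervalYearMonthWritable a =
    new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-2"));
HiveIntervalYearMonthWritable b =
    new HiveIntervalYearMonthWritable(new HiveIntervalYearMonth(26));
// Both represent 26 total months.
assert a.compareTo(b) == 0;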
Use of org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable in project hive by apache.
From class VectorVerifyFast, method serializeWrite: recursively dispatching a writable value to a SerializeWrite implementation based on its TypeInfo.
// Recursively serialize a (possibly nested) Hive value: nulls are written
// directly; primitives dispatch on PrimitiveCategory; LIST/MAP/STRUCT/UNION
// recurse over their children with the appropriate separators.
public static void serializeWrite(SerializeWrite serializeWrite, TypeInfo typeInfo, Object object) throws IOException {
  if (object == null) {
    serializeWrite.writeNull();
    return;
  }
  switch (typeInfo.getCategory()) {
  case PRIMITIVE:
    {
      PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) typeInfo;
      switch (primitiveTypeInfo.getPrimitiveCategory()) {
      case BOOLEAN:
        {
          boolean value = ((BooleanWritable) object).get();
          serializeWrite.writeBoolean(value);
        }
        break;
      case BYTE:
        {
          byte value = ((ByteWritable) object).get();
          serializeWrite.writeByte(value);
        }
        break;
      case SHORT:
        {
          short value = ((ShortWritable) object).get();
          serializeWrite.writeShort(value);
        }
        break;
      case INT:
        {
          int value = ((IntWritable) object).get();
          serializeWrite.writeInt(value);
        }
        break;
      case LONG:
        {
          long value = ((LongWritable) object).get();
          serializeWrite.writeLong(value);
        }
        break;
      case FLOAT:
        {
          float value = ((FloatWritable) object).get();
          serializeWrite.writeFloat(value);
        }
        break;
      case DOUBLE:
        {
          double value = ((DoubleWritable) object).get();
          serializeWrite.writeDouble(value);
        }
        break;
      case STRING:
        {
          Text value = (Text) object;
          byte[] stringBytes = value.getBytes();
          int stringLength = stringBytes.length;
          serializeWrite.writeString(stringBytes, 0, stringLength);
        }
        break;
      case CHAR:
        {
          HiveChar value = ((HiveCharWritable) object).getHiveChar();
          serializeWrite.writeHiveChar(value);
        }
        break;
      case VARCHAR:
        {
          HiveVarchar value = ((HiveVarcharWritable) object).getHiveVarchar();
          serializeWrite.writeHiveVarchar(value);
        }
        break;
      case DECIMAL:
        {
          // Decimals need the declared scale from the type, not just the value.
          HiveDecimal value = ((HiveDecimalWritable) object).getHiveDecimal();
          DecimalTypeInfo decTypeInfo = (DecimalTypeInfo) primitiveTypeInfo;
          serializeWrite.writeHiveDecimal(value, decTypeInfo.scale());
        }
        break;
      case DATE:
        {
          Date value = ((DateWritable) object).get();
          serializeWrite.writeDate(value);
        }
        break;
      case TIMESTAMP:
        {
          Timestamp value = ((TimestampWritable) object).getTimestamp();
          serializeWrite.writeTimestamp(value);
        }
        break;
      case INTERVAL_YEAR_MONTH:
        {
          HiveIntervalYearMonth value = ((HiveIntervalYearMonthWritable) object).getHiveIntervalYearMonth();
          serializeWrite.writeHiveIntervalYearMonth(value);
        }
        break;
      case INTERVAL_DAY_TIME:
        {
          HiveIntervalDayTime value = ((HiveIntervalDayTimeWritable) object).getHiveIntervalDayTime();
          serializeWrite.writeHiveIntervalDayTime(value);
        }
        break;
      case BINARY:
        {
          // Use getLength(), not getBytes().length: the backing array may be padded.
          BytesWritable byteWritable = (BytesWritable) object;
          byte[] binaryBytes = byteWritable.getBytes();
          int length = byteWritable.getLength();
          serializeWrite.writeBinary(binaryBytes, 0, length);
        }
        break;
      default:
        throw new Error("Unknown primitive category " + primitiveTypeInfo.getPrimitiveCategory().name());
      }
    }
    break;
  case LIST:
    {
      ListTypeInfo listTypeInfo = (ListTypeInfo) typeInfo;
      TypeInfo elementTypeInfo = listTypeInfo.getListElementTypeInfo();
      ArrayList<Object> elements = (ArrayList<Object>) object;
      serializeWrite.beginList(elements);
      boolean isFirst = true;
      for (Object elementObject : elements) {
        if (isFirst) {
          isFirst = false;
        } else {
          serializeWrite.separateList();
        }
        if (elementObject == null) {
          serializeWrite.writeNull();
        } else {
          serializeWrite(serializeWrite, elementTypeInfo, elementObject);
        }
      }
      serializeWrite.finishList();
    }
    break;
  case MAP:
    {
      MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
      TypeInfo keyTypeInfo = mapTypeInfo.getMapKeyTypeInfo();
      TypeInfo valueTypeInfo = mapTypeInfo.getMapValueTypeInfo();
      HashMap<Object, Object> hashMap = (HashMap<Object, Object>) object;
      serializeWrite.beginMap(hashMap);
      boolean isFirst = true;
      for (Map.Entry<Object, Object> entry : hashMap.entrySet()) {
        if (isFirst) {
          isFirst = false;
        } else {
          serializeWrite.separateKeyValuePair();
        }
        if (entry.getKey() == null) {
          serializeWrite.writeNull();
        } else {
          serializeWrite(serializeWrite, keyTypeInfo, entry.getKey());
        }
        serializeWrite.separateKey();
        if (entry.getValue() == null) {
          serializeWrite.writeNull();
        } else {
          serializeWrite(serializeWrite, valueTypeInfo, entry.getValue());
        }
      }
      serializeWrite.finishMap();
    }
    break;
  case STRUCT:
    {
      StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
      ArrayList<TypeInfo> fieldTypeInfos = structTypeInfo.getAllStructFieldTypeInfos();
      ArrayList<Object> fieldValues = (ArrayList<Object>) object;
      final int size = fieldValues.size();
      serializeWrite.beginStruct(fieldValues);
      boolean isFirst = true;
      for (int i = 0; i < size; i++) {
        if (isFirst) {
          isFirst = false;
        } else {
          serializeWrite.separateStruct();
        }
        serializeWrite(serializeWrite, fieldTypeInfos.get(i), fieldValues.get(i));
      }
      serializeWrite.finishStruct();
    }
    break;
  case UNION:
    {
      UnionTypeInfo unionTypeInfo = (UnionTypeInfo) typeInfo;
      List<TypeInfo> fieldTypeInfos = unionTypeInfo.getAllUnionObjectTypeInfos();
      StandardUnionObjectInspector.StandardUnion standardUnion = (StandardUnionObjectInspector.StandardUnion) object;
      // The tag selects which of the union's member types the value carries.
      byte tag = standardUnion.getTag();
      serializeWrite.beginUnion(tag);
      serializeWrite(serializeWrite, fieldTypeInfos.get(tag), standardUnion.getObject());
      serializeWrite.finishUnion();
    }
    break;
  default:
    throw new Error("Unknown category " + typeInfo.getCategory().name());
  }
}
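A short usage sketch, to show how the dispatcher above is typically driven. The concrete choice of SerializeWrite here (a BinarySortableSerializeWrite over a one-field row, bound to a ByteStream.Output) is an assumption for illustration; any SerializeWrite implementation would work the same way:

// Hypothetical single-field example; the writer and output classes are from
// the org.apache.hadoop.hive.serde2 packages.
ByteStream.Output output = new ByteStream.Output();
SerializeWrite writer = new BinarySortableSerializeWrite(1);
writer.set(output);
HiveIntervalYearMonthWritable value =
    new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("3-1"));
VectorVerifyFast.serializeWrite(writer, TypeInfoFactory.intervalYearMonthTypeInfo, value);
// output now holds the binary-sortable encoding of the interval.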