use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.timestampTypeInfo in project hive by apache.
the class GenericUDFOPDTIPlus method initialize.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length != 2) {
    throw new UDFArgumentException(opName + " requires two arguments.");
  }
  PrimitiveObjectInspector resultOI = null;
  for (int i = 0; i < 2; i++) {
    Category category = arguments[i].getCategory();
    if (category != Category.PRIMITIVE) {
      throw new UDFArgumentTypeException(i, "The " + GenericUDFUtils.getOrdinal(i + 1)
          + " argument of " + opName + " is expected to be a "
          + Category.PRIMITIVE.toString().toLowerCase() + " type, but "
          + category.toString().toLowerCase() + " is found");
    }
  }
  inputOIs = new PrimitiveObjectInspector[] {
      (PrimitiveObjectInspector) arguments[0],
      (PrimitiveObjectInspector) arguments[1] };
  PrimitiveObjectInspector leftOI = inputOIs[0];
  PrimitiveObjectInspector rightOI = inputOIs[1];
  // Supported operations:
  //   IntervalYearMonth + IntervalYearMonth = IntervalYearMonth
  //   IntervalYearMonth + Date = Date (operands reversible)
  //   IntervalYearMonth + Timestamp = Timestamp (operands reversible)
  //   IntervalDayTime + IntervalDayTime = IntervalDayTime
  //   IntervalDayTime + Date = Timestamp (operands reversible)
  //   IntervalDayTime + Timestamp = Timestamp (operands reversible)
  if (checkArgs(PrimitiveCategory.INTERVAL_YEAR_MONTH, PrimitiveCategory.INTERVAL_YEAR_MONTH)) {
    plusOpType = OperationType.INTERVALYM_PLUS_INTERVALYM;
    intervalArg1Idx = 0;
    intervalArg2Idx = 1;
    resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        TypeInfoFactory.intervalYearMonthTypeInfo);
  } else if (checkArgs(PrimitiveCategory.DATE, PrimitiveCategory.INTERVAL_YEAR_MONTH)) {
    plusOpType = OperationType.INTERVALYM_PLUS_DATE;
    dtArgIdx = 0;
    intervalArg1Idx = 1;
    resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        TypeInfoFactory.dateTypeInfo);
  } else if (checkArgs(PrimitiveCategory.INTERVAL_YEAR_MONTH, PrimitiveCategory.DATE)) {
    plusOpType = OperationType.INTERVALYM_PLUS_DATE;
    intervalArg1Idx = 0;
    dtArgIdx = 1;
    resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        TypeInfoFactory.dateTypeInfo);
  } else if (checkArgs(PrimitiveCategory.TIMESTAMP, PrimitiveCategory.INTERVAL_YEAR_MONTH)) {
    plusOpType = OperationType.INTERVALYM_PLUS_TIMESTAMP;
    dtArgIdx = 0;
    intervalArg1Idx = 1;
    resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        TypeInfoFactory.timestampTypeInfo);
  } else if (checkArgs(PrimitiveCategory.INTERVAL_YEAR_MONTH, PrimitiveCategory.TIMESTAMP)) {
    plusOpType = OperationType.INTERVALYM_PLUS_TIMESTAMP;
    intervalArg1Idx = 0;
    dtArgIdx = 1;
    resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        TypeInfoFactory.timestampTypeInfo);
  } else if (checkArgs(PrimitiveCategory.INTERVAL_DAY_TIME, PrimitiveCategory.INTERVAL_DAY_TIME)) {
    plusOpType = OperationType.INTERVALDT_PLUS_INTERVALDT;
    intervalArg1Idx = 0;
    intervalArg2Idx = 1;
    resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        TypeInfoFactory.intervalDayTimeTypeInfo);
  } else if (checkArgs(PrimitiveCategory.INTERVAL_DAY_TIME, PrimitiveCategory.DATE)
      || checkArgs(PrimitiveCategory.INTERVAL_DAY_TIME, PrimitiveCategory.TIMESTAMP)) {
    plusOpType = OperationType.INTERVALDT_PLUS_TIMESTAMP;
    intervalArg1Idx = 0;
    dtArgIdx = 1;
    resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        TypeInfoFactory.timestampTypeInfo);
    // The date/timestamp operand is the right argument (dtArgIdx = 1) in this
    // branch, so the converter to the timestamp result type is built from rightOI.
    dtConverter = ObjectInspectorConverters.getConverter(rightOI, resultOI);
  } else if (checkArgs(PrimitiveCategory.DATE, PrimitiveCategory.INTERVAL_DAY_TIME)
      || checkArgs(PrimitiveCategory.TIMESTAMP, PrimitiveCategory.INTERVAL_DAY_TIME)) {
    plusOpType = OperationType.INTERVALDT_PLUS_TIMESTAMP;
    intervalArg1Idx = 1;
    dtArgIdx = 0;
    resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        TypeInfoFactory.timestampTypeInfo);
    dtConverter = ObjectInspectorConverters.getConverter(leftOI, resultOI);
  } else {
    // Unsupported types - error
    List<TypeInfo> argTypeInfos = new ArrayList<TypeInfo>(2);
    argTypeInfos.add(leftOI.getTypeInfo());
    argTypeInfos.add(rightOI.getTypeInfo());
    throw new NoMatchingMethodException(this.getClass(), argTypeInfos, null);
  }
  return resultOI;
}
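As a quick illustration of the branch selection above, here is a minimal sketch (not taken from the Hive tests; it assumes the GenericUDFOPPlus subclass and the writable object inspectors exercised in the tests further down): a date plus a year-month interval should resolve to the date type rather than timestamp.
GenericUDFOPPlus udf = new GenericUDFOPPlus();
ObjectInspector[] inputOIs = {
    PrimitiveObjectInspectorFactory.writableDateObjectInspector,
    PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector };
// Takes the DATE + INTERVAL_YEAR_MONTH branch (INTERVALYM_PLUS_DATE) above.
PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
Assert.assertEquals(TypeInfoFactory.dateTypeInfo, oi.getTypeInfo());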
use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.timestampTypeInfo in project hive by apache.
the class TestETypeConverter method testTimestampInt96ConverterChicago.
@Test
public void testTimestampInt96ConverterChicago() {
  // ts and parent are fixture fields of the enclosing test class; the write-zone
  // property tells the converter which time zone the INT96 value was written in.
  PrimitiveConverter converter;
  parent.metadata.put(ParquetTableUtils.PARQUET_INT96_WRITE_ZONE_PROPERTY, "America/Chicago");
  converter = getETypeConverter(parent, PrimitiveTypeName.INT96, TypeInfoFactory.timestampTypeInfo);
  converter.addBinary(NanoTimeUtils.getNanoTime(ts,
      Calendar.getInstance(TimeZone.getTimeZone("America/Chicago"))).toBinary());
  parent.assertWritableValue(new TimestampWritable(ts));
}
use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.timestampTypeInfo in project hive by apache.
the class TestETypeConverter method testTimestampInt96ConverterEtc.
@Test
public void testTimestampInt96ConverterEtc() {
  PrimitiveConverter converter;
  parent.metadata.put(ParquetTableUtils.PARQUET_INT96_WRITE_ZONE_PROPERTY, "Etc/GMT-12");
  converter = getETypeConverter(parent, PrimitiveTypeName.INT96, TypeInfoFactory.timestampTypeInfo);
  converter.addBinary(NanoTimeUtils.getNanoTime(ts,
      Calendar.getInstance(TimeZone.getTimeZone("Etc/GMT-12"))).toBinary());
  parent.assertWritableValue(new TimestampWritable(ts));
}
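The same round-trip pattern should hold for any zone id. A sketch of a UTC variant, assuming the same ts and parent fixture fields and the getETypeConverter helper used in the two tests above:
@Test
public void testTimestampInt96ConverterUTC() {
  // Sketch only: write the nanotime with a UTC calendar and declare UTC as the
  // write zone, so the converter should reproduce the original timestamp.
  PrimitiveConverter converter;
  parent.metadata.put(ParquetTableUtils.PARQUET_INT96_WRITE_ZONE_PROPERTY, "UTC");
  converter = getETypeConverter(parent, PrimitiveTypeName.INT96, TypeInfoFactory.timestampTypeInfo);
  converter.addBinary(NanoTimeUtils.getNanoTime(ts,
      Calendar.getInstance(TimeZone.getTimeZone("UTC"))).toBinary());
  parent.assertWritableValue(new TimestampWritable(ts));
}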
use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.timestampTypeInfo in project hive by apache.
the class TestGenericUDFOPPlus method testTimestampPlusIntervalYearMonth.
@Test
public void testTimestampPlusIntervalYearMonth() throws Exception {
  GenericUDFOPPlus udf = new GenericUDFOPPlus();
  TimestampWritable left = new TimestampWritable(Timestamp.valueOf("2001-11-15 01:02:03.123456789"));
  HiveIntervalYearMonthWritable right = new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-2"));
  ObjectInspector[] inputOIs = {
      PrimitiveObjectInspectorFactory.writableTimestampObjectInspector,
      PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector };
  DeferredObject[] args = { new DeferredJavaObject(left), new DeferredJavaObject(right) };
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo());
  TimestampWritable res = (TimestampWritable) udf.evaluate(args);
  Assert.assertEquals(Timestamp.valueOf("2004-01-15 01:02:03.123456789"), res.getTimestamp());
}
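Since initialize marks this operation as reversible, the commuted case should resolve to the same type and value. A sketch reusing the values from the test above (not an upstream test itself):
GenericUDFOPPlus udf = new GenericUDFOPPlus();
HiveIntervalYearMonthWritable iym = new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-2"));
TimestampWritable ts = new TimestampWritable(Timestamp.valueOf("2001-11-15 01:02:03.123456789"));
// Interval first, timestamp second: the INTERVALYM_PLUS_TIMESTAMP branch with
// intervalArg1Idx = 0 and dtArgIdx = 1 handles this ordering.
ObjectInspector[] reversedOIs = {
    PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector,
    PrimitiveObjectInspectorFactory.writableTimestampObjectInspector };
DeferredObject[] reversedArgs = { new DeferredJavaObject(iym), new DeferredJavaObject(ts) };
PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(reversedOIs);
Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo());
Assert.assertEquals(Timestamp.valueOf("2004-01-15 01:02:03.123456789"),
    ((TimestampWritable) udf.evaluate(reversedArgs)).getTimestamp());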
use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.timestampTypeInfo in project hive by apache.
the class TestGenericUDFOPPlus method testTimestampPlusIntervalDayTime.
@Test
public void testTimestampPlusIntervalDayTime() throws Exception {
  GenericUDFOPPlus udf = new GenericUDFOPPlus();
  TimestampWritable left = new TimestampWritable(Timestamp.valueOf("2001-01-01 00:00:00"));
  HiveIntervalDayTimeWritable right = new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 2:3:4.567"));
  ObjectInspector[] inputOIs = {
      PrimitiveObjectInspectorFactory.writableTimestampObjectInspector,
      PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector };
  DeferredObject[] args = { new DeferredJavaObject(left), new DeferredJavaObject(right) };
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo());
  TimestampWritable res = (TimestampWritable) udf.evaluate(args);
  Assert.assertEquals(Timestamp.valueOf("2001-01-02 2:3:4.567"), res.getTimestamp());
}
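The mixed case of a date plus a day-time interval also takes the INTERVALDT_PLUS_TIMESTAMP branch, converting the date operand to a timestamp via dtConverter. A sketch, assuming PrimitiveObjectInspectorFactory.writableDateObjectInspector and the serde2 DateWritable, neither of which appears in the snippets above:
GenericUDFOPPlus udf = new GenericUDFOPPlus();
// DateWritable and writableDateObjectInspector are assumed from the serde2 packages.
DateWritable left = new DateWritable(Date.valueOf("2001-01-01"));
HiveIntervalDayTimeWritable right = new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 2:3:4.567"));
ObjectInspector[] inputOIs = {
    PrimitiveObjectInspectorFactory.writableDateObjectInspector,
    PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector };
DeferredObject[] args = { new DeferredJavaObject(left), new DeferredJavaObject(right) };
PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
// Per the DATE + INTERVAL_DAY_TIME branch, the result is a timestamp, not a date.
Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo());
TimestampWritable res = (TimestampWritable) udf.evaluate(args);
Assert.assertEquals(Timestamp.valueOf("2001-01-02 02:03:04.567"), res.getTimestamp());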