Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableTimestampObjectInspector in project hive by apache.
The class GenericUDFTimestamp, method initialize.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length < 1) {
    throw new UDFArgumentLengthException("The function TIMESTAMP requires at least one argument, got "
        + arguments.length);
  }
  SessionState ss = SessionState.get();
  if (ss != null) {
    intToTimestampInSeconds = ss.getConf().getBoolVar(ConfVars.HIVE_INT_TIMESTAMP_CONVERSION_IN_SECONDS);
  }
  try {
    argumentOI = (PrimitiveObjectInspector) arguments[0];
  } catch (ClassCastException e) {
    throw new UDFArgumentException("The function TIMESTAMP takes only primitive types");
  }
  tc = new TimestampConverter(argumentOI, PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
  tc.setIntToTimestampInSeconds(intToTimestampInSeconds);
  return PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
}
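For context, a minimal sketch of the matching evaluate() step (an assumption about the surrounding class, not the verbatim Hive source): the argument is passed through the converter built above and comes back as a TimestampWritable.

@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  // Sketch only: reuse the TimestampConverter (tc) prepared in initialize().
  Object value = arguments[0].get();
  // A null input yields a null result; anything else is converted to a TimestampWritable.
  return value == null ? null : tc.convert(value);
}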
Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableTimestampObjectInspector in project hive by apache.
The class GenericUDFDateDiff, method checkArguments.
private Converter checkArguments(ObjectInspector[] arguments, int i) throws UDFArgumentException {
  if (arguments[i].getCategory() != ObjectInspector.Category.PRIMITIVE) {
    throw new UDFArgumentTypeException(i, "Only primitive type arguments are accepted but "
        + arguments[i].getTypeName() + " was passed as the " + (i + 1) + "-th argument");
  }
  PrimitiveCategory inputType = ((PrimitiveObjectInspector) arguments[i]).getPrimitiveCategory();
  Converter converter;
  switch (inputType) {
  case STRING:
  case VARCHAR:
  case CHAR:
    converter = ObjectInspectorConverters.getConverter((PrimitiveObjectInspector) arguments[i],
        PrimitiveObjectInspectorFactory.writableStringObjectInspector);
    break;
  case TIMESTAMP:
    converter = new TimestampConverter((PrimitiveObjectInspector) arguments[i],
        PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
    break;
  case DATE:
    converter = ObjectInspectorConverters.getConverter((PrimitiveObjectInspector) arguments[i],
        PrimitiveObjectInspectorFactory.writableDateObjectInspector);
    break;
  default:
    throw new UDFArgumentException("DATEDIFF() only takes STRING/TIMESTAMP/DATEWRITABLE types as "
        + (i + 1) + "-th argument, got " + inputType);
  }
  return converter;
}
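A minimal sketch of how an initialize() method could wire this helper up for both arguments (the field names inputConverter1/inputConverter2 and the writable int result inspector are assumptions for illustration, not quoted from the Hive source):

@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length != 2) {
    throw new UDFArgumentLengthException("DATEDIFF() requires 2 arguments, got " + arguments.length);
  }
  // Sketch only: one converter per argument; the day difference is reported as a writable int.
  inputConverter1 = checkArguments(arguments, 0);
  inputConverter2 = checkArguments(arguments, 1);
  return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
}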
Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableTimestampObjectInspector in project hive by apache.
The class TestGenericUDFDate, method testTimestampToDate.
public void testTimestampToDate() throws HiveException {
  GenericUDFDate udf = new GenericUDFDate();
  ObjectInspector valueOI = PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
  ObjectInspector[] arguments = { valueOI };
  udf.initialize(arguments);
  DeferredObject valueObj = new DeferredJavaObject(new TimestampWritable(new Timestamp(109, 06, 30, 4, 17, 52, 0)));
  DeferredObject[] args = { valueObj };
  DateWritable output = (DateWritable) udf.evaluate(args);
  assertEquals("to_date() test for TIMESTAMP failed ", "2009-07-30", output.toString());
  // Try with null args
  DeferredObject[] nullArgs = { new DeferredJavaObject(null) };
  output = (DateWritable) udf.evaluate(nullArgs);
  assertNull("to_date() with null TIMESTAMP", output);
}
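A hedged variant of the same round trip with a string input, using writableStringObjectInspector from the same factory (org.apache.hadoop.io.Text is assumed to be imported; this is an illustrative sketch, not the project's own test):

// Sketch only: to_date() over a STRING argument should produce the same DateWritable.
GenericUDFDate udf2 = new GenericUDFDate();
ObjectInspector stringOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
udf2.initialize(new ObjectInspector[] { stringOI });
DeferredObject[] strArgs = { new DeferredJavaObject(new Text("2009-07-30")) };
DateWritable strOutput = (DateWritable) udf2.evaluate(strArgs);
assertEquals("to_date() test for STRING failed ", "2009-07-30", strOutput.toString());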
Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableTimestampObjectInspector in project hive by apache.
The class TestGenericUDFDateAdd, method testTimestampToDate.
public void testTimestampToDate() throws HiveException {
  GenericUDFDateAdd udf = new GenericUDFDateAdd();
  ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
  ObjectInspector valueOI2 = PrimitiveObjectInspectorFactory.javaIntObjectInspector;
  ObjectInspector[] arguments = { valueOI1, valueOI2 };
  udf.initialize(arguments);
  DeferredObject valueObj1 = new DeferredJavaObject(new TimestampWritable(new Timestamp(109, 06, 20, 4, 17, 52, 0)));
  DeferredObject valueObj2 = new DeferredJavaObject(new Integer("3"));
  DeferredObject[] args = { valueObj1, valueObj2 };
  DateWritable output = (DateWritable) udf.evaluate(args);
  assertEquals("date_add() test for TIMESTAMP failed ", "2009-07-23", output.toString());
  // Test with null args
  args = new DeferredObject[] { new DeferredJavaObject(null), valueObj2 };
  assertNull("date_add() 1st arg null", udf.evaluate(args));
  args = new DeferredObject[] { valueObj1, new DeferredJavaObject(null) };
  assertNull("date_add() 2nd arg null", udf.evaluate(args));
  args = new DeferredObject[] { new DeferredJavaObject(null), new DeferredJavaObject(null) };
  assertNull("date_add() both args null", udf.evaluate(args));
}
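A small hedged follow-up (an assumption about date_add()'s return type in this Hive version, not something the snippet above asserts): the inspector returned by initialize() can be checked against the expected DATE primitive category.

// Sketch only: initialize() is expected to hand back a date-typed primitive inspector here.
ObjectInspector outputOI = udf.initialize(arguments);
assertEquals(PrimitiveObjectInspector.PrimitiveCategory.DATE,
    ((PrimitiveObjectInspector) outputOI).getPrimitiveCategory());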
Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableTimestampObjectInspector in project hive by apache.
The class TestGenericUDFOPMinus, method testTimestampMinusIntervalYearMonth.
@Test
public void testTimestampMinusIntervalYearMonth() throws Exception {
  GenericUDFOPMinus udf = new GenericUDFOPMinus();
  TimestampWritable left = new TimestampWritable(Timestamp.valueOf("2004-01-15 01:02:03.123456789"));
  HiveIntervalYearMonthWritable right = new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-2"));
  ObjectInspector[] inputOIs = { PrimitiveObjectInspectorFactory.writableTimestampObjectInspector,
      PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector };
  DeferredObject[] args = { new DeferredJavaObject(left), new DeferredJavaObject(right) };
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo());
  TimestampWritable res = (TimestampWritable) udf.evaluate(args);
  Assert.assertEquals(Timestamp.valueOf("2001-11-15 01:02:03.123456789"), res.getTimestamp());
}
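One further check, as a sketch (it assumes the factory hands back its cached public writableTimestampObjectInspector instance for timestamp results, which the test above does not itself assert):

// Sketch only: the timestamp result should be described by the factory's shared inspector instance.
Assert.assertSame(PrimitiveObjectInspectorFactory.writableTimestampObjectInspector, oi);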