Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableTimestampObjectInspector in the Apache Hive project: the GenericUDF method obtainTimestampConverter.
protected void obtainTimestampConverter(ObjectInspector[] arguments, int i,
    PrimitiveCategory[] inputTypes, Converter[] converters) throws UDFArgumentTypeException {
  PrimitiveObjectInspector inOi = (PrimitiveObjectInspector) arguments[i];
  PrimitiveCategory inputType = inOi.getPrimitiveCategory();
  ObjectInspector outOi;
  switch (inputType) {
  case STRING:
  case VARCHAR:
  case CHAR:
  case TIMESTAMP:
  case DATE:
  case TIMESTAMPLOCALTZ:
    break;
  default:
    throw new UDFArgumentTypeException(i, getFuncName()
        + " only takes STRING_GROUP or DATE_GROUP types as " + getArgOrder(i)
        + " argument, got " + inputType);
  }
  outOi = PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
  converters[i] = ObjectInspectorConverters.getConverter(inOi, outOi);
  inputTypes[i] = inputType;
}
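The pattern here is one converter per argument: whatever STRING_GROUP or DATE_GROUP type arrives, the stored converter normalizes it to a TimestampWritable before evaluate() runs. A minimal sketch of how a subclass typically drives this from initialize() follows; the UDF class GenericUDFTimestampIdentity is hypothetical, while obtainTimestampConverter, getTimestampValue, checkArgsSize, and checkArgPrimitive are GenericUDF's own protected helpers.

import java.sql.Timestamp;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

// Hypothetical UDF that passes a timestamp through unchanged, shown only to
// illustrate the obtainTimestampConverter calling pattern.
public class GenericUDFTimestampIdentity extends GenericUDF {
  private final transient PrimitiveCategory[] inputTypes = new PrimitiveCategory[1];
  private final transient Converter[] converters = new Converter[1];
  private final TimestampWritable output = new TimestampWritable();

  @Override
  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    checkArgsSize(arguments, 1, 1);
    checkArgPrimitive(arguments, 0);
    // Accepts any STRING_GROUP/DATE_GROUP input and stores a converter to TimestampWritable.
    obtainTimestampConverter(arguments, 0, inputTypes, converters);
    return PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
  }

  @Override
  public Object evaluate(DeferredObject[] arguments) throws HiveException {
    // getTimestampValue applies the converter built in initialize().
    Timestamp ts = getTimestampValue(arguments, 0, converters);
    if (ts == null) {
      return null;
    }
    output.set(ts);
    return output;
  }

  @Override
  public String getDisplayString(String[] children) {
    return getStandardDisplayString("timestamp_identity", children);
  }
}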
Use of PrimitiveObjectInspectorFactory.writableTimestampObjectInspector in Apache Hive: the GenericUDFTrunc method initializeDate.
private ObjectInspector initializeDate(ObjectInspector[] arguments)
    throws UDFArgumentLengthException, UDFArgumentTypeException {
  if (arguments.length != 2) {
    throw new UDFArgumentLengthException("trunc() requires 2 arguments, got " + arguments.length);
  }
  if (arguments[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
    throw new UDFArgumentTypeException(0, "Only primitive type arguments are accepted but "
        + arguments[0].getTypeName() + " is passed as first argument");
  }
  if (arguments[1].getCategory() != ObjectInspector.Category.PRIMITIVE) {
    throw new UDFArgumentTypeException(1, "Only primitive type arguments are accepted but "
        + arguments[1].getTypeName() + " is passed as second argument");
  }
  ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
  inputType1 = ((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory();
  switch (inputType1) {
  case STRING:
  case VARCHAR:
  case CHAR:
  case VOID:
    inputType1 = PrimitiveCategory.STRING;
    textConverter1 = ObjectInspectorConverters.getConverter(arguments[0],
        PrimitiveObjectInspectorFactory.writableStringObjectInspector);
    break;
  case TIMESTAMP:
    timestampConverter = new TimestampConverter((PrimitiveObjectInspector) arguments[0],
        PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
    break;
  case DATE:
    dateWritableConverter = ObjectInspectorConverters.getConverter(arguments[0],
        PrimitiveObjectInspectorFactory.writableDateObjectInspector);
    break;
  default:
    throw new UDFArgumentTypeException(0,
        "TRUNC() only takes STRING/TIMESTAMP/DATEWRITABLE types as first argument, got " + inputType1);
  }
  inputType2 = ((PrimitiveObjectInspector) arguments[1]).getPrimitiveCategory();
  if (PrimitiveObjectInspectorUtils.getPrimitiveGrouping(inputType2) != PrimitiveGrouping.STRING_GROUP
      && PrimitiveObjectInspectorUtils.getPrimitiveGrouping(inputType2) != PrimitiveGrouping.VOID_GROUP) {
    throw new UDFArgumentTypeException(1,
        "trunc() only takes STRING/CHAR/VARCHAR types as second argument, got " + inputType2);
  }
  inputType2 = PrimitiveCategory.STRING;
  if (arguments[1] instanceof ConstantObjectInspector) {
    Object obj = ((ConstantObjectInspector) arguments[1]).getWritableConstantValue();
    fmtInput = obj != null ? obj.toString() : null;
  } else {
    textConverter2 = ObjectInspectorConverters.getConverter(arguments[1],
        PrimitiveObjectInspectorFactory.writableStringObjectInspector);
  }
  return outputOI;
}
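Two details worth noting: the output inspector is always writableStringObjectInspector regardless of input type, and a constant format argument is folded into fmtInput at initialize time, so only non-constant formats pay for a per-row conversion through textConverter2. Below is a hedged driver (illustrative, not taken from the Hive test suite; the class name and values are made up) exercising the TIMESTAMP branch:

import java.sql.Timestamp;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFTrunc;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;

public class TruncTimestampDemo {
  public static void main(String[] args) throws Exception {
    GenericUDFTrunc udf = new GenericUDFTrunc();
    ObjectInspector[] initArgs = {
        PrimitiveObjectInspectorFactory.writableTimestampObjectInspector,
        PrimitiveObjectInspectorFactory.writableStringObjectInspector };
    udf.initialize(initArgs); // a TIMESTAMP first argument routes through initializeDate() above
    DeferredObject[] evalArgs = {
        new DeferredJavaObject(new TimestampWritable(Timestamp.valueOf("2014-02-12 01:02:03"))),
        new DeferredJavaObject(new Text("MM")) };
    // The output OI is writableStringObjectInspector, so evaluate() yields a Text.
    Text result = (Text) udf.evaluate(evalArgs);
    System.out.println(result); // expected: 2014-02-01 (truncated to first day of month)
  }
}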
Use of PrimitiveObjectInspectorFactory.writableTimestampObjectInspector in Apache Hive: the GenericUDFDate method initialize.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length != 1) {
    throw new UDFArgumentLengthException("to_date() requires 1 argument, got " + arguments.length);
  }
  if (arguments[0].getCategory() != Category.PRIMITIVE) {
    throw new UDFArgumentException("to_date() only accepts STRING/TIMESTAMP/DATEWRITABLE types, got "
        + arguments[0].getTypeName());
  }
  argumentOI = (PrimitiveObjectInspector) arguments[0];
  inputType = argumentOI.getPrimitiveCategory();
  ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableDateObjectInspector;
  switch (inputType) {
  case VOID:
    break;
  case CHAR:
  case VARCHAR:
  case STRING:
    inputType = PrimitiveCategory.STRING;
    textConverter = ObjectInspectorConverters.getConverter(argumentOI,
        PrimitiveObjectInspectorFactory.writableStringObjectInspector);
    break;
  case TIMESTAMP:
    timestampConverter = new TimestampConverter(argumentOI,
        PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
    break;
  case TIMESTAMPLOCALTZ:
  case DATE:
    dateWritableConverter = ObjectInspectorConverters.getConverter(argumentOI,
        PrimitiveObjectInspectorFactory.writableDateObjectInspector);
    break;
  default:
    throw new UDFArgumentException("TO_DATE() only takes STRING/TIMESTAMP/DATEWRITABLE types, got " + inputType);
  }
  return outputOI;
}
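A short driver for the TIMESTAMP branch, which is where writableTimestampObjectInspector enters; this sketch is modeled on Hive's UDF unit tests rather than copied from them:

import java.sql.Timestamp;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDate;
import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class ToDateTimestampDemo {
  public static void main(String[] args) throws Exception {
    GenericUDFDate udf = new GenericUDFDate();
    ObjectInspector[] initArgs = { PrimitiveObjectInspectorFactory.writableTimestampObjectInspector };
    udf.initialize(initArgs); // builds the TimestampConverter shown above
    DeferredObject[] evalArgs = {
        new DeferredJavaObject(new TimestampWritable(Timestamp.valueOf("2009-07-30 12:58:59"))) };
    // Output OI is writableDateObjectInspector, so the result is a DateWritable.
    DateWritable result = (DateWritable) udf.evaluate(evalArgs);
    System.out.println(result.get()); // expected: 2009-07-30 (time of day dropped)
  }
}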
Use of PrimitiveObjectInspectorFactory.writableTimestampObjectInspector in Apache Hive: the TestGenericUDFOPPlus method testIntervalDayTimePlusTimestamp.
@Test
public void testIntervalDayTimePlusTimestamp() throws Exception {
  GenericUDFOPPlus udf = new GenericUDFOPPlus();
  HiveIntervalDayTimeWritable left =
      new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 2:3:4.567"));
  TimestampWritable right = new TimestampWritable(Timestamp.valueOf("2001-01-01 00:00:00"));
  ObjectInspector[] inputOIs = {
      PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector,
      PrimitiveObjectInspectorFactory.writableTimestampObjectInspector };
  DeferredObject[] args = { new DeferredJavaObject(left), new DeferredJavaObject(right) };
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo());
  TimestampWritable res = (TimestampWritable) udf.evaluate(args);
  Assert.assertEquals(Timestamp.valueOf("2001-01-02 2:3:4.567"), res.getTimestamp());
}
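Hive's + operator accepts this pairing in either operand order, so the mirrored case (TIMESTAMP first, interval second) should initialize and evaluate the same way. A sketch of what that looks like if dropped into the same test class; the method name and assertions are this page's illustration, not code quoted from the Hive suite:

@Test
public void testTimestampPlusIntervalDayTime() throws Exception {
  GenericUDFOPPlus udf = new GenericUDFOPPlus();
  TimestampWritable left = new TimestampWritable(Timestamp.valueOf("2001-01-01 00:00:00"));
  HiveIntervalDayTimeWritable right =
      new HiveIntervalDayTimeWritable(HiveIntervalDayTime.valueOf("1 2:3:4.567"));
  ObjectInspector[] inputOIs = {
      PrimitiveObjectInspectorFactory.writableTimestampObjectInspector,
      PrimitiveObjectInspectorFactory.writableHiveIntervalDayTimeObjectInspector };
  DeferredObject[] args = { new DeferredJavaObject(left), new DeferredJavaObject(right) };
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  Assert.assertEquals(TypeInfoFactory.timestampTypeInfo, oi.getTypeInfo());
  TimestampWritable res = (TimestampWritable) udf.evaluate(args);
  // Same sum as above, just with the operands swapped.
  Assert.assertEquals(Timestamp.valueOf("2001-01-02 02:03:04.567"), res.getTimestamp());
}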
Use of PrimitiveObjectInspectorFactory.writableTimestampObjectInspector in Apache Hive: the TestGenericUDFDateDiff method testTimestampToDate.
public void testTimestampToDate() throws HiveException {
  GenericUDFDateDiff udf = new GenericUDFDateDiff();
  ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
  ObjectInspector valueOI2 = PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
  ObjectInspector[] arguments = { valueOI1, valueOI2 };
  udf.initialize(arguments);
  DeferredObject valueObj1 = new DeferredJavaObject(new TimestampWritable(new Timestamp(109, 06, 20, 0, 0, 0, 0)));
  DeferredObject valueObj2 = new DeferredJavaObject(new TimestampWritable(new Timestamp(109, 06, 17, 0, 0, 0, 0)));
  DeferredObject[] args = { valueObj1, valueObj2 };
  IntWritable output = (IntWritable) udf.evaluate(args);
  assertEquals("datediff() test for TIMESTAMP failed ", "3", output.toString());
  // Test with null args
  args = new DeferredObject[] { new DeferredJavaObject(null), valueObj2 };
  assertNull("datediff() 1st arg null", udf.evaluate(args));
  args = new DeferredObject[] { valueObj1, new DeferredJavaObject(null) };
  assertNull("datediff() 2nd arg null", udf.evaluate(args));
  args = new DeferredObject[] { new DeferredJavaObject(null), new DeferredJavaObject(null) };
  assertNull("datediff() both args null", udf.evaluate(args));
}