Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableDateObjectInspector in project hive by apache.

The class GenericUDFTrunc, method initializeDate:

private ObjectInspector initializeDate(ObjectInspector[] arguments)
    throws UDFArgumentLengthException, UDFArgumentTypeException {
  if (arguments.length != 2) {
    throw new UDFArgumentLengthException("trunc() requires 2 arguments, got " + arguments.length);
  }
  if (arguments[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
    throw new UDFArgumentTypeException(0, "Only primitive type arguments are accepted but "
        + arguments[0].getTypeName() + " is passed as first argument");
  }
  if (arguments[1].getCategory() != ObjectInspector.Category.PRIMITIVE) {
    throw new UDFArgumentTypeException(1, "Only primitive type arguments are accepted but "
        + arguments[1].getTypeName() + " is passed as second argument");
  }
  ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
  inputType1 = ((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory();
  switch (inputType1) {
    case STRING:
    case VARCHAR:
    case CHAR:
    case VOID:
      inputType1 = PrimitiveCategory.STRING;
      textConverter1 = ObjectInspectorConverters.getConverter(arguments[0],
          PrimitiveObjectInspectorFactory.writableStringObjectInspector);
      break;
    case TIMESTAMP:
      timestampConverter = new TimestampConverter((PrimitiveObjectInspector) arguments[0],
          PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
      break;
    case DATE:
      dateWritableConverter = ObjectInspectorConverters.getConverter(arguments[0],
          PrimitiveObjectInspectorFactory.writableDateObjectInspector);
      break;
    default:
      throw new UDFArgumentTypeException(0,
          "TRUNC() only takes STRING/TIMESTAMP/DATEWRITABLE types as first argument, got " + inputType1);
  }
  inputType2 = ((PrimitiveObjectInspector) arguments[1]).getPrimitiveCategory();
  if (PrimitiveObjectInspectorUtils.getPrimitiveGrouping(inputType2) != PrimitiveGrouping.STRING_GROUP
      && PrimitiveObjectInspectorUtils.getPrimitiveGrouping(inputType2) != PrimitiveGrouping.VOID_GROUP) {
    throw new UDFArgumentTypeException(1,
        "trunc() only takes STRING/CHAR/VARCHAR types as second argument, got " + inputType2);
  }
  inputType2 = PrimitiveCategory.STRING;
  if (arguments[1] instanceof ConstantObjectInspector) {
    Object obj = ((ConstantObjectInspector) arguments[1]).getWritableConstantValue();
    fmtInput = obj != null ? obj.toString() : null;
  } else {
    textConverter2 = ObjectInspectorConverters.getConverter(arguments[1],
        PrimitiveObjectInspectorFactory.writableStringObjectInspector);
  }
  return outputOI;
}
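
As a usage illustration, here is a minimal sketch (an assumption, not code from the Hive repository; the class name TruncDateSketch and the sample values are invented) that feeds a writable DATE argument and a non-constant STRING format through the branches selected above. Per trunc()'s date semantics, truncating to 'MM' should yield the first day of the month.

// Minimal sketch of exercising GenericUDFTrunc on the date path described above.
import java.sql.Date;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFTrunc;
import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;

public class TruncDateSketch {
  public static void main(String[] args) throws HiveException {
    GenericUDFTrunc udf = new GenericUDFTrunc();
    // A DATE first argument selects the dateWritableConverter branch above;
    // a non-constant STRING second argument selects textConverter2.
    ObjectInspector[] argOIs = {
        PrimitiveObjectInspectorFactory.writableDateObjectInspector,
        PrimitiveObjectInspectorFactory.writableStringObjectInspector };
    udf.initialize(argOIs);

    DeferredObject[] evalArgs = {
        new DeferredJavaObject(new DateWritable(Date.valueOf("2015-03-20"))),
        new DeferredJavaObject(new Text("MM")) };
    // The result comes back through the writableStringObjectInspector output OI;
    // trunc(date, 'MM') should print 2015-03-01.
    System.out.println(udf.evaluate(evalArgs));
  }
}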
Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableDateObjectInspector in project hive by apache.

The class GenericUDFDate, method initialize:

@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length != 1) {
    throw new UDFArgumentLengthException("to_date() requires 1 argument, got " + arguments.length);
  }
  if (arguments[0].getCategory() != Category.PRIMITIVE) {
    throw new UDFArgumentException("to_date() only accepts STRING/TIMESTAMP/DATEWRITABLE types, got "
        + arguments[0].getTypeName());
  }
  argumentOI = (PrimitiveObjectInspector) arguments[0];
  inputType = argumentOI.getPrimitiveCategory();
  ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableDateObjectInspector;
  switch (inputType) {
    case VOID:
      break;
    case CHAR:
    case VARCHAR:
    case STRING:
      inputType = PrimitiveCategory.STRING;
      textConverter = ObjectInspectorConverters.getConverter(argumentOI,
          PrimitiveObjectInspectorFactory.writableStringObjectInspector);
      break;
    case TIMESTAMP:
      timestampConverter = new TimestampConverter(argumentOI,
          PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
      break;
    case TIMESTAMPLOCALTZ:
    case DATE:
      dateWritableConverter = ObjectInspectorConverters.getConverter(argumentOI,
          PrimitiveObjectInspectorFactory.writableDateObjectInspector);
      break;
    default:
      throw new UDFArgumentException("TO_DATE() only takes STRING/TIMESTAMP/DATEWRITABLE types, got " + inputType);
  }
  return outputOI;
}
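
A minimal companion sketch (again an assumption rather than Hive code; ToDateSketch and the sample input are invented) showing the STRING branch of this initializer and the writableDateObjectInspector it returns as the output inspector:

// Minimal sketch of calling to_date() through GenericUDFDate with a STRING argument.
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDate;
import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;

public class ToDateSketch {
  public static void main(String[] args) throws HiveException {
    GenericUDFDate udf = new GenericUDFDate();
    // A STRING argument takes the textConverter branch; the returned output inspector
    // is PrimitiveObjectInspectorFactory.writableDateObjectInspector.
    ObjectInspector outputOI = udf.initialize(new ObjectInspector[] {
        PrimitiveObjectInspectorFactory.writableStringObjectInspector });

    DeferredObject[] evalArgs = { new DeferredJavaObject(new Text("2009-07-30")) };
    DateWritable result = (DateWritable) udf.evaluate(evalArgs);
    System.out.println(result); // expected: 2009-07-30
  }
}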
Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableDateObjectInspector in project hive by apache.

The class TestGenericUDFGreatest, method testGreatestMixed:

public void testGreatestMixed() throws HiveException {
  GenericUDFGreatest udf = new GenericUDFGreatest();
  ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
  ObjectInspector valueOI2 = PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
  ObjectInspector valueOI3 = PrimitiveObjectInspectorFactory.writableDateObjectInspector;
  ObjectInspector valueOI4 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
  ObjectInspector[] arguments = { valueOI1, valueOI2, valueOI3, valueOI4 };
  udf.initialize(arguments);
  // string comparisons
  runAndVerify(new Object[] { 1, 11.1, Date.valueOf("2015-03-20"), "test" }, "test", udf);
}
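
The runAndVerify helper is not included in this snippet. A standalone sketch of what that call amounts to (an assumption, not the actual helper; GreatestMixedSketch and the wrapping choices are invented) is to wrap each plain value in the Writable type matching its inspector, evaluate the UDF, and inspect the result:

// Standalone sketch of the greatest() evaluation performed by the test above.
import java.sql.Date;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFGreatest;
import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;

public class GreatestMixedSketch {
  public static void main(String[] args) throws HiveException {
    GenericUDFGreatest udf = new GenericUDFGreatest();
    ObjectInspector[] argOIs = {
        PrimitiveObjectInspectorFactory.writableIntObjectInspector,
        PrimitiveObjectInspectorFactory.writableDoubleObjectInspector,
        PrimitiveObjectInspectorFactory.writableDateObjectInspector,
        PrimitiveObjectInspectorFactory.writableStringObjectInspector };
    udf.initialize(argOIs);

    // Wrap each value in the Writable type that matches its object inspector.
    DeferredObject[] evalArgs = {
        new DeferredJavaObject(new IntWritable(1)),
        new DeferredJavaObject(new DoubleWritable(11.1)),
        new DeferredJavaObject(new DateWritable(Date.valueOf("2015-03-20"))),
        new DeferredJavaObject(new Text("test")) };
    // Per the test above, mixed argument types are compared as strings,
    // so the expected greatest value is "test".
    System.out.println(udf.evaluate(evalArgs));
  }
}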
Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableDateObjectInspector in project hive by apache.

The class TestGenericUDFLeast, method testLeastTypes:

public void testLeastTypes() throws HiveException {
  GenericUDFGreatest udf = new GenericUDFGreatest();
  ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
  ObjectInspector valueOI2 = PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
  ObjectInspector valueOI3 = PrimitiveObjectInspectorFactory.writableDateObjectInspector;
  ObjectInspector valueOI4 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
  ObjectInspector[] arguments = { valueOI1, valueOI2, valueOI3, valueOI4 };
  udf.initialize(arguments);
  // string comparisons
  runAndVerify(new Object[] { 1, 11.1, Date.valueOf("2015-03-20"), "test" }, "test", udf);
}
Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableDateObjectInspector in project hive by apache.

The class TestGenericUDFDateSub, method testDateWritablepToDate:

public void testDateWritablepToDate() throws HiveException {
  GenericUDFDateSub udf = new GenericUDFDateSub();
  ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableDateObjectInspector;
  ObjectInspector valueOI2 = PrimitiveObjectInspectorFactory.javaIntObjectInspector;
  ObjectInspector[] arguments = { valueOI1, valueOI2 };
  udf.initialize(arguments);
  // 2009-07-20 (java.sql.Date uses a 1900-based year and a zero-based month)
  DeferredObject valueObj1 = new DeferredJavaObject(new DateWritable(new Date(109, 06, 20)));
  DeferredObject valueObj2 = new DeferredJavaObject(new Integer("4"));
  DeferredObject[] args = { valueObj1, valueObj2 };
  DateWritable output = (DateWritable) udf.evaluate(args);
  assertEquals("date_sub() test for DATEWRITABLE failed ", "2009-07-16", output.toString());
  // Test with null args
  args = new DeferredObject[] { new DeferredJavaObject(null), valueObj2 };
  assertNull("date_sub() 1st arg null", udf.evaluate(args));
  args = new DeferredObject[] { valueObj1, new DeferredJavaObject(null) };
  assertNull("date_sub() 2nd arg null", udf.evaluate(args));
  args = new DeferredObject[] { new DeferredJavaObject(null), new DeferredJavaObject(null) };
  assertNull("date_sub() both args null", udf.evaluate(args));
}
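
As a follow-up, here is a companion sketch for the complementary date_add() function (an assumption modeled on the test above, not a quote from the Hive test suite; DateAddSketch and the sample values are invented). It is initialized with the same pair of inspectors and moves the date forward instead of backward.

// Companion sketch: GenericUDFDateAdd with a writable DATE and a Java int argument.
import java.sql.Date;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateAdd;
import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class DateAddSketch {
  public static void main(String[] args) throws HiveException {
    GenericUDFDateAdd udf = new GenericUDFDateAdd();
    ObjectInspector[] argOIs = {
        PrimitiveObjectInspectorFactory.writableDateObjectInspector,
        PrimitiveObjectInspectorFactory.javaIntObjectInspector };
    udf.initialize(argOIs);

    DeferredObject[] evalArgs = {
        new DeferredJavaObject(new DateWritable(Date.valueOf("2009-07-20"))),
        new DeferredJavaObject(Integer.valueOf(4)) };
    DateWritable output = (DateWritable) udf.evaluate(evalArgs);
    System.out.println(output); // 2009-07-20 plus 4 days: expected 2009-07-24
  }
}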