
Example 1 with PrimitiveObjectInspectorFactory.writableDateObjectInspector

Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableDateObjectInspector in project hive by apache.

The class GenericUDFTrunc, method initializeDate.

private ObjectInspector initializeDate(ObjectInspector[] arguments) throws UDFArgumentLengthException, UDFArgumentTypeException {
    if (arguments.length != 2) {
        throw new UDFArgumentLengthException("trunc() requires 2 argument, got " + arguments.length);
    }
    if (arguments[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
        throw new UDFArgumentTypeException(0, "Only primitive type arguments are accepted but " + arguments[0].getTypeName() + " is passed as the first argument");
    }
    if (arguments[1].getCategory() != ObjectInspector.Category.PRIMITIVE) {
        throw new UDFArgumentTypeException(1, "Only primitive type arguments are accepted but " + arguments[1].getTypeName() + " is passed as the second argument");
    }
    ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
    inputType1 = ((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory();
    switch(inputType1) {
        case STRING:
        case VARCHAR:
        case CHAR:
        case VOID:
            inputType1 = PrimitiveCategory.STRING;
            textConverter1 = ObjectInspectorConverters.getConverter(arguments[0], PrimitiveObjectInspectorFactory.writableStringObjectInspector);
            break;
        case TIMESTAMP:
            timestampConverter = new TimestampConverter((PrimitiveObjectInspector) arguments[0], PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
            break;
        case DATE:
            dateWritableConverter = ObjectInspectorConverters.getConverter(arguments[0], PrimitiveObjectInspectorFactory.writableDateObjectInspector);
            break;
        default:
            throw new UDFArgumentTypeException(0, "TRUNC() only takes STRING/TIMESTAMP/DATEWRITABLE types as first argument, got " + inputType1);
    }
    inputType2 = ((PrimitiveObjectInspector) arguments[1]).getPrimitiveCategory();
    if (PrimitiveObjectInspectorUtils.getPrimitiveGrouping(inputType2) != PrimitiveGrouping.STRING_GROUP && PrimitiveObjectInspectorUtils.getPrimitiveGrouping(inputType2) != PrimitiveGrouping.VOID_GROUP) {
        throw new UDFArgumentTypeException(1, "trunk() only takes STRING/CHAR/VARCHAR types as second argument, got " + inputType2);
    }
    inputType2 = PrimitiveCategory.STRING;
    if (arguments[1] instanceof ConstantObjectInspector) {
        Object obj = ((ConstantObjectInspector) arguments[1]).getWritableConstantValue();
        fmtInput = obj != null ? obj.toString() : null;
    } else {
        textConverter2 = ObjectInspectorConverters.getConverter(arguments[1], PrimitiveObjectInspectorFactory.writableStringObjectInspector);
    }
    return outputOI;
}
Also used : PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) ConstantObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector) UDFArgumentLengthException(org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException) UDFArgumentTypeException(org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException) TimestampConverter(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TimestampConverter)
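
The DATE case above only wires up dateWritableConverter; how that branch is exercised is easiest to see through the public GenericUDF API. The following is a minimal, hypothetical test in the style of the later examples on this page, assuming JUnit's Assert and the same imports listed in the other examples; the literal date, the "MM" format, and the expected "2017-03-01" result are illustrative assumptions, not taken from the Hive sources.

@Test
public void testTruncDateToMonth() throws HiveException {
    GenericUDFTrunc udf = new GenericUDFTrunc();
    ObjectInspector[] inputOIs = {
        // DATE first argument: initializeDate() takes the writableDateObjectInspector branch
        PrimitiveObjectInspectorFactory.writableDateObjectInspector,
        // non-constant STRING second argument: the format is read through textConverter2
        PrimitiveObjectInspectorFactory.writableStringObjectInspector
    };
    udf.initialize(inputOIs);
    DeferredObject[] args = {
        new DeferredJavaObject(new DateWritable(Date.valueOf("2017-03-15"))),
        new DeferredJavaObject(new Text("MM"))
    };
    // outputOI is the writable string inspector, so the result comes back as Text
    Text res = (Text) udf.evaluate(args);
    // expected (assumption): the date truncated to the first day of its month
    Assert.assertEquals("2017-03-01", res.toString());
}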

Example 2 with PrimitiveObjectInspectorFactory.writableDateObjectInspector

Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableDateObjectInspector in project hive by apache.

The class GenericUDFToDate, method initialize.

@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length < 1) {
        throw new UDFArgumentLengthException("The function CAST as DATE requires at least one argument, got " + arguments.length);
    }
    try {
        argumentOI = (PrimitiveObjectInspector) arguments[0];
        PrimitiveCategory pc = argumentOI.getPrimitiveCategory();
        PrimitiveGrouping pg = PrimitiveObjectInspectorUtils.getPrimitiveGrouping(pc);
        switch(pg) {
            case DATE_GROUP:
            case STRING_GROUP:
            case VOID_GROUP:
                break;
            default:
                throw new UDFArgumentException("CAST as DATE only allows date,string, or timestamp types");
        }
    } catch (ClassCastException e) {
        throw new UDFArgumentException("The function CAST as DATE takes only primitive types");
    }
    dc = new DateConverter(argumentOI, PrimitiveObjectInspectorFactory.writableDateObjectInspector);
    return PrimitiveObjectInspectorFactory.writableDateObjectInspector;
}
Also used : UDFArgumentException(org.apache.hadoop.hive.ql.exec.UDFArgumentException) DateConverter(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.DateConverter) UDFArgumentLengthException(org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException) PrimitiveCategory(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory) PrimitiveGrouping(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping)
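
Because initialize() hands back writableDateObjectInspector, callers receive DateWritable values from evaluate(). Below is a minimal sketch in the style of the test examples further down the page, assuming JUnit's Assert and TypeInfoFactory from the Hive serde2 typeinfo package; the literal date is illustrative.

@Test
public void testCastStringToDate() throws HiveException {
    GenericUDFToDate udf = new GenericUDFToDate();
    ObjectInspector[] inputOIs = { PrimitiveObjectInspectorFactory.writableStringObjectInspector };
    PrimitiveObjectInspector resultOI = (PrimitiveObjectInspector) udf.initialize(inputOIs);
    // the returned inspector is the writable DATE inspector, so its type info is dateTypeInfo
    Assert.assertEquals(TypeInfoFactory.dateTypeInfo, resultOI.getTypeInfo());
    DeferredObject[] args = { new DeferredJavaObject(new Text("2009-07-30")) };
    // the DateConverter set up in initialize() parses the string into a DateWritable
    DateWritable res = (DateWritable) udf.evaluate(args);
    Assert.assertEquals(Date.valueOf("2009-07-30"), res.get());
}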

Example 3 with PrimitiveObjectInspectorFactory.writableDateObjectInspector

Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableDateObjectInspector in project hive by apache.

The class TestGenericUDFSortArray, method testSortStruct.

@Test
public void testSortStruct() throws HiveException {
    ObjectInspector[] inputOIs = { ObjectInspectorFactory.getStandardListObjectInspector(ObjectInspectorFactory.getStandardStructObjectInspector(asList("f1", "f2", "f3", "f4"), asList(PrimitiveObjectInspectorFactory.writableStringObjectInspector, PrimitiveObjectInspectorFactory.writableDoubleObjectInspector, PrimitiveObjectInspectorFactory.writableDateObjectInspector, ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableIntObjectInspector)))) };
    udf.initialize(inputOIs);
    Object i1 = asList(new Text("a"), new DoubleWritable(3.1415), new DateWritable(new Date(2015, 5, 26)), asList(new IntWritable(1), new IntWritable(3), new IntWritable(2), new IntWritable(4)));
    Object i2 = asList(new Text("b"), new DoubleWritable(3.14), new DateWritable(new Date(2015, 5, 26)), asList(new IntWritable(1), new IntWritable(3), new IntWritable(2), new IntWritable(4)));
    Object i3 = asList(new Text("a"), new DoubleWritable(3.1415), new DateWritable(new Date(2015, 5, 25)), asList(new IntWritable(1), new IntWritable(3), new IntWritable(2), new IntWritable(5)));
    Object i4 = asList(new Text("a"), new DoubleWritable(3.1415), new DateWritable(new Date(2015, 5, 25)), asList(new IntWritable(1), new IntWritable(3), new IntWritable(2), new IntWritable(4)));
    runAndVerify(asList(i1, i2, i3, i4), asList(i4, i3, i1, i2));
}
Also used : ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) DateWritable(org.apache.hadoop.hive.serde2.io.DateWritable) Text(org.apache.hadoop.io.Text) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) Date(java.sql.Date) IntWritable(org.apache.hadoop.io.IntWritable) Test(org.junit.Test)
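
In the struct above, writableDateObjectInspector describes the DateWritable values in the f3 field. A short hypothetical snippet (not part of the test) showing the inspector's two accessors, with an illustrative date:

    // the writable date inspector reads a DateWritable either as java.sql.Date or as the writable itself
    DateWritable f3 = new DateWritable(Date.valueOf("2015-06-26"));
    Date javaDate = PrimitiveObjectInspectorFactory.writableDateObjectInspector.getPrimitiveJavaObject(f3);
    DateWritable writable = PrimitiveObjectInspectorFactory.writableDateObjectInspector.getPrimitiveWritableObject(f3);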

Example 4 with PrimitiveObjectInspectorFactory.writableDateObjectInspector

Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableDateObjectInspector in project hive by apache.

The class TestGenericUDFOPPlus, method testIntervalYearMonthPlusDate.

@Test
public void testIntervalYearMonthPlusDate() throws Exception {
    GenericUDFOPPlus udf = new GenericUDFOPPlus();
    HiveIntervalYearMonthWritable left = new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-8"));
    DateWritable right = new DateWritable(Date.valueOf("2001-06-15"));
    ObjectInspector[] inputOIs = { PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector, PrimitiveObjectInspectorFactory.writableDateObjectInspector };
    DeferredObject[] args = { new DeferredJavaObject(left), new DeferredJavaObject(right) };
    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
    Assert.assertEquals(TypeInfoFactory.dateTypeInfo, oi.getTypeInfo());
    DateWritable res = (DateWritable) udf.evaluate(args);
    Assert.assertEquals(Date.valueOf("2004-02-15"), res.get());
}
Also used : PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) DeferredJavaObject(org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject) DateWritable(org.apache.hadoop.hive.serde2.io.DateWritable) DeferredObject(org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject) HiveIntervalYearMonthWritable(org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable) Test(org.junit.Test)
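
The same inspectors can be used with the operands swapped. A hypothetical companion test, assuming GenericUDFOPPlus also accepts DATE + INTERVAL_YEAR_MONTH in that order (the values simply mirror the test above):

@Test
public void testDatePlusIntervalYearMonth() throws Exception {
    GenericUDFOPPlus udf = new GenericUDFOPPlus();
    DateWritable left = new DateWritable(Date.valueOf("2001-06-15"));
    HiveIntervalYearMonthWritable right = new HiveIntervalYearMonthWritable(HiveIntervalYearMonth.valueOf("2-8"));
    ObjectInspector[] inputOIs = { PrimitiveObjectInspectorFactory.writableDateObjectInspector, PrimitiveObjectInspectorFactory.writableHiveIntervalYearMonthObjectInspector };
    DeferredObject[] args = { new DeferredJavaObject(left), new DeferredJavaObject(right) };
    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
    // the result type should still be DATE
    Assert.assertEquals(TypeInfoFactory.dateTypeInfo, oi.getTypeInfo());
    DateWritable res = (DateWritable) udf.evaluate(args);
    Assert.assertEquals(Date.valueOf("2004-02-15"), res.get());
}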

Example 5 with PrimitiveObjectInspectorFactory.writableDateObjectInspector

Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableDateObjectInspector in project hive by apache.

The class TestGenericUDFQuarter, method testQuarterDt.

public void testQuarterDt() throws HiveException {
    GenericUDFQuarter udf = new GenericUDFQuarter();
    ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableDateObjectInspector;
    ObjectInspector[] arguments = { valueOI0 };
    udf.initialize(arguments);
    // positive Unix time
    runAndVerifyDt("2014-01-01", 1, udf);
    runAndVerifyDt("2014-02-10", 1, udf);
    runAndVerifyDt("2014-03-31", 1, udf);
    runAndVerifyDt("2014-04-02", 2, udf);
    runAndVerifyDt("2014-05-28", 2, udf);
    runAndVerifyDt("2016-06-03", 2, udf);
    runAndVerifyDt("2016-07-28", 3, udf);
    runAndVerifyDt("2016-08-29", 3, udf);
    runAndVerifyDt("2016-09-29", 3, udf);
    runAndVerifyDt("2016-10-29", 4, udf);
    runAndVerifyDt("2016-11-29", 4, udf);
    runAndVerifyDt("2016-12-31", 4, udf);
    // negative Unix time
    runAndVerifyDt("1966-01-01", 1, udf);
    runAndVerifyDt("1966-03-31", 1, udf);
    runAndVerifyDt("1966-04-01", 2, udf);
    runAndVerifyDt("1966-12-31", 4, udf);
}
Also used : ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector)
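
The runAndVerifyDt helper is not shown on this page. A plausible, hypothetical version (written to match the assertions above, not copied from the Hive test) wraps each date literal in a DateWritable so it matches writableDateObjectInspector, evaluates the UDF, and compares the quarter:

// hypothetical helper, not the one from TestGenericUDFQuarter itself
private void runAndVerifyDt(String str, Integer expResult, GenericUDF udf) throws HiveException {
    DeferredObject valueObj0 = new DeferredJavaObject(str != null ? new DateWritable(Date.valueOf(str)) : null);
    DeferredObject[] args = { valueObj0 };
    // GenericUDFQuarter returns an IntWritable holding the quarter (1-4)
    IntWritable output = (IntWritable) udf.evaluate(args);
    assertEquals("quarter() test for " + str, expResult.intValue(), output.get());
}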

Aggregations

ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 32 uses
DateWritable (org.apache.hadoop.hive.serde2.io.DateWritable): 22 uses
DeferredJavaObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject): 19 uses
DeferredObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject): 19 uses
Date (java.sql.Date): 13 uses
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector): 13 uses
Test (org.junit.Test): 9 uses
UDFArgumentLengthException (org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException): 5 uses
Text (org.apache.hadoop.io.Text): 5 uses
UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException): 4 uses
UDFArgumentTypeException (org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException): 4 uses
TimestampConverter (org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TimestampConverter): 4 uses
GenericUDFDateAdd (org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateAdd): 3 uses
GenericUDFDateSub (org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateSub): 3 uses
HiveIntervalDayTimeWritable (org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable): 3 uses
HiveIntervalYearMonthWritable (org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable): 3 uses
TimestampWritable (org.apache.hadoop.hive.serde2.io.TimestampWritable): 3 uses
PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory): 3 uses
ConstantObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector): 2 uses
IntWritable (org.apache.hadoop.io.IntWritable): 2 uses