
Example 36 with PrimitiveObjectInspectorFactory.writableLongObjectInspector

Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableLongObjectInspector in project hive by apache.

From class GenericUDFTrunc, method initializeNumber:

private ObjectInspector initializeNumber(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length < 1 || arguments.length > 2) {
        throw new UDFArgumentLengthException("TRUNC requires one or two argument, got " + arguments.length);
    }
    if (arguments[0].getCategory() != Category.PRIMITIVE) {
        throw new UDFArgumentTypeException(0, "TRUNC input only takes primitive types, got " + arguments[0].getTypeName());
    }
    inputOI = (PrimitiveObjectInspector) arguments[0];
    if (arguments.length == 2) {
        if (arguments[1].getCategory() != Category.PRIMITIVE) {
            throw new UDFArgumentTypeException(1, "TRUNC second argument only takes primitive types, got " + arguments[1].getTypeName());
        }
        inputScaleOI = (PrimitiveObjectInspector) arguments[1];
        inputSacleConst = arguments[1] instanceof ConstantObjectInspector;
        if (inputSacleConst) {
            try {
                Object obj = ((ConstantObjectInspector) arguments[1]).getWritableConstantValue();
                fmtInput = obj != null ? obj.toString() : null;
                scale = Integer.parseInt(fmtInput);
            } catch (Exception e) {
                throw new UDFArgumentException("TRUNC input only takes integer values, got " + fmtInput);
            }
        } else {
            switch(inputScaleOI.getPrimitiveCategory()) {
                case BYTE:
                    byteConverter = ObjectInspectorConverters.getConverter(arguments[1], PrimitiveObjectInspectorFactory.writableByteObjectInspector);
                    break;
                case SHORT:
                    shortConverter = ObjectInspectorConverters.getConverter(arguments[1], PrimitiveObjectInspectorFactory.writableShortObjectInspector);
                    break;
                case INT:
                    intConverter = ObjectInspectorConverters.getConverter(arguments[1], PrimitiveObjectInspectorFactory.writableIntObjectInspector);
                    break;
                case LONG:
                    longConverter = ObjectInspectorConverters.getConverter(arguments[1], PrimitiveObjectInspectorFactory.writableLongObjectInspector);
                    break;
                default:
                    throw new UDFArgumentTypeException(1, getFuncName().toUpperCase() + " second argument only takes integer values");
            }
        }
    }
    inputType1 = inputOI.getPrimitiveCategory();
    ObjectInspector outputOI = null;
    switch(inputType1) {
        case DECIMAL:
            outputOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputType1);
            break;
        case VOID:
        case BYTE:
        case SHORT:
        case INT:
        case LONG:
        case FLOAT:
        case DOUBLE:
            outputOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputType1);
            break;
        default:
            throw new UDFArgumentTypeException(0, "Only numeric or string group data types are allowed for TRUNC function. Got " + inputType1.name());
    }
    return outputOI;
}
Also used : UDFArgumentException(org.apache.hadoop.hive.ql.exec.UDFArgumentException) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) ConstantObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector) UDFArgumentLengthException(org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException) UDFArgumentTypeException(org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException)
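For context, here is a minimal caller-side sketch, not taken from the Hive sources (the class name, literal values, and expected result are assumptions), that drives GenericUDFTrunc with a DOUBLE value and a non-constant LONG scale, i.e. the branch above that wires up longConverter with writableLongObjectInspector:

import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFTrunc;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.LongWritable;

public class TruncLongScaleSketch {
    public static void main(String[] args) throws Exception {
        GenericUDFTrunc udf = new GenericUDFTrunc();
        // A DOUBLE value plus a non-constant LONG scale: the numeric
        // initialization above takes the LONG case and sets up longConverter.
        ObjectInspector[] ois = {
            PrimitiveObjectInspectorFactory.writableDoubleObjectInspector,
            PrimitiveObjectInspectorFactory.writableLongObjectInspector
        };
        udf.initialize(ois);
        DeferredObject[] row = {
            new DeferredJavaObject(new DoubleWritable(1234.567)),
            new DeferredJavaObject(new LongWritable(2L))
        };
        // TRUNC(1234.567, 2) should yield 1234.56 as a DoubleWritable.
        System.out.println(udf.evaluate(row));
        udf.close();
    }
}

If the second argument were a ConstantObjectInspector instead, the scale would be parsed once during initialization (the inputSacleConst path above) rather than converted per row.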

Example 37 with PrimitiveObjectInspectorFactory.writableLongObjectInspector

Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableLongObjectInspector in project hive by apache.

From class TestGenericUDFFromUnixTime, method testTimestampOtherTimezone:

@Test
public void testTimestampOtherTimezone() throws HiveException {
    ObjectInspector valueLongOI = PrimitiveObjectInspectorFactory.writableLongObjectInspector;
    GenericUDFFromUnixTime udf = new GenericUDFFromUnixTime();
    ObjectInspector[] args = { valueLongOI };
    udf.initialize(args);
    Timestamp ts = Timestamp.valueOf("1969-12-31 15:59:46");
    TimestampTZ tstz1 = TimestampTZUtil.convert(ts, ZoneId.of("America/Los_Angeles"));
    TimestampTZ tstz2 = TimestampTZUtil.convert(ts, ZoneId.of("America/New_York"));
    TimestampTZ tstz3 = TimestampTZUtil.convert(ts, ZoneId.of("Europe/London"));
    TimestampTZ tstz4 = TimestampTZUtil.convert(ts, ZoneId.of("Europe/Rome"));
    runAndVerify(udf, new LongWritable(tstz1.getEpochSecond()), new Text("1969-12-31 15:59:46"));
    runAndVerify(udf, new LongWritable(tstz2.getEpochSecond()), new Text("1969-12-31 12:59:46"));
    runAndVerify(udf, new LongWritable(tstz3.getEpochSecond()), new Text("1969-12-31 06:59:46"));
    runAndVerify(udf, new LongWritable(tstz4.getEpochSecond()), new Text("1969-12-31 06:59:46"));
}
Also used : TimestampTZ(org.apache.hadoop.hive.common.type.TimestampTZ) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) Text(org.apache.hadoop.io.Text) LongWritable(org.apache.hadoop.io.LongWritable) Timestamp(org.apache.hadoop.hive.common.type.Timestamp) Test(org.junit.Test)
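The runAndVerify helper is not shown on this page. The sketch below is an assumption about what its single-argument overload plausibly looks like, not the actual method from TestGenericUDFFromUnixTime:

// Assumed shape of the helper; requires import static org.junit.Assert.assertEquals;
// plus DeferredJavaObject/DeferredObject from GenericUDF.
private void runAndVerify(GenericUDFFromUnixTime udf, LongWritable epochSeconds, Text expected)
        throws HiveException {
    DeferredObject[] evalArgs = { new DeferredJavaObject(epochSeconds) };
    Text result = (Text) udf.evaluate(evalArgs);
    assertEquals(expected.toString(), result.toString());
}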

Example 38 with PrimitiveObjectInspectorFactory.writableLongObjectInspector

Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableLongObjectInspector in project hive by apache.

From class TestGenericUDFFromUnixTime, method testTimestampWithArg2:

@Test
public void testTimestampWithArg2() throws HiveException {
    ObjectInspector valueLongOI = PrimitiveObjectInspectorFactory.writableLongObjectInspector;
    ObjectInspector valueStringOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
    GenericUDFFromUnixTime udf = new GenericUDFFromUnixTime();
    ObjectInspector[] args = { valueLongOI, valueStringOI };
    udf.initialize(args);
    Timestamp ts = Timestamp.valueOf("2010-01-13 11:57:40");
    TimestampTZ tstz1 = TimestampTZUtil.convert(ts, ZoneId.systemDefault());
    runAndVerify(udf, new LongWritable(tstz1.getEpochSecond()), "MM/dd/yy HH:mm:ss", new Text("01/13/10 11:57:40"));
    runAndVerify(udf, new LongWritable(tstz1.getEpochSecond()), "EEEE", new Text("Wednesday"));
    runAndVerify(udf, new LongWritable(tstz1.getEpochSecond()), "yyyy-MM-dd'T'HH:mm:ssXXX", new Text("2010-01-13T11:57:40-08:00"));
    runAndVerify(udf, new LongWritable(tstz1.getEpochSecond()), "uuuu-MM-dd'T'HH:mm:ssXXX", new Text("2010-01-13T11:57:40-08:00"));
}
Also used : TimestampTZ(org.apache.hadoop.hive.common.type.TimestampTZ) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) Text(org.apache.hadoop.io.Text) LongWritable(org.apache.hadoop.io.LongWritable) Timestamp(org.apache.hadoop.hive.common.type.Timestamp) Test(org.junit.Test)
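The three-argument calls above presumably go through an overload that wraps the format pattern in a Text and passes it as a second deferred argument; again, the following is an assumed shape, not the actual helper:

// Assumed counterpart for the (value, format, expected) calls above.
private void runAndVerify(GenericUDFFromUnixTime udf, LongWritable epochSeconds,
        String format, Text expected) throws HiveException {
    DeferredObject[] evalArgs = {
        new DeferredJavaObject(epochSeconds),
        new DeferredJavaObject(new Text(format))
    };
    assertEquals(expected.toString(), ((Text) udf.evaluate(evalArgs)).toString());
}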

Example 39 with PrimitiveObjectInspectorFactory.writableLongObjectInspector

Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableLongObjectInspector in project hive by apache.

From class TestGenericUDFAddMonths, method testAddMonthsLong:

@Test
public void testAddMonthsLong() throws HiveException {
    @SuppressWarnings("resource") GenericUDFAddMonths udf = new GenericUDFAddMonths();
    ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
    ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableLongObjectInspector;
    ObjectInspector[] arguments = { valueOI0, valueOI1 };
    try {
        udf.initialize(arguments);
        assertTrue("add_months exception expected", false);
    } catch (UDFArgumentTypeException e) {
        assertEquals("add_months test", "add_months only takes INT/SHORT/BYTE types as 2nd argument, got LONG", e.getMessage());
    }
}
Also used : ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) UDFArgumentTypeException(org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException) Test(org.junit.Test)
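For contrast, here is a small standalone sketch of the accepted case, not part of the Hive tests (class name is an assumption): with an INT second argument the same initialization should succeed rather than throw.

import org.apache.hadoop.hive.ql.udf.generic.GenericUDFAddMonths;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class AddMonthsIntArgSketch {
    public static void main(String[] args) throws Exception {
        GenericUDFAddMonths udf = new GenericUDFAddMonths();
        ObjectInspector[] arguments = {
            PrimitiveObjectInspectorFactory.writableStringObjectInspector,
            PrimitiveObjectInspectorFactory.writableIntObjectInspector
        };
        // INT is in the accepted INT/SHORT/BYTE set for the 2nd argument,
        // so no UDFArgumentTypeException is expected here.
        udf.initialize(arguments);
        udf.close();
    }
}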

Example 40 with PrimitiveObjectInspectorFactory.writableLongObjectInspector

Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableLongObjectInspector in project hive by apache.

From class TestKuduSerDe, method testSerDeRoundTrip:

@Test
public void testSerDeRoundTrip() throws Exception {
    KuduSerDe serDe = new KuduSerDe();
    serDe.initialize(BASE_CONF, TBL_PROPS, null);
    PartialRow before = SCHEMA.newPartialRow();
    before.addByte("key", (byte) 1);
    before.addShort("int16", (short) 1);
    before.addInt("int32", 1);
    before.addLong("int64", 1L);
    before.addBoolean("bool", true);
    before.addFloat("float", 1.1f);
    before.addDouble("double", 1.1d);
    before.addString("string", "one");
    before.addBinary("binary", "one".getBytes(UTF_8));
    before.addTimestamp("timestamp", new Timestamp(NOW_MS));
    before.addDecimal("decimal", new BigDecimal("1.111"));
    before.setNull("null");
    before.addInt("default", 1);
    KuduWritable beforeWritable = new KuduWritable(before);
    Object object = serDe.deserialize(beforeWritable);
    // Capitalized `key` field to check for field case insensitivity.
    List<String> fieldNames = Arrays.asList("KEY", "int16", "int32", "int64", "bool", "float", "double", "string", "binary", "timestamp", "decimal", "null", "default");
    List<ObjectInspector> ois = Arrays.asList(PrimitiveObjectInspectorFactory.writableByteObjectInspector, PrimitiveObjectInspectorFactory.writableShortObjectInspector, PrimitiveObjectInspectorFactory.writableIntObjectInspector, PrimitiveObjectInspectorFactory.writableLongObjectInspector, PrimitiveObjectInspectorFactory.writableBooleanObjectInspector, PrimitiveObjectInspectorFactory.writableFloatObjectInspector, PrimitiveObjectInspectorFactory.writableDoubleObjectInspector, PrimitiveObjectInspectorFactory.writableStringObjectInspector, PrimitiveObjectInspectorFactory.writableBinaryObjectInspector, PrimitiveObjectInspectorFactory.writableTimestampObjectInspector, PrimitiveObjectInspectorFactory.writableHiveDecimalObjectInspector, PrimitiveObjectInspectorFactory.writableStringObjectInspector, PrimitiveObjectInspectorFactory.writableIntObjectInspector);
    StandardStructObjectInspector objectInspector = ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, ois);
    KuduWritable afterWritable = serDe.serialize(object, objectInspector);
    PartialRow after = afterWritable.getPartialRow();
    for (int i = 0; i < SCHEMA.getColumnCount(); i++) {
        if (SCHEMA.getColumnByIndex(i).getType() == Type.BINARY) {
            assertArrayEquals("Columns not equal at index: " + i, before.getBinaryCopy(i), after.getBinaryCopy(i));
        } else {
            assertEquals("Columns not equal at index: " + i, before.getObject(i), after.getObject(i));
        }
    }
}
Also used : StandardStructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) PartialRow(org.apache.kudu.client.PartialRow) CoreMatchers.containsString(org.hamcrest.CoreMatchers.containsString) Timestamp(java.sql.Timestamp) BigDecimal(java.math.BigDecimal) Test(org.junit.Test)
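As a footnote on the struct-inspector plumbing used above, the standalone sketch below (class name and values are assumptions, not from the Hive or Kudu sources) builds a StandardStructObjectInspector with a single BIGINT field backed by writableLongObjectInspector and reads a value back through it:

import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.LongWritable;

public class StructOiSketch {
    public static void main(String[] args) {
        // Minimal struct OI with a single BIGINT field, mirroring the "int64"
        // column in the Kudu round-trip test above.
        List<String> names = Arrays.asList("int64");
        List<ObjectInspector> ois = Arrays.asList(
            (ObjectInspector) PrimitiveObjectInspectorFactory.writableLongObjectInspector);
        StandardStructObjectInspector structOI =
            ObjectInspectorFactory.getStandardStructObjectInspector(names, ois);
        Object row = structOI.create();
        structOI.setStructFieldData(row, structOI.getStructFieldRef("int64"), new LongWritable(1L));
        // Prints the LongWritable stored in the "int64" field (1).
        System.out.println(structOI.getStructFieldData(row, structOI.getStructFieldRef("int64")));
    }
}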

Aggregations

ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 37 usages
LongWritable (org.apache.hadoop.io.LongWritable): 32 usages
Test (org.junit.Test): 30 usages
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector): 25 usages
DeferredJavaObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject): 17 usages
DeferredObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject): 17 usages
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 8 usages
UDFArgumentLengthException (org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException): 7 usages
UDFArgumentTypeException (org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException): 7 usages
Text (org.apache.hadoop.io.Text): 7 usages
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable): 6 usages
UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException): 5 usages
ConstantObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector): 4 usages
StandardStructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector): 4 usages
TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo): 4 usages
Timestamp (org.apache.hadoop.hive.common.type.Timestamp): 3 usages
TimestampTZ (org.apache.hadoop.hive.common.type.TimestampTZ): 3 usages
LongColumnVector (org.apache.hadoop.hive.ql.exec.vector.LongColumnVector): 3 usages
VectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch): 3 usages
Converter (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter): 3 usages