Example 46 with PrimitiveObjectInspectorFactory.writableLongObjectInspector

use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableLongObjectInspector in project hive by apache.

the class GenericUDFAdd10 method initialize.

@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 1) {
        throw new UDFArgumentLengthException("ADD10() requires 1 argument, got " + arguments.length);
    }
    if (arguments[0].getCategory() != Category.PRIMITIVE) {
        throw new UDFArgumentException("ADD10 only takes primitive types, got " + arguments[0].getTypeName());
    }
    argumentOI = (PrimitiveObjectInspector) arguments[0];
    inputType = argumentOI.getPrimitiveCategory();
    ObjectInspector outputOI = null;
    switch(inputType) {
        case SHORT:
        case BYTE:
        case INT:
            inputConverter = ObjectInspectorConverters.getConverter(arguments[0], PrimitiveObjectInspectorFactory.writableIntObjectInspector);
            outputOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
            break;
        case LONG:
            inputConverter = ObjectInspectorConverters.getConverter(arguments[0], PrimitiveObjectInspectorFactory.writableLongObjectInspector);
            outputOI = PrimitiveObjectInspectorFactory.writableLongObjectInspector;
            break;
        case FLOAT:
        case STRING:
        case DOUBLE:
            inputConverter = ObjectInspectorConverters.getConverter(arguments[0], PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
            outputOI = PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
            break;
        case DECIMAL:
            outputOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(((PrimitiveObjectInspector) arguments[0]).getTypeInfo());
            inputConverter = ObjectInspectorConverters.getConverter(arguments[0], outputOI);
            break;
        default:
            throw new UDFArgumentException("ADD10 only takes SHORT/BYTE/INT/LONG/DOUBLE/FLOAT/STRING/DECIMAL types, got " + inputType);
    }
    return outputOI;
}
Also used : UDFArgumentException(org.apache.hadoop.hive.ql.exec.UDFArgumentException) HiveDecimalObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) UDFArgumentLengthException(org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException)
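
For context, a minimal sketch of the companion evaluate() method such a UDF would pair with this initialize(): it reuses inputConverter and returns writables matching the output inspector chosen above. The field names (inputConverter, inputType), the +10 arithmetic, and the usual Hive writable/exception imports (IntWritable, LongWritable, DoubleWritable, HiveDecimalWritable, HiveException) are assumptions for illustration, not the verbatim Hive source.

@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
    Object valObject = arguments[0].get();
    if (valObject == null) {
        return null;
    }
    switch(inputType) {
        case SHORT:
        case BYTE:
        case INT:
            // the converter produces an IntWritable, matching writableIntObjectInspector
            IntWritable iw = (IntWritable) inputConverter.convert(valObject);
            return iw == null ? null : new IntWritable(iw.get() + 10);
        case LONG:
            // the converter produces a LongWritable, matching writableLongObjectInspector
            LongWritable lw = (LongWritable) inputConverter.convert(valObject);
            return lw == null ? null : new LongWritable(lw.get() + 10L);
        case FLOAT:
        case STRING:
        case DOUBLE:
            DoubleWritable dw = (DoubleWritable) inputConverter.convert(valObject);
            return dw == null ? null : new DoubleWritable(dw.get() + 10d);
        case DECIMAL:
            HiveDecimalWritable dec = (HiveDecimalWritable) inputConverter.convert(valObject);
            return dec == null ? null : new HiveDecimalWritable(dec.getHiveDecimal().add(HiveDecimal.create(10)));
        default:
            throw new HiveException("Unexpected input type " + inputType);
    }
}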

Example 47 with PrimitiveObjectInspectorFactory.writableLongObjectInspector

use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableLongObjectInspector in project hive by apache.

the class TestDeserializer method testListDeserialize.

@Test
public void testListDeserialize() {
    Schema schema = new Schema(optional(1, "list_type", Types.ListType.ofOptional(2, Types.LongType.get())));
    StructObjectInspector inspector = ObjectInspectorFactory.getStandardStructObjectInspector(Arrays.asList("list_type"), Arrays.asList(ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableLongObjectInspector)));
    Deserializer deserializer = new Deserializer.Builder().schema(schema).writerInspector((StructObjectInspector) IcebergObjectInspector.create(schema)).sourceInspector(inspector).build();
    Record expected = GenericRecord.create(schema);
    expected.set(0, Collections.singletonList(1L));
    Object[] data = new Object[] { new Object[] { new LongWritable(1L) } };
    Record actual = deserializer.deserialize(data);
    Assert.assertEquals(expected, actual);
}
Also used : Schema(org.apache.iceberg.Schema) Record(org.apache.iceberg.data.Record) GenericRecord(org.apache.iceberg.data.GenericRecord) LongWritable(org.apache.hadoop.io.LongWritable) StandardStructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) Test(org.junit.Test)
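
As a side note, the source inspector built in this test can be exercised on its own. A hedged sketch (ListObjectInspector and LongObjectInspector imports and the asserts are illustrative, not part of the test) of how the list inspector reads the raw Object[] value that the Deserializer receives:

ListObjectInspector listOI = ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
// the standard list inspector accepts either a List or an Object[] as the list value
Object listData = new Object[] { new LongWritable(1L) };
Assert.assertEquals(1, listOI.getListLength(listData));
LongObjectInspector elementOI = (LongObjectInspector) listOI.getListElementObjectInspector();
Assert.assertEquals(1L, elementOI.get(listOI.getListElement(listData, 0)));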

Example 48 with PrimitiveObjectInspectorFactory.writableLongObjectInspector

use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableLongObjectInspector in project hive by apache.

the class TestDeserializer method testSchemaDeserialize.

@Test
public void testSchemaDeserialize() {
    StandardStructObjectInspector schemaObjectInspector = ObjectInspectorFactory.getStandardStructObjectInspector(Arrays.asList("0:col1", "1:col2"), Arrays.asList(PrimitiveObjectInspectorFactory.writableLongObjectInspector, PrimitiveObjectInspectorFactory.writableStringObjectInspector));
    Deserializer deserializer = new Deserializer.Builder().schema(CUSTOMER_SCHEMA).writerInspector((StructObjectInspector) IcebergObjectInspector.create(CUSTOMER_SCHEMA)).sourceInspector(schemaObjectInspector).build();
    Record expected = GenericRecord.create(CUSTOMER_SCHEMA);
    expected.set(0, 1L);
    expected.set(1, "Bob");
    Record actual = deserializer.deserialize(new Object[] { new LongWritable(1L), new Text("Bob") });
    Assert.assertEquals(expected, actual);
}
Also used : StandardStructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector) Record(org.apache.iceberg.data.Record) GenericRecord(org.apache.iceberg.data.GenericRecord) Text(org.apache.hadoop.io.Text) LongWritable(org.apache.hadoop.io.LongWritable) Test(org.junit.Test)
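
A similar hedged sketch (StructField and LongObjectInspector imports and the asserts are illustrative) of how the same source inspector resolves fields from the Object[] row passed to deserialize():

Object row = new Object[] { new LongWritable(1L), new Text("Bob") };
StructField idField = schemaObjectInspector.getStructFieldRef("0:col1");
// the field inspector for this column is the writable long inspector supplied above
LongObjectInspector idOI = (LongObjectInspector) idField.getFieldObjectInspector();
Assert.assertEquals(1L, idOI.get(schemaObjectInspector.getStructFieldData(row, idField)));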

Example 49 with PrimitiveObjectInspectorFactory.writableLongObjectInspector

use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableLongObjectInspector in project hive by apache.

the class TestObjectInspectorConverters method convertLong.

private void convertLong() {
    Converter longConverter = ObjectInspectorConverters.getConverter(PrimitiveObjectInspectorFactory.javaIntObjectInspector, PrimitiveObjectInspectorFactory.writableLongObjectInspector);
    assertEquals("LongConverter", new LongWritable(0), longConverter.convert(Integer.valueOf(0)));
    assertEquals("LongConverter", new LongWritable(1), longConverter.convert(Integer.valueOf(1)));
    assertEquals("LongConverter", null, longConverter.convert(null));
}
Also used : Converter(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter) LongWritable(org.apache.hadoop.io.LongWritable)
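
For comparison, a brief sketch (not part of the test) of the same Converter machinery with writableLongObjectInspector on the input side, converting back to plain Java Longs:

Converter toJavaLong = ObjectInspectorConverters.getConverter(PrimitiveObjectInspectorFactory.writableLongObjectInspector, PrimitiveObjectInspectorFactory.javaLongObjectInspector);
assertEquals("LongConverter", Long.valueOf(5L), toJavaLong.convert(new LongWritable(5L)));
assertEquals("LongConverter", null, toJavaLong.convert(null));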

Example 50 with PrimitiveObjectInspectorFactory.writableLongObjectInspector

use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableLongObjectInspector in project flink by apache.

the class HiveGenericUDFGrouping method initialize.

@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length < 2) {
        throw new UDFArgumentLengthException("grouping() requires at least 2 arguments, got " + arguments.length);
    }
    if (arguments[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
        throw new UDFArgumentTypeException(0, "The first argument to grouping() must be primitive");
    }
    PrimitiveObjectInspector arg1OI = (PrimitiveObjectInspector) arguments[0];
    // grouping__id can be an INT or a LONG depending on how it is produced; in most cases it should be LONG.
    if (!(arg1OI.getPrimitiveCategory() == PrimitiveObjectInspector.PrimitiveCategory.INT || arg1OI.getPrimitiveCategory() == PrimitiveObjectInspector.PrimitiveCategory.LONG)) {
        throw new UDFArgumentTypeException(0, "The first argument to grouping() must be an int/long. Got: " + arg1OI.getPrimitiveCategory());
    }
    groupingIdOI = arg1OI;
    indices = new int[arguments.length - 1];
    for (int i = 1; i < arguments.length; i++) {
        PrimitiveObjectInspector arg2OI = (PrimitiveObjectInspector) arguments[i];
        if (!(arg2OI instanceof ConstantObjectInspector)) {
            throw new UDFArgumentTypeException(i, "Must be a constant. Got: " + arg2OI.getClass().getSimpleName());
        }
        indices[i - 1] = PrimitiveObjectInspectorUtils.getInt(((ConstantObjectInspector) arguments[i]).getWritableConstantValue(), arg2OI);
    }
    return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
}
Also used : UDFArgumentLengthException(org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException) UDFArgumentTypeException(org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ConstantObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector)
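
A minimal sketch of an evaluate() consistent with this initialize(): because the method returns writableLongObjectInspector, the result is a LongWritable built from the grouping__id bits at the constant indices. The result field, the use of PrimitiveObjectInspectorUtils.getLong, and the exact bit layout of grouping__id are assumptions for illustration, not the verbatim Flink source.

private final LongWritable result = new LongWritable();

@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
    // grouping__id may arrive as an int or a long; read it generically
    long groupingId = PrimitiveObjectInspectorUtils.getLong(arguments[0].get(), groupingIdOI);
    long value = 0L;
    for (int index : indices) {
        // take the bit of grouping__id at each constant position and pack the bits together
        value = (value << 1) | ((groupingId >>> index) & 1L);
    }
    result.set(value);
    return result;
}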

Aggregations

ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 37 uses
LongWritable (org.apache.hadoop.io.LongWritable): 32 uses
Test (org.junit.Test): 30 uses
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector): 25 uses
DeferredJavaObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject): 17 uses
DeferredObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject): 17 uses
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 8 uses
UDFArgumentLengthException (org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException): 7 uses
UDFArgumentTypeException (org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException): 7 uses
Text (org.apache.hadoop.io.Text): 7 uses
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable): 6 uses
UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException): 5 uses
ConstantObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector): 4 uses
StandardStructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector): 4 uses
TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo): 4 uses
Timestamp (org.apache.hadoop.hive.common.type.Timestamp): 3 uses
TimestampTZ (org.apache.hadoop.hive.common.type.TimestampTZ): 3 uses
LongColumnVector (org.apache.hadoop.hive.ql.exec.vector.LongColumnVector): 3 uses
VectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch): 3 uses
Converter (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter): 3 uses