Example 81 with PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector

Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector in project hive by apache.

From the class TypedBytesRecordReader, method initialize:

public void initialize(InputStream in, Configuration conf, Properties tbl) throws IOException {
    din = new DataInputStream(in);
    tbIn = new TypedBytesWritableInput(din);
    tbOut = new TypedBytesWritableOutput(barrStr);
    // Read the declared column types from the table properties.
    String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);
    columnTypes = Arrays.asList(columnTypeProperty.split(","));
    // Build a writable object inspector for each destination column type.
    for (String columnType : columnTypes) {
        PrimitiveTypeInfo dstTypeInfo = TypeInfoFactory.getPrimitiveTypeInfo(columnType);
        dstOIns.add(PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(dstTypeInfo));
    }
}
Also used: DataInputStream (java.io.DataInputStream), PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)
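
For reference, a minimal standalone sketch of the same lookup (the class name and column type strings are hypothetical): TypeInfoFactory resolves each type name to a PrimitiveTypeInfo, and the factory returns the matching writable object inspector.

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class InspectorLookupSketch {
    public static void main(String[] args) {
        // Hypothetical column types, as they might appear in LIST_COLUMN_TYPES.
        for (String typeName : new String[] { "int", "string", "double" }) {
            PrimitiveTypeInfo typeInfo = TypeInfoFactory.getPrimitiveTypeInfo(typeName);
            ObjectInspector oi = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(typeInfo);
            // The factory caches inspectors, so repeated lookups return the same instance.
            System.out.println(typeName + " -> " + oi.getTypeName());
        }
    }
}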

Example 82 with PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector

Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector in project hive by apache.

From the class GenericUDFAdd10, method initialize:

@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 1) {
        throw new UDFArgumentLengthException("ADD10() requires 1 argument, got " + arguments.length);
    }
    if (arguments[0].getCategory() != Category.PRIMITIVE) {
        throw new UDFArgumentException("ADD10 only takes primitive types, got " + arguments[0].getTypeName());
    }
    argumentOI = (PrimitiveObjectInspector) arguments[0];
    inputType = argumentOI.getPrimitiveCategory();
    ObjectInspector outputOI = null;
    switch (inputType) {
        // Narrow integer inputs (and int itself) are widened and handled as int.
        case SHORT:
        case BYTE:
        case INT:
            inputConverter = ObjectInspectorConverters.getConverter(arguments[0], PrimitiveObjectInspectorFactory.writableIntObjectInspector);
            outputOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
            break;
        case LONG:
            inputConverter = ObjectInspectorConverters.getConverter(arguments[0], PrimitiveObjectInspectorFactory.writableLongObjectInspector);
            outputOI = PrimitiveObjectInspectorFactory.writableLongObjectInspector;
            break;
        // Float, string, and double inputs are all handled as double.
        case FLOAT:
        case STRING:
        case DOUBLE:
            inputConverter = ObjectInspectorConverters.getConverter(arguments[0], PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
            outputOI = PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
            break;
        case DECIMAL:
            // Derive the output inspector from the argument's own type info so the
            // decimal's precision and scale are preserved.
            outputOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(((PrimitiveObjectInspector) arguments[0]).getTypeInfo());
            inputConverter = ObjectInspectorConverters.getConverter(arguments[0], outputOI);
            break;
        default:
            throw new UDFArgumentException("ADD10 only takes SHORT/BYTE/INT/LONG/DOUBLE/FLOAT/STRING/DECIMAL types, got " + inputType);
    }
    return outputOI;
}
Also used: UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException), UDFArgumentLengthException (org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException), HiveDecimalObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector), ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector)
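
The converter chosen in initialize is then applied per row in evaluate. Below is a minimal sketch of that pattern, assuming the UDF adds 10 to the converted value; the real GenericUDFAdd10 body may differ in detail.

@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
    // Hypothetical evaluate() illustrating how the converter set up in
    // initialize() is used; a null input propagates as null.
    Object converted = inputConverter.convert(arguments[0].get());
    if (converted == null) {
        return null;
    }
    switch (inputType) {
        case SHORT:
        case BYTE:
        case INT:
            IntWritable iw = (IntWritable) converted;
            iw.set(iw.get() + 10);
            return iw;
        case LONG:
            LongWritable lw = (LongWritable) converted;
            lw.set(lw.get() + 10L);
            return lw;
        case DECIMAL:
            HiveDecimalWritable hdw = (HiveDecimalWritable) converted;
            hdw.set(hdw.getHiveDecimal().add(HiveDecimal.create(10)));
            return hdw;
        default:
            // FLOAT, STRING, and DOUBLE were all converted to DoubleWritable.
            DoubleWritable dw = (DoubleWritable) converted;
            dw.set(dw.get() + 10d);
            return dw;
    }
}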

Example 83 with PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector

Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector in project hive by apache.

From the class DynamicPartitionPruner, method prunePartitionSingleSource:

@VisibleForTesting
protected void prunePartitionSingleSource(String source, SourceInfo si) throws HiveException {
    if (si.skipPruning.get()) {
        // in this case we've determined that there's too much data
        // to prune dynamically.
        LOG.info("Skip pruning on " + source + ", column " + si.columnName);
        return;
    }
    Set<Object> values = si.values;
    String columnName = si.columnName;
    if (LOG.isDebugEnabled()) {
        StringBuilder sb = new StringBuilder("Pruning ");
        sb.append(columnName);
        sb.append(" with ");
        for (Object value : values) {
            sb.append(value == null ? null : value.toString());
            sb.append(", ");
        }
        LOG.debug(sb.toString());
    }
    // Build a writable object inspector for the partition column's type, and a
    // converter from the incoming string values to that type.
    ObjectInspector oi = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(si.columnType));
    Converter converter = ObjectInspectorConverters.getConverter(PrimitiveObjectInspectorFactory.javaStringObjectInspector, oi);
    // Wrap the column in a single-field struct so the partition-key expression
    // can be evaluated against it.
    StructObjectInspector soi = ObjectInspectorFactory.getStandardStructObjectInspector(Collections.singletonList(columnName), Collections.singletonList(oi));
    @SuppressWarnings("rawtypes") ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(si.partKey);
    eval.initialize(soi);
    applyFilterToPartitions(converter, eval, columnName, values);
}
Also used: ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector), ExprNodeEvaluator (org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator), Converter (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter), VisibleForTesting (com.google.common.annotations.VisibleForTesting)
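
As a small illustration of the converter built above (the column type and value here are hypothetical), each Java string partition value is converted into the writable form of the declared column type before the filter is applied:

// Hypothetical: an int partition column, so string values become IntWritable.
ObjectInspector oi = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo("int"));
Converter converter = ObjectInspectorConverters.getConverter(PrimitiveObjectInspectorFactory.javaStringObjectInspector, oi);
Object converted = converter.convert("42");  // an IntWritable holding 42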

Example 84 with PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector

Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector in project hive by apache.

From the class TestGenericUDFFloor, method testChar:

@Test
public void testChar() throws HiveException {
    GenericUDFFloor udf = new GenericUDFFloor();
    // A char(12) value holding a numeric string.
    HiveChar vc = new HiveChar("32300.004747", 12);
    HiveCharWritable input = new HiveCharWritable(vc);
    CharTypeInfo inputTypeInfo = TypeInfoFactory.getCharTypeInfo(12);
    ObjectInspector[] inputOIs = { PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo) };
    DeferredObject[] args = { new DeferredJavaObject(input) };
    // FLOOR on a char input resolves to a bigint (long) result type.
    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
    Assert.assertEquals(TypeInfoFactory.longTypeInfo, oi.getTypeInfo());
    LongWritable res = (LongWritable) udf.evaluate(args);
    Assert.assertEquals(32300L, res.get());
}
Also used: PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector), ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), DeferredJavaObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject), DeferredObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject), CharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo), HiveChar (org.apache.hadoop.hive.common.type.HiveChar), HiveCharWritable (org.apache.hadoop.hive.serde2.io.HiveCharWritable), LongWritable (org.apache.hadoop.io.LongWritable), Test (org.junit.Test)

Example 85 with PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector

Use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector in project hive by apache.

From the class TestGenericUDFOPNegative, method testVarchar:

@Test
public void testVarchar() throws HiveException {
    GenericUDFOPNegative udf = new GenericUDFOPNegative();
    // A varchar(12) value holding a numeric string.
    HiveVarchar vc = new HiveVarchar("32300.004747", 12);
    HiveVarcharWritable input = new HiveVarcharWritable(vc);
    VarcharTypeInfo inputTypeInfo = TypeInfoFactory.getVarcharTypeInfo(12);
    ObjectInspector[] inputOIs = { PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo) };
    DeferredObject[] args = { new DeferredJavaObject(input) };
    // Unary minus on a varchar input resolves to a double result type.
    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
    Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo());
    DoubleWritable res = (DoubleWritable) udf.evaluate(args);
    Assert.assertEquals(-32300.004747, res.get(), 0.0);
}
Also used: PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector), ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), DeferredJavaObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject), DeferredObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject), VarcharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo), HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar), HiveVarcharWritable (org.apache.hadoop.hive.serde2.io.HiveVarcharWritable), DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable), Test (org.junit.Test)
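
Both of these tests rely on getPrimitiveWritableObjectInspector preserving the qualifiers of parameterized types. A minimal sketch of that behavior (the commented output is the expected result, not captured from a run):

// Parameterized primitive types keep their length qualifiers in the inspector.
ObjectInspector charOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getCharTypeInfo(12));
ObjectInspector varcharOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getVarcharTypeInfo(12));
System.out.println(charOI.getTypeName());     // expected: char(12)
System.out.println(varcharOI.getTypeName());  // expected: varchar(12)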

Aggregations

ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 69 uses
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector): 63 uses
Test (org.junit.Test): 45 uses
DeferredJavaObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject): 35 uses
DeferredObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject): 35 uses
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable): 24 uses
UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException): 16 uses
ArrayList (java.util.ArrayList): 15 uses
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 13 uses
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 12 uses
PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory): 12 uses
StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector): 12 uses
DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo): 10 uses
LongWritable (org.apache.hadoop.io.LongWritable): 10 uses
UDFArgumentTypeException (org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException): 9 uses
UDFArgumentLengthException (org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException): 8 uses
HiveCharWritable (org.apache.hadoop.hive.serde2.io.HiveCharWritable): 7 uses
HiveVarcharWritable (org.apache.hadoop.hive.serde2.io.HiveVarcharWritable): 7 uses
Category (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category): 6 uses
Text (org.apache.hadoop.io.Text): 6 uses