Search in sources:

Example 16 with PrimitiveObjectInspectorFactory.writableShortObjectInspector

use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableShortObjectInspector in project hive by apache.

Source: class TestGenericUDFPower, method testBytePowerShort.

@Test
public void testBytePowerShort() throws HiveException {
    GenericUDFPower udf = new GenericUDFPower();
    // power(tinyint, smallint): both operands should be promoted to double.
    ByteWritable left = new ByteWritable((byte) 2);
    ShortWritable right = new ShortWritable((short) 4);
    ObjectInspector[] inputOIs = { PrimitiveObjectInspectorFactory.writableByteObjectInspector, PrimitiveObjectInspectorFactory.writableShortObjectInspector };
    DeferredObject[] args = { new DeferredJavaObject(left), new DeferredJavaObject(right) };
    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
    // The result type of power() is always double, regardless of operand types.
    Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo());
    DoubleWritable res = (DoubleWritable) udf.evaluate(args);
    // 2^4 == 16. Compare primitives with an explicit delta instead of the
    // deprecated new Double(...) boxing constructor (deprecated since Java 9).
    Assert.assertEquals(16.0, res.get(), 0.0);
}
Also used : PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) DeferredJavaObject(org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject) DeferredObject(org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) Test(org.junit.Test)

Example 17 with PrimitiveObjectInspectorFactory.writableShortObjectInspector

use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableShortObjectInspector in project hive by apache.

Source: class TestGenericUDFOPMod, method testModByZero2.

@Test
public void testModByZero2() throws HiveException {
    GenericUDFOPMod udf = new GenericUDFOPMod();
    // Short dividend with a zero short divisor.
    ShortWritable dividend = new ShortWritable((short) 4);
    ShortWritable divisor = new ShortWritable((short) 0);
    ObjectInspector[] inputOIs = { PrimitiveObjectInspectorFactory.writableShortObjectInspector, PrimitiveObjectInspectorFactory.writableShortObjectInspector };
    DeferredObject[] args = { new DeferredJavaObject(dividend), new DeferredJavaObject(divisor) };
    udf.initialize(inputOIs);
    ShortWritable result = (ShortWritable) udf.evaluate(args);
    // SQL semantics: modulo by zero yields NULL rather than throwing.
    Assert.assertNull(result);
}
Also used : PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) DeferredJavaObject(org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject) DeferredObject(org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) Test(org.junit.Test)

Example 18 with PrimitiveObjectInspectorFactory.writableShortObjectInspector

use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableShortObjectInspector in project hive by apache.

Source: class TestGenericUDFOPMultiply, method testByteTimesShort.

@Test
public void testByteTimesShort() throws HiveException {
    GenericUDFOPMultiply udf = new GenericUDFOPMultiply();
    // tinyint * smallint: result is promoted to the wider operand type (smallint).
    ByteWritable left = new ByteWritable((byte) 4);
    ShortWritable right = new ShortWritable((short) 6);
    ObjectInspector[] inputOIs = { PrimitiveObjectInspectorFactory.writableByteObjectInspector, PrimitiveObjectInspectorFactory.writableShortObjectInspector };
    DeferredObject[] args = { new DeferredJavaObject(left), new DeferredJavaObject(right) };
    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
    // JUnit convention: expected value first, actual second — otherwise failure
    // messages report the values the wrong way round.
    Assert.assertEquals(TypeInfoFactory.shortTypeInfo, oi.getTypeInfo());
    ShortWritable res = (ShortWritable) udf.evaluate(args);
    // 4 * 6 == 24
    Assert.assertEquals(24, res.get());
}
Also used : PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) DeferredJavaObject(org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject) DeferredObject(org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) Test(org.junit.Test)

Example 19 with PrimitiveObjectInspectorFactory.writableShortObjectInspector

use of org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableShortObjectInspector in project hive by apache.

Source: class CommonJoinOperator, method initializeOp.

/**
 * Initializes per-join-operator state: key/value and filter evaluators for each
 * alias, their object inspectors, dummy null rows (used to emit the empty side
 * of an outer join), spill-capable row containers, and optional residual
 * post-join filter evaluators.
 */
@Override
@SuppressWarnings("unchecked")
protected void initializeOp(Configuration hconf) throws HiveException {
    super.initializeOp(hconf);
    closeOpCalled = false;
    this.handleSkewJoin = conf.getHandleSkewJoin();
    this.hconf = hconf;
    heartbeatInterval = HiveConf.getIntVar(hconf, HiveConf.ConfVars.HIVESENDHEARTBEAT);
    countAfterReport = 0;
    totalSz = 0;
    // tagLen = number of input tags; all per-alias arrays are sized by it.
    int tagLen = conf.getTagLength();
    // Map that contains the rows for each alias
    storage = new AbstractRowContainer[tagLen];
    numAliases = conf.getExprs().size();
    joinValues = new List[tagLen];
    joinFilters = new List[tagLen];
    order = conf.getTagOrder();
    condn = conf.getConds();
    nullsafes = conf.getNullSafes();
    noOuterJoin = conf.isNoOuterJoin();
    // totalSz accumulates the total number of join value columns across aliases.
    totalSz = JoinUtil.populateJoinKeyValue(joinValues, conf.getExprs(), order, NOTSKIPBIGTABLE, hconf);
    // process join filters
    // NOTE(review): joinFilters was already allocated above; this reallocation
    // discards that array — appears redundant but is harmless. Confirm intent.
    joinFilters = new List[tagLen];
    JoinUtil.populateJoinKeyValue(joinFilters, conf.getFilters(), order, NOTSKIPBIGTABLE, hconf);
    joinValuesObjectInspectors = JoinUtil.getObjectInspectorsFromEvaluators(joinValues, inputObjInspectors, NOTSKIPBIGTABLE, tagLen);
    joinFilterObjectInspectors = JoinUtil.getObjectInspectorsFromEvaluators(joinFilters, inputObjInspectors, NOTSKIPBIGTABLE, tagLen);
    joinValuesStandardObjectInspectors = JoinUtil.getStandardObjectInspectors(joinValuesObjectInspectors, NOTSKIPBIGTABLE, tagLen);
    filterMaps = conf.getFilterMap();
    if (noOuterJoin) {
        rowContainerStandardObjectInspectors = joinValuesStandardObjectInspectors;
    } else {
        // For outer joins, each stored row carries an extra trailing short that
        // encodes the filter tag, so extend each alias's inspector list by one.
        List<ObjectInspector>[] rowContainerObjectInspectors = new List[tagLen];
        for (Byte alias : order) {
            ArrayList<ObjectInspector> rcOIs = new ArrayList<ObjectInspector>();
            rcOIs.addAll(joinValuesObjectInspectors[alias]);
            // for each alias, add object inspector for short as the last element
            rcOIs.add(PrimitiveObjectInspectorFactory.writableShortObjectInspector);
            rowContainerObjectInspectors[alias] = rcOIs;
        }
        rowContainerStandardObjectInspectors = JoinUtil.getStandardObjectInspectors(rowContainerObjectInspectors, NOTSKIPBIGTABLE, tagLen);
    }
    dummyObj = new ArrayList[numAliases];
    dummyObjVectors = new RowContainer[numAliases];
    joinEmitInterval = HiveConf.getIntVar(hconf, HiveConf.ConfVars.HIVEJOINEMITINTERVAL);
    joinCacheSize = HiveConf.getIntVar(hconf, HiveConf.ConfVars.HIVEJOINCACHESIZE);
    // construct dummy null row (indicating empty table) and
    // construct spill table serde which is used if input is too
    // large to fit into main memory.
    byte pos = 0;
    for (Byte alias : order) {
        // Build a row of all-null join values for this alias.
        int sz = conf.getExprs().get(alias).size();
        ArrayList<Object> nr = new ArrayList<Object>(sz);
        for (int j = 0; j < sz; j++) {
            nr.add(null);
        }
        if (!noOuterJoin) {
            // add whether the row is filtered or not
            // this value does not matter for the dummyObj
            // because the join values are already null
            nr.add(new ShortWritable());
        }
        dummyObj[pos] = nr;
        // there should be only 1 dummy object in the RowContainer
        RowContainer<List<Object>> values = JoinUtil.getRowContainer(hconf, rowContainerStandardObjectInspectors[pos], alias, 1, spillTableDesc, conf, !hasFilter(pos), reporter);
        values.addRow(dummyObj[pos]);
        dummyObjVectors[pos] = values;
        // if serde is null, the input doesn't need to be spilled out
        // e.g., the output columns does not contains the input table
        RowContainer<List<Object>> rc = JoinUtil.getRowContainer(hconf, rowContainerStandardObjectInspectors[pos], alias, joinCacheSize, spillTableDesc, conf, !hasFilter(pos), reporter);
        storage[pos] = rc;
        pos++;
    }
    forwardCache = new Object[totalSz];
    aliasFilterTags = new short[numAliases];
    // NOTE(review): (byte) 0xff is -1, which widens to short 0xFFFF — i.e. all
    // bits set, presumably "no filter matched yet". Works, but the byte cast
    // into a short[] is misleading; confirm (short) 0xffff was the intent.
    Arrays.fill(aliasFilterTags, (byte) 0xff);
    filterTags = new short[numAliases];
    // skipVectors[i] has i+1 entries: one skip flag per alias up to position i.
    skipVectors = new boolean[numAliases][];
    for (int i = 0; i < skipVectors.length; i++) {
        skipVectors[i] = new boolean[i + 1];
    }
    intermediate = new List[numAliases];
    // offsets[i] = starting column index of alias i in the forwarded row;
    // offsets[numAliases] = total column count.
    offsets = new int[numAliases + 1];
    int sum = 0;
    for (int i = 0; i < numAliases; i++) {
        offsets[i] = sum;
        sum += joinValues[order[i]].size();
    }
    offsets[numAliases] = sum;
    outputObjInspector = getJoinOutputObjectInspector(order, joinValuesStandardObjectInspectors, conf);
    for (int i = 0; i < condn.length; i++) {
        if (condn[i].getType() == JoinDesc.LEFT_SEMI_JOIN) {
            hasLeftSemiJoin = true;
        }
    }
    // Create post-filtering evaluators if needed
    if (conf.getResidualFilterExprs() != null) {
        residualJoinFilters = new ArrayList<>(conf.getResidualFilterExprs().size());
        residualJoinFiltersOIs = new ArrayList<>(conf.getResidualFilterExprs().size());
        for (int i = 0; i < conf.getResidualFilterExprs().size(); i++) {
            ExprNodeDesc expr = conf.getResidualFilterExprs().get(i);
            residualJoinFilters.add(ExprNodeEvaluatorFactory.get(expr));
            residualJoinFiltersOIs.add(residualJoinFilters.get(i).initialize(outputObjInspector));
        }
        needsPostEvaluation = true;
        if (!noOuterJoin) {
            // We need to disable join emit interval, since for outer joins with post conditions
            // we need to have the full view on the right matching rows to know whether we need
            // to produce a row with NULL values or not
            joinEmitInterval = -1;
        }
    }
    if (LOG.isInfoEnabled()) {
        LOG.info("JOIN " + outputObjInspector.getTypeName() + " totalsz = " + totalSz);
    }
}
Also used : ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) ArrayList(java.util.ArrayList) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) ArrayList(java.util.ArrayList) List(java.util.List) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc)

Aggregations

ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector)17 ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable)15 PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector)14 DeferredJavaObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject)12 DeferredObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject)12 Test (org.junit.Test)12 ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable)7 DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable)4 LongWritable (org.apache.hadoop.io.LongWritable)4 FloatWritable (org.apache.hadoop.io.FloatWritable)3 ArrayList (java.util.ArrayList)2 UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException)2 UDFArgumentLengthException (org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException)2 UDFArgumentTypeException (org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException)2 HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)2 HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable)2 IntWritable (org.apache.hadoop.io.IntWritable)2 ParseException (java.text.ParseException)1 List (java.util.List)1 HiveChar (org.apache.hadoop.hive.common.type.HiveChar)1