
Example 11 with TypeInfoFactory.longTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.longTypeInfo in project hive by apache.

The class TestVectorUDFAdaptor, method testLongUDF:

@Test
public void testLongUDF() {
    // create a syntax tree for a simple function call "longudf(col0)"
    ExprNodeGenericFuncDesc funcDesc;
    TypeInfo typeInfo = TypeInfoFactory.longTypeInfo;
    GenericUDFBridge genericUDFBridge = new GenericUDFBridge("longudf", false, LongUDF.class.getName());
    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
    ExprNodeColumnDesc colDesc = new ExprNodeColumnDesc(typeInfo, "col0", "tablename", false);
    children.add(colDesc);
    VectorUDFArgDesc[] argDescs = new VectorUDFArgDesc[1];
    argDescs[0] = new VectorUDFArgDesc();
    argDescs[0].setVariable(0);
    funcDesc = new ExprNodeGenericFuncDesc(typeInfo, genericUDFBridge, genericUDFBridge.getUdfName(), children);
    // create the adaptor for this function call to work in vector mode
    VectorUDFAdaptor vudf = null;
    try {
        vudf = new VectorUDFAdaptor(funcDesc, 1, "Long", argDescs);
    } catch (HiveException e) {
        // We should never get here.
        assertTrue(false);
    }
    VectorizedRowBatch b = getBatchLongInLongOut();
    vudf.evaluate(b);
    // verify output
    LongColumnVector out = (LongColumnVector) b.cols[1];
    assertEquals(1000, out.vector[0]);
    assertEquals(1001, out.vector[1]);
    assertEquals(1002, out.vector[2]);
    assertTrue(out.noNulls);
    assertFalse(out.isRepeating);
    // with nulls
    b = getBatchLongInLongOut();
    out = (LongColumnVector) b.cols[1];
    b.cols[0].noNulls = false;
    vudf.evaluate(b);
    assertFalse(out.noNulls);
    assertEquals(1000, out.vector[0]);
    assertEquals(1001, out.vector[1]);
    assertTrue(out.isNull[2]);
    assertFalse(out.isRepeating);
    // with repeating
    b = getBatchLongInLongOut();
    out = (LongColumnVector) b.cols[1];
    b.cols[0].isRepeating = true;
    vudf.evaluate(b);
    // The implementation may or may not set isRepeating on the output column.
    // That is implementation-defined.
    assertTrue(b.cols[1].isRepeating && out.vector[0] == 1000 || !b.cols[1].isRepeating && out.vector[2] == 1000);
    assertEquals(3, b.size);
}
Also used : HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) ArrayList(java.util.ArrayList) ExprNodeGenericFuncDesc(org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) GenericUDFBridge(org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge) VectorizedRowBatch(org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch) ExprNodeColumnDesc(org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc) LongUDF(org.apache.hadoop.hive.ql.exec.vector.udf.legacy.LongUDF) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) LongColumnVector(org.apache.hadoop.hive.ql.exec.vector.LongColumnVector) Test(org.junit.Test)
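
The helper getBatchLongInLongOut() is not shown on this page. Below is a minimal sketch of what such a batch builder could look like, assuming the input column holds the long values 0, 1, 2 (so that LongUDF, which appears to add 1000, yields 1000, 1001, 1002) and that row 2 is pre-marked null so the null only takes effect once the test clears noNulls; the values and behavior are inferred from how the test uses the helper, not taken from the Hive source:

private VectorizedRowBatch getBatchLongInLongOut() {
    // Two columns: cols[0] is the long input, cols[1] receives the UDF output.
    VectorizedRowBatch b = new VectorizedRowBatch(2);
    LongColumnVector in = new LongColumnVector();
    LongColumnVector out = new LongColumnVector();
    b.cols[0] = in;
    b.cols[1] = out;
    in.vector[0] = 0;
    in.vector[1] = 1;
    in.vector[2] = 2;
    // Row 2 is flagged null up front; the flag is honored only after noNulls is set to false.
    in.isNull[2] = true;
    in.noNulls = true;
    b.size = 3;
    return b;
}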

Example 12 with TypeInfoFactory.longTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.longTypeInfo in project hive by apache.

The class TestStreamingMin, method minLong:

public void minLong(Iterator<Long> inVals, int inSz, int numPreceding, int numFollowing, Iterator<Long> outVals) throws HiveException {
    GenericUDAFMin fnR = new GenericUDAFMin();
    TypeInfo[] inputTypes = { TypeInfoFactory.longTypeInfo };
    ObjectInspector[] inputOIs = { PrimitiveObjectInspectorFactory.writableLongObjectInspector };
    LongWritable[] in = new LongWritable[1];
    in[0] = new LongWritable();
    TestStreamingSum._agg(fnR, inputTypes, inVals, TypeHandler.LongHandler, in, inputOIs, inSz, numPreceding, numFollowing, outVals);
}
Also used : ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) GenericUDAFMin(org.apache.hadoop.hive.ql.udf.generic.GenericUDAFMin) LongWritable(org.apache.hadoop.io.LongWritable) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo)
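
A hypothetical usage sketch of minLong, assuming the shared _agg helper drains inVals, computes the streaming MIN over a window of numPreceding rows before and numFollowing rows after each row, and checks each result against outVals. The concrete input values and expected outputs below are illustrative assumptions, not taken from the Hive tests:

import java.util.Arrays;
import java.util.List;

// Window of 2 preceding and 0 following rows; the expected running minimums are 5, 3, 3, 1, 1.
// minLong declares HiveException, so the enclosing test method would declare throws HiveException.
List<Long> in = Arrays.asList(5L, 3L, 4L, 1L, 2L);
List<Long> expected = Arrays.asList(5L, 3L, 3L, 1L, 1L);
new TestStreamingMin().minLong(in.iterator(), in.size(), 2, 0, expected.iterator());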

Example 13 with TypeInfoFactory.longTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.longTypeInfo in project hive by apache.

The class TestStreamingSum, method sumLong:

public void sumLong(Iterator<Long> inVals, int inSz, int numPreceding, int numFollowing, Iterator<Long> outVals) throws HiveException {
    GenericUDAFSum fnR = new GenericUDAFSum();
    TypeInfo[] inputTypes = { TypeInfoFactory.longTypeInfo };
    ObjectInspector[] inputOIs = { PrimitiveObjectInspectorFactory.writableLongObjectInspector };
    LongWritable[] in = new LongWritable[1];
    in[0] = new LongWritable();
    _agg(fnR, inputTypes, inVals, TypeHandler.LongHandler, in, inputOIs, inSz, numPreceding, numFollowing, outVals);
}
Also used : ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) LongWritable(org.apache.hadoop.io.LongWritable) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) GenericUDAFSum(org.apache.hadoop.hive.ql.udf.generic.GenericUDAFSum)

Example 14 with TypeInfoFactory.longTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.longTypeInfo in project hive by apache.

The class TestGenericUDFOPNegative, method testLong:

@Test
public void testLong() throws HiveException {
    GenericUDFOPNegative udf = new GenericUDFOPNegative();
    LongWritable input = new LongWritable(3234747);
    ObjectInspector[] inputOIs = { PrimitiveObjectInspectorFactory.writableLongObjectInspector };
    DeferredObject[] args = { new DeferredJavaObject(input) };
    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
    Assert.assertEquals(TypeInfoFactory.longTypeInfo, oi.getTypeInfo());
    LongWritable res = (LongWritable) udf.evaluate(args);
    Assert.assertEquals(-3234747L, res.get());
}
Also used : PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) DeferredJavaObject(org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject) DeferredObject(org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) LongWritable(org.apache.hadoop.io.LongWritable) Test(org.junit.Test)
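
For context, TypeInfoFactory.longTypeInfo is the shared PrimitiveTypeInfo for Hive's bigint (64-bit long) type, which is why the test can assert it against the TypeInfo of the inspector returned by initialize(). A small standalone sketch of that relationship; the class name is made up for illustration:

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class LongTypeInfoSketch {
    public static void main(String[] args) {
        TypeInfo longInfo = TypeInfoFactory.longTypeInfo;
        // Prints "bigint", the Hive type name behind longTypeInfo.
        System.out.println(longInfo.getTypeName());
        // The standard writable inspector for that TypeInfo is normally the same cached
        // instance the tests pass as the input ObjectInspector.
        ObjectInspector oi = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(longInfo);
        System.out.println(oi == PrimitiveObjectInspectorFactory.writableLongObjectInspector);
    }
}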

Example 15 with TypeInfoFactory.longTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.longTypeInfo in project hive by apache.

The class MapJoinMultiKeyBenchBase, method doSetup:

public void doSetup(VectorMapJoinVariation vectorMapJoinVariation, MapJoinTestImplementation mapJoinImplementation) throws Exception {
    HiveConf hiveConf = new HiveConf();
    long seed = 2543;
    int rowCount = 100000;  // 100,000 rows.
    String[] bigTableColumnNames = new String[] { "b1", "b2", "b3" };
    TypeInfo[] bigTableTypeInfos = new TypeInfo[] { TypeInfoFactory.intTypeInfo, TypeInfoFactory.longTypeInfo, TypeInfoFactory.stringTypeInfo };
    int[] bigTableKeyColumnNums = new int[] { 0, 1, 2 };
    String[] smallTableValueColumnNames = new String[] { "sv1" };
    TypeInfo[] smallTableValueTypeInfos = new TypeInfo[] { TypeInfoFactory.stringTypeInfo };
    int[] bigTableRetainColumnNums = new int[] { 0, 1, 2 };
    int[] smallTableRetainKeyColumnNums = new int[] {};
    int[] smallTableRetainValueColumnNums = new int[] { 0 };
    SmallTableGenerationParameters smallTableGenerationParameters = new SmallTableGenerationParameters();
    smallTableGenerationParameters.setValueOption(ValueOption.ONLY_ONE);
    setupMapJoin(hiveConf, seed, rowCount, vectorMapJoinVariation, mapJoinImplementation, bigTableColumnNames, bigTableTypeInfos, bigTableKeyColumnNums, smallTableValueColumnNames, smallTableValueTypeInfos, bigTableRetainColumnNums, smallTableRetainKeyColumnNums, smallTableRetainValueColumnNums, smallTableGenerationParameters);
}
Also used : SmallTableGenerationParameters(org.apache.hadoop.hive.ql.exec.vector.mapjoin.MapJoinTestDescription.SmallTableGenerationParameters) HiveConf(org.apache.hadoop.hive.conf.HiveConf) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo)

Aggregations

ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) 26
LongWritable (org.apache.hadoop.io.LongWritable) 23
Test (org.junit.Test) 23
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) 21
DeferredJavaObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject) 20
DeferredObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject) 20
TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) 12
HiveConf (org.apache.hadoop.hive.conf.HiveConf) 4
SmallTableGenerationParameters (org.apache.hadoop.hive.ql.exec.vector.mapjoin.MapJoinTestDescription.SmallTableGenerationParameters) 4
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) 4
ArrayList (java.util.ArrayList) 3
UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException) 3
HiveChar (org.apache.hadoop.hive.common.type.HiveChar) 2
HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar) 2
UDFArgumentTypeException (org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException) 2
VectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch) 2
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException) 2
ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc) 2
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc) 2
ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc) 2