
Example 16 with TypeInfoFactory.longTypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.longTypeInfo in project hive by apache.

the class ExprNodeGenericFuncDesc method newInstance.

/**
 * Create an ExprNodeGenericFuncDesc from the given GenericUDF instance and the
 * children parameters. If the function has an explicit name, the
 * newInstance method should be passed the function name in the funcText
 * argument.
 *
 * @throws UDFArgumentException
 */
public static ExprNodeGenericFuncDesc newInstance(GenericUDF genericUDF, String funcText, List<ExprNodeDesc> children) throws UDFArgumentException {
    ObjectInspector[] childrenOIs = new ObjectInspector[children.size()];
    for (int i = 0; i < childrenOIs.length; i++) {
        childrenOIs[i] = children.get(i).getWritableObjectInspector();
    }
    // Perform the check here instead of in GenericUDFBaseCompare to guarantee it is only run once per operator
    if (genericUDF instanceof GenericUDFBaseCompare && children.size() == 2) {
        TypeInfo oiTypeInfo0 = children.get(0).getTypeInfo();
        TypeInfo oiTypeInfo1 = children.get(1).getTypeInfo();
        SessionState ss = SessionState.get();
        Configuration conf = (ss != null) ? ss.getConf() : new Configuration();
        LogHelper console = new LogHelper(LOG);
        // For now, if a bigint would be cast to a double for the comparison
        // (bigint vs. string or bigint vs. double), throw an error or print a warning.
        if ((oiTypeInfo0.equals(TypeInfoFactory.stringTypeInfo) && oiTypeInfo1.equals(TypeInfoFactory.longTypeInfo))
                || (oiTypeInfo0.equals(TypeInfoFactory.longTypeInfo) && oiTypeInfo1.equals(TypeInfoFactory.stringTypeInfo))) {
            String error = StrictChecks.checkTypeSafety(conf);
            if (error != null) {
                throw new UDFArgumentException(error);
            }
            console.printError("WARNING: Comparing a bigint and a string may result in a loss of precision.");
        } else if ((oiTypeInfo0.equals(TypeInfoFactory.doubleTypeInfo) && oiTypeInfo1.equals(TypeInfoFactory.longTypeInfo))
                || (oiTypeInfo0.equals(TypeInfoFactory.longTypeInfo) && oiTypeInfo1.equals(TypeInfoFactory.doubleTypeInfo))) {
            String error = StrictChecks.checkTypeSafety(conf);
            if (error != null) {
                throw new UDFArgumentException(error);
            }
            console.printError("WARNING: Comparing a bigint and a double may result in a loss of precision.");
        }
    }
    ObjectInspector oi = genericUDF.initializeAndFoldConstants(childrenOIs);
    String[] requiredJars = genericUDF.getRequiredJars();
    String[] requiredFiles = genericUDF.getRequiredFiles();
    SessionState ss = SessionState.get();
    if (requiredJars != null) {
        SessionState.ResourceType t = SessionState.find_resource_type("JAR");
        try {
            ss.add_resources(t, Arrays.asList(requiredJars));
        } catch (Exception e) {
            throw new UDFArgumentException(e);
        }
    }
    if (requiredFiles != null) {
        SessionState.ResourceType t = SessionState.find_resource_type("FILE");
        try {
            ss.add_resources(t, Arrays.asList(requiredFiles));
        } catch (Exception e) {
            throw new UDFArgumentException(e);
        }
    }
    return new ExprNodeGenericFuncDesc(oi, genericUDF, funcText, children);
}
Also used : SessionState(org.apache.hadoop.hive.ql.session.SessionState) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) Configuration(org.apache.hadoop.conf.Configuration) LogHelper(org.apache.hadoop.hive.ql.session.SessionState.LogHelper) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) UDFArgumentException(org.apache.hadoop.hive.ql.exec.UDFArgumentException) GenericUDFBaseCompare(org.apache.hadoop.hive.ql.udf.generic.GenericUDFBaseCompare)
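
The strict type-safety check above only fires when a two-argument comparison UDF receives a bigint operand next to a string or double operand. The sketch below shows one way a caller might reach that path; it is illustrative only, and the column name, table alias, constant value, and the choice of GenericUDFOPEqual (from org.apache.hadoop.hive.ql.udf.generic) are assumptions rather than part of the example above. The call declares throws UDFArgumentException.

// Hypothetical usage sketch: build "t.id = '42'" where id is a bigint column.
// If StrictChecks.checkTypeSafety(conf) returns an error message, newInstance
// throws a UDFArgumentException; otherwise it only logs the precision warning.
List<ExprNodeDesc> children = new ArrayList<>();
children.add(new ExprNodeColumnDesc(TypeInfoFactory.longTypeInfo, "id", "t", false));
children.add(new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "42"));

ExprNodeGenericFuncDesc equalsExpr =
        ExprNodeGenericFuncDesc.newInstance(new GenericUDFOPEqual(), "=", children);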

Example 17 with TypeInfoFactory.longTypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.longTypeInfo in project hive by apache.

the class TestMapJoinOperator method doTestLong.

public void doTestLong(long seed, TypeInfo numberTypeInfo, VectorMapJoinVariation vectorMapJoinVariation) throws Exception {
    int rowCount = 10000;
    HiveConf hiveConf = new HiveConf();
    String[] bigTableColumnNames = new String[] { "number1" };
    TypeInfo[] bigTableTypeInfos = new TypeInfo[] { TypeInfoFactory.longTypeInfo };
    int[] bigTableKeyColumnNums = new int[] { 0 };
    String[] smallTableValueColumnNames = new String[] { "sv1", "sv2" };
    TypeInfo[] smallTableValueTypeInfos = new TypeInfo[] { TypeInfoFactory.dateTypeInfo, TypeInfoFactory.stringTypeInfo };
    int[] bigTableRetainColumnNums = new int[] { 0 };
    int[] smallTableRetainKeyColumnNums = new int[] {};
    int[] smallTableRetainValueColumnNums = new int[] { 0, 1 };
    SmallTableGenerationParameters smallTableGenerationParameters = new SmallTableGenerationParameters();
    // ----------------------------------------------------------------------------------------------
    MapJoinTestDescription testDesc = new MapJoinTestDescription(
            hiveConf, vectorMapJoinVariation,
            bigTableColumnNames, bigTableTypeInfos, bigTableKeyColumnNums,
            smallTableValueColumnNames, smallTableValueTypeInfos,
            bigTableRetainColumnNums, smallTableRetainKeyColumnNums, smallTableRetainValueColumnNums,
            smallTableGenerationParameters);
    // Prepare data.  Good for ANY implementation variation.
    MapJoinTestData testData = new MapJoinTestData(rowCount, testDesc, seed, seed * 10);
    executeTest(testDesc, testData);
}
Also used : SmallTableGenerationParameters(org.apache.hadoop.hive.ql.exec.vector.mapjoin.MapJoinTestDescription.SmallTableGenerationParameters) HiveConf(org.apache.hadoop.hive.conf.HiveConf) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo)
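
For reference, a possible driver for this helper is sketched below; the @Test method name, the fixed seed value, and iterating over every VectorMapJoinVariation are assumptions made for illustration, not taken from the example above.

@Test
public void testLong() throws Exception {
    // Arbitrary but fixed seed so the generated big- and small-table rows are reproducible.
    long seed = 234882L;
    // Exercise doTestLong with a bigint key for every available join variation.
    for (VectorMapJoinVariation vectorMapJoinVariation : VectorMapJoinVariation.values()) {
        doTestLong(seed, TypeInfoFactory.longTypeInfo, vectorMapJoinVariation);
    }
}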

Example 18 with TypeInfoFactory.longTypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.longTypeInfo in project hive by apache.

the class TestGenericUDFFloor method testString.

@Test
public void testString() throws HiveException {
    GenericUDFFloor udf = new GenericUDFFloor();
    Text input = new Text("32300.004747");
    ObjectInspector[] inputOIs = { PrimitiveObjectInspectorFactory.writableStringObjectInspector };
    DeferredObject[] args = { new DeferredJavaObject(input) };
    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
    Assert.assertEquals(TypeInfoFactory.longTypeInfo, oi.getTypeInfo());
    LongWritable res = (LongWritable) udf.evaluate(args);
    Assert.assertEquals(32300L, res.get());
}
Also used : PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) DeferredJavaObject(org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject) DeferredObject(org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject) Text(org.apache.hadoop.io.Text) LongWritable(org.apache.hadoop.io.LongWritable) Test(org.junit.Test)

Example 19 with TypeInfoFactory.longTypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.longTypeInfo in project hive by apache.

the class TestGenericUDFFloor method testShort.

@Test
public void testShort() throws HiveException {
    GenericUDFFloor udf = new GenericUDFFloor();
    ShortWritable input = new ShortWritable((short) 74);
    ObjectInspector[] inputOIs = { PrimitiveObjectInspectorFactory.writableShortObjectInspector };
    DeferredObject[] args = { new DeferredJavaObject(input) };
    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
    Assert.assertEquals(TypeInfoFactory.longTypeInfo, oi.getTypeInfo());
    LongWritable res = (LongWritable) udf.evaluate(args);
    Assert.assertEquals(74L, res.get());
}
Also used : PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) DeferredJavaObject(org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject) DeferredObject(org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject) LongWritable(org.apache.hadoop.io.LongWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) Test(org.junit.Test)

Example 20 with TypeInfoFactory.longTypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.longTypeInfo in project hive by apache.

the class TestGenericUDFFloor method testFloat.

@Test
public void testFloat() throws HiveException {
    GenericUDFFloor udf = new GenericUDFFloor();
    FloatWritable input = new FloatWritable(-323.4747f);
    ObjectInspector[] inputOIs = { PrimitiveObjectInspectorFactory.writableFloatObjectInspector };
    DeferredObject[] args = { new DeferredJavaObject(input) };
    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
    Assert.assertEquals(TypeInfoFactory.longTypeInfo, oi.getTypeInfo());
    LongWritable res = (LongWritable) udf.evaluate(args);
    Assert.assertEquals(-324L, res.get());
}
Also used : FloatWritable(org.apache.hadoop.io.FloatWritable) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) DeferredJavaObject(org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject) DeferredObject(org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject) LongWritable(org.apache.hadoop.io.LongWritable) Test(org.junit.Test)
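
All three tests above assert that GenericUDFFloor initializes to Hive's bigint type by comparing against TypeInfoFactory.longTypeInfo. That constant is the TypeInfo the factory hands out for the "bigint" type name, so a name-based lookup yields an equal (and, because the factory caches primitive TypeInfos, normally identical) instance. A minimal sketch, assuming serdeConstants.BIGINT_TYPE_NAME ("bigint") from org.apache.hadoop.hive.serde:

// TypeInfoFactory caches primitive TypeInfo instances per type name, so the
// lookup by name matches the longTypeInfo constant used in the assertions above.
TypeInfo byName = TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.BIGINT_TYPE_NAME);
Assert.assertEquals(TypeInfoFactory.longTypeInfo, byName);
Assert.assertEquals("bigint", TypeInfoFactory.longTypeInfo.getTypeName());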

Aggregations

ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 26 uses
LongWritable (org.apache.hadoop.io.LongWritable): 23 uses
Test (org.junit.Test): 23 uses
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector): 21 uses
DeferredJavaObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject): 20 uses
DeferredObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject): 20 uses
TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo): 12 uses
HiveConf (org.apache.hadoop.hive.conf.HiveConf): 4 uses
SmallTableGenerationParameters (org.apache.hadoop.hive.ql.exec.vector.mapjoin.MapJoinTestDescription.SmallTableGenerationParameters): 4 uses
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 4 uses
ArrayList (java.util.ArrayList): 3 uses
UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException): 3 uses
HiveChar (org.apache.hadoop.hive.common.type.HiveChar): 2 uses
HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar): 2 uses
UDFArgumentTypeException (org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException): 2 uses
VectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch): 2 uses
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 2 uses
ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc): 2 uses
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc): 2 uses
ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc): 2 uses