
Example 1 with GenericUDFOPNegative

Use of org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNegative in the Apache Hive project.

From the class TestVectorNegative, method doTests:

private void doTests(Random random, TypeInfo typeInfo) throws Exception {
    String typeName = typeInfo.getTypeName();
    PrimitiveCategory primitiveCategory1 = ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory();
    List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
    List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList = new ArrayList<DataTypePhysicalVariation>();
    List<String> columns = new ArrayList<String>();
    int columnNum = 1;
    generationSpecList.add(GenerationSpec.createSameType(typeInfo));
    explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
    ExprNodeDesc col1Expr;
    String columnName = "col" + (columnNum++);
    col1Expr = new ExprNodeColumnDesc(typeInfo, columnName, "table", false);
    columns.add(columnName);
    List<ObjectInspector> objectInspectorList = new ArrayList<ObjectInspector>();
    objectInspectorList.add(VectorRandomRowSource.getObjectInspector(typeInfo));
    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
    children.add(col1Expr);
    // ----------------------------------------------------------------------------------------------
    String[] columnNames = columns.toArray(new String[0]);
    VectorRandomRowSource rowSource = new VectorRandomRowSource();
    rowSource.initGenerationSpecSchema(random, generationSpecList,
            /* maxComplexDepth */ 0,
            /* allowNull */ true,
            /* isUnicodeOk */ true,
            explicitDataTypePhysicalVariationList);
    Object[][] randomRows = rowSource.randomRows(100000);
    VectorRandomBatchSource batchSource = VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);
    GenericUDF genericUdf = new GenericUDFOPNegative();
    ObjectInspector[] objectInspectors = objectInspectorList.toArray(new ObjectInspector[objectInspectorList.size()]);
    ObjectInspector outputObjectInspector = null;
    try {
        outputObjectInspector = genericUdf.initialize(objectInspectors);
    } catch (Exception e) {
        Assert.fail(e.toString());
    }
    TypeInfo outputTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(outputObjectInspector);
    ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc(outputTypeInfo, genericUdf, children);
    final int rowCount = randomRows.length;
    Object[][] resultObjectsArray = new Object[NegativeTestMode.count][];
    for (int i = 0; i < NegativeTestMode.count; i++) {
        Object[] resultObjects = new Object[rowCount];
        resultObjectsArray[i] = resultObjects;
        NegativeTestMode negativeTestMode = NegativeTestMode.values()[i];
        switch(negativeTestMode) {
            case ROW_MODE:
                doRowArithmeticTest(typeInfo, columns, children, exprDesc, randomRows, rowSource.rowStructObjectInspector(), outputTypeInfo, resultObjects);
                break;
            case ADAPTOR:
            case VECTOR_EXPRESSION:
                doVectorArithmeticTest(typeInfo, columns, columnNames, rowSource.typeInfos(), rowSource.dataTypePhysicalVariations(), children, exprDesc, negativeTestMode, batchSource, exprDesc.getWritableObjectInspector(), outputTypeInfo, resultObjects);
                break;
            default:
                throw new RuntimeException("Unexpected Negative operator test mode " + negativeTestMode);
        }
    }
    for (int i = 0; i < rowCount; i++) {
        // Row-mode is the expected value.
        Object expectedResult = resultObjectsArray[0][i];
        for (int v = 1; v < NegativeTestMode.count; v++) {
            Object vectorResult = resultObjectsArray[v][i];
            if (expectedResult == null || vectorResult == null) {
                if (expectedResult != null || vectorResult != null) {
                    Assert.fail("Row " + i + " typeName " + typeName + " outputTypeName " + outputTypeInfo.getTypeName() + " " + NegativeTestMode.values()[v] + " result is NULL " + (vectorResult == null) + " does not match row-mode expected result is NULL " + (expectedResult == null) + " row values " + Arrays.toString(randomRows[i]));
                }
            } else {
                if (!expectedResult.equals(vectorResult)) {
                    Assert.fail("Row " + i + " typeName " + typeName + " outputTypeName " + outputTypeInfo.getTypeName() + " " + NegativeTestMode.values()[v] + " result " + vectorResult.toString() + " (" + vectorResult.getClass().getSimpleName() + ")" + " does not match row-mode expected result " + expectedResult.toString() + " (" + expectedResult.getClass().getSimpleName() + ")" + " row values " + Arrays.toString(randomRows[i]));
                }
            }
        }
    }
}
Also used:
ArrayList (java.util.ArrayList)
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)
DataTypePhysicalVariation (org.apache.hadoop.hive.common.type.DataTypePhysicalVariation)
ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc)
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc)
PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory)
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector)
VectorRandomBatchSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource)
ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc)
DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo)
TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo)
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)
GenerationSpec (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec)
GenericUDF (org.apache.hadoop.hive.ql.udf.generic.GenericUDF)
GenericUDFOPNegative (org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNegative)
VectorRandomRowSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource)
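The doTests() flow above only calls initialize() directly; the actual negation happens later, in row mode through GenericUDF.evaluate() (driven by doRowArithmeticTest) and in vector mode through the generated expressions. Below is a minimal standalone sketch of the row-mode call path. It is not taken from the test: the class name, the writable int inspector, and the literal 7 are illustrative assumptions.

import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNegative;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;

// Hypothetical demo class, not part of the Hive test suite.
public class NegativeUdfSketch {
    public static void main(String[] args) throws Exception {
        GenericUDFOPNegative udf = new GenericUDFOPNegative();
        // Describe the single argument: an int column, using the standard writable inspector.
        ObjectInspector outOI = udf.initialize(new ObjectInspector[] {
                PrimitiveObjectInspectorFactory.writableIntObjectInspector });
        // Row-mode evaluation of -(7); DeferredJavaObject wraps an already-computed argument value.
        Object result = udf.evaluate(new GenericUDF.DeferredObject[] {
                new GenericUDF.DeferredJavaObject(new IntWritable(7)) });
        // Expected from the UDF's unary-minus semantics: output type "int", value -7.
        System.out.println(outOI.getTypeName() + " -> " + result);
    }
}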

Example 2 with GenericUDFOPNegative

Use of org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNegative in the Apache Hive project.

From the class TestVectorizationContext, method testUnaryMinusColumnDouble:

@Test
public void testUnaryMinusColumnDouble() throws HiveException {
    ExprNodeColumnDesc col1Expr = new ExprNodeColumnDesc(Float.class, "col1", "table", false);
    GenericUDF gudf = new GenericUDFOPNegative();
    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>(1);
    children.add(col1Expr);
    ExprNodeGenericFuncDesc negExprDesc = new ExprNodeGenericFuncDesc(TypeInfoFactory.doubleTypeInfo, gudf, children);
    List<String> columns = new ArrayList<String>();
    columns.add("col0");
    columns.add("col1");
    VectorizationContext vc = new VectorizationContext("name", columns);
    VectorExpression ve = vc.getVectorExpression(negExprDesc, VectorExpressionDescriptor.Mode.PROJECTION);
    assertTrue(ve instanceof DoubleColUnaryMinus);
}
Also used:
GenericUDF (org.apache.hadoop.hive.ql.udf.generic.GenericUDF)
DoubleColUnaryMinus (org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColUnaryMinus)
ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc)
ArrayList (java.util.ArrayList)
GenericUDFOPNegative (org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNegative)
ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc)
VectorExpression (org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression)
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc)
Test (org.junit.Test)
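The assertion above only checks which VectorExpression class the VectorizationContext selects. The following is a hypothetical sketch of actually running a DoubleColUnaryMinus over a tiny hand-built VectorizedRowBatch; the two-column layout and the (inputColumn, outputColumn) constructor arguments are assumptions about the generated class, not something the test does.

import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColUnaryMinus;

// Hypothetical demo class, not part of the Hive test suite.
public class DoubleColUnaryMinusSketch {
    public static void main(String[] args) throws Exception {
        // Batch with two columns: input values in column 0, negated output in column 1.
        VectorizedRowBatch batch = new VectorizedRowBatch(2);
        DoubleColumnVector in = new DoubleColumnVector();
        DoubleColumnVector out = new DoubleColumnVector();
        batch.cols[0] = in;
        batch.cols[1] = out;
        batch.size = 3;
        in.vector[0] = 1.5;
        in.vector[1] = -2.0;
        in.vector[2] = 0.0;
        // Assumed generated constructor: (inputColumnNum, outputColumnNum).
        DoubleColUnaryMinus minus = new DoubleColUnaryMinus(0, 1);
        minus.evaluate(batch);
        for (int i = 0; i < batch.size; i++) {
            System.out.println(in.vector[i] + " -> " + out.vector[i]);  // expect -1.5, 2.0, -0.0
        }
    }
}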

Example 3 with GenericUDFOPNegative

Use of org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNegative in the Apache Hive project.

From the class TestVectorizationContext, method testUnaryMinusColumnLong:

@Test
public void testUnaryMinusColumnLong() throws HiveException {
    ExprNodeColumnDesc col1Expr = new ExprNodeColumnDesc(Integer.class, "col1", "table", false);
    GenericUDF gudf = new GenericUDFOPNegative();
    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>(1);
    children.add(col1Expr);
    ExprNodeGenericFuncDesc negExprDesc = new ExprNodeGenericFuncDesc(TypeInfoFactory.longTypeInfo, gudf, children);
    List<String> columns = new ArrayList<String>();
    columns.add("col0");
    columns.add("col1");
    VectorizationContext vc = new VectorizationContext("name", columns);
    VectorExpression ve = vc.getVectorExpression(negExprDesc, VectorExpressionDescriptor.Mode.PROJECTION);
    assertTrue(ve instanceof LongColUnaryMinus);
}
Also used:
GenericUDF (org.apache.hadoop.hive.ql.udf.generic.GenericUDF)
ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc)
ArrayList (java.util.ArrayList)
GenericUDFOPNegative (org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNegative)
ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc)
VectorExpression (org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression)
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc)
LongColUnaryMinus (org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColUnaryMinus)
Test (org.junit.Test)
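Hive's vectorized execution stores the whole integer family (tinyint, smallint, int, bigint) in LongColumnVector, which is why an Integer column above maps to LongColUnaryMinus. Below is a hypothetical variation of the test, not in the original, suggesting that a smallint column should select the same generated expression; the output TypeInfo and column names are illustrative, and the fragment would live in a @Test method declaring throws HiveException, like the one above.

// Hypothetical variation on testUnaryMinusColumnLong: a smallint input column.
ExprNodeColumnDesc col1Expr = new ExprNodeColumnDesc(TypeInfoFactory.shortTypeInfo, "col1", "table", false);
GenericUDF gudf = new GenericUDFOPNegative();
List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>(1);
children.add(col1Expr);
ExprNodeGenericFuncDesc negExprDesc = new ExprNodeGenericFuncDesc(TypeInfoFactory.shortTypeInfo, gudf, children);
List<String> columns = new ArrayList<String>();
columns.add("col0");
columns.add("col1");
VectorizationContext vc = new VectorizationContext("name", columns);
VectorExpression ve = vc.getVectorExpression(negExprDesc, VectorExpressionDescriptor.Mode.PROJECTION);
// Expected (assumption): smallint is widened to long, so the same generated class is chosen.
assertTrue(ve instanceof LongColUnaryMinus);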

Aggregations

ArrayList (java.util.ArrayList): 3 uses
ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc): 3 uses
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc): 3 uses
ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc): 3 uses
GenericUDF (org.apache.hadoop.hive.ql.udf.generic.GenericUDF): 3 uses
GenericUDFOPNegative (org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNegative): 3 uses
VectorExpression (org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression): 2 uses
Test (org.junit.Test): 2 uses
DataTypePhysicalVariation (org.apache.hadoop.hive.common.type.DataTypePhysicalVariation): 1 use
VectorRandomBatchSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource): 1 use
VectorRandomRowSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource): 1 use
GenerationSpec (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec): 1 use
DoubleColUnaryMinus (org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColUnaryMinus): 1 use
LongColUnaryMinus (org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColUnaryMinus): 1 use
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 1 use
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 1 use
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 1 use
PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory): 1 use
DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo): 1 use
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 1 use