
Example 16 with VectorRandomBatchSource

Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource in project hive by apache.

From the class TestVectorFilterCompare, method doTestsWithDiffColumnScalar:

private void doTestsWithDiffColumnScalar(Random random, TypeInfo typeInfo1, TypeInfo typeInfo2, ColumnScalarMode columnScalarMode, Comparison comparison, boolean tryDecimal64) throws Exception {
    String typeName1 = typeInfo1.getTypeName();
    PrimitiveCategory primitiveCategory1 = ((PrimitiveTypeInfo) typeInfo1).getPrimitiveCategory();
    String typeName2 = typeInfo2.getTypeName();
    PrimitiveCategory primitiveCategory2 = ((PrimitiveTypeInfo) typeInfo2).getPrimitiveCategory();
    List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
    List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList = new ArrayList<DataTypePhysicalVariation>();
    List<String> columns = new ArrayList<String>();
    int columnNum = 1;
    ExprNodeDesc col1Expr;
    Object scalar1Object = null;
    final boolean decimal64Enable1 = checkDecimal64(tryDecimal64, typeInfo1);
    if (columnScalarMode == ColumnScalarMode.COLUMN_COLUMN || columnScalarMode == ColumnScalarMode.COLUMN_SCALAR) {
        generationSpecList.add(GenerationSpec.createSameType(typeInfo1));
        explicitDataTypePhysicalVariationList.add(decimal64Enable1 ? DataTypePhysicalVariation.DECIMAL_64 : DataTypePhysicalVariation.NONE);
        String columnName = "col" + (columnNum++);
        col1Expr = new ExprNodeColumnDesc(typeInfo1, columnName, "table", false);
        columns.add(columnName);
    } else {
        scalar1Object = VectorRandomRowSource.randomPrimitiveObject(random, (PrimitiveTypeInfo) typeInfo1);
        // Adjust the decimal type to the scalar's type...
        if (typeInfo1 instanceof DecimalTypeInfo) {
            typeInfo1 = getDecimalScalarTypeInfo(scalar1Object);
        }
        col1Expr = new ExprNodeConstantDesc(typeInfo1, scalar1Object);
    }
    ExprNodeDesc col2Expr;
    Object scalar2Object = null;
    final boolean decimal64Enable2 = checkDecimal64(tryDecimal64, typeInfo2);
    if (columnScalarMode == ColumnScalarMode.COLUMN_COLUMN || columnScalarMode == ColumnScalarMode.SCALAR_COLUMN) {
        generationSpecList.add(GenerationSpec.createSameType(typeInfo2));
        explicitDataTypePhysicalVariationList.add(decimal64Enable2 ? DataTypePhysicalVariation.DECIMAL_64 : DataTypePhysicalVariation.NONE);
        String columnName = "col" + (columnNum++);
        col2Expr = new ExprNodeColumnDesc(typeInfo2, columnName, "table", false);
        columns.add(columnName);
    } else {
        scalar2Object = VectorRandomRowSource.randomPrimitiveObject(random, (PrimitiveTypeInfo) typeInfo2);
        // Adjust the decimal type to the scalar's type...
        if (typeInfo2 instanceof DecimalTypeInfo) {
            typeInfo2 = getDecimalScalarTypeInfo(scalar2Object);
        }
        col2Expr = new ExprNodeConstantDesc(typeInfo2, scalar2Object);
    }
    List<ObjectInspector> objectInspectorList = new ArrayList<ObjectInspector>();
    objectInspectorList.add(VectorRandomRowSource.getObjectInspector(typeInfo1));
    objectInspectorList.add(VectorRandomRowSource.getObjectInspector(typeInfo2));
    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
    children.add(col1Expr);
    children.add(col2Expr);
    // ----------------------------------------------------------------------------------------------
    String[] columnNames = columns.toArray(new String[0]);
    VectorRandomRowSource rowSource = new VectorRandomRowSource();
    rowSource.initGenerationSpecSchema(
        random, generationSpecList,
        /* maxComplexDepth */ 0,
        /* allowNull */ true,
        /* isUnicodeOk */ true,
        explicitDataTypePhysicalVariationList);
    Object[][] randomRows = rowSource.randomRows(100000);
    VectorRandomBatchSource batchSource = VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);
    GenericUDF genericUdf;
    switch(comparison) {
        case EQUALS:
            genericUdf = new GenericUDFOPEqual();
            break;
        case LESS_THAN:
            genericUdf = new GenericUDFOPLessThan();
            break;
        case LESS_THAN_EQUAL:
            genericUdf = new GenericUDFOPEqualOrLessThan();
            break;
        case GREATER_THAN:
            genericUdf = new GenericUDFOPGreaterThan();
            break;
        case GREATER_THAN_EQUAL:
            genericUdf = new GenericUDFOPEqualOrGreaterThan();
            break;
        case NOT_EQUALS:
            genericUdf = new GenericUDFOPNotEqual();
            break;
        default:
            throw new RuntimeException("Unexpected arithmetic " + comparison);
    }
    ObjectInspector[] objectInspectors = objectInspectorList.toArray(new ObjectInspector[objectInspectorList.size()]);
    ObjectInspector outputObjectInspector = null;
    try {
        outputObjectInspector = genericUdf.initialize(objectInspectors);
    } catch (Exception e) {
        Assert.fail(e.toString());
    }
    TypeInfo outputTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(outputObjectInspector);
    ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc(outputTypeInfo, genericUdf, children);
    final int rowCount = randomRows.length;
    Object[][] resultObjectsArray = new Object[FilterCompareTestMode.count][];
    for (int i = 0; i < FilterCompareTestMode.count; i++) {
        Object[] resultObjects = new Object[rowCount];
        resultObjectsArray[i] = resultObjects;
        FilterCompareTestMode filterCompareTestMode = FilterCompareTestMode.values()[i];
        switch(filterCompareTestMode) {
            case ROW_MODE:
                doRowFilterCompareTest(typeInfo1, typeInfo2, columns, children, exprDesc, comparison, randomRows, columnScalarMode, rowSource.rowStructObjectInspector(), outputTypeInfo, resultObjects);
                break;
            case ADAPTOR:
            case FILTER_VECTOR_EXPRESSION:
            case COMPARE_VECTOR_EXPRESSION:
                doVectorFilterCompareTest(typeInfo1, typeInfo2, columns, columnNames, rowSource.typeInfos(), rowSource.dataTypePhysicalVariations(), children, exprDesc, comparison, filterCompareTestMode, columnScalarMode, batchSource, exprDesc.getWritableObjectInspector(), outputTypeInfo, resultObjects);
                break;
            default:
                throw new RuntimeException("Unexpected IF statement test mode " + filterCompareTestMode);
        }
    }
    for (int i = 0; i < rowCount; i++) {
        // Row-mode is the expected value.
        Object expectedResult = resultObjectsArray[0][i];
        for (int v = 1; v < FilterCompareTestMode.count; v++) {
            FilterCompareTestMode filterCompareTestMode = FilterCompareTestMode.values()[v];
            Object vectorResult = resultObjectsArray[v][i];
            if (filterCompareTestMode == FilterCompareTestMode.FILTER_VECTOR_EXPRESSION && expectedResult == null && vectorResult != null) {
                // This is OK.
                boolean vectorBoolean = ((BooleanWritable) vectorResult).get();
                if (vectorBoolean) {
                    Assert.fail("Row " + i + " typeName1 " + typeName1 + " typeName2 " + typeName2 + " outputTypeName " + outputTypeInfo.getTypeName() + " " + comparison + " " + filterCompareTestMode + " " + columnScalarMode + " result is NOT NULL and true" + " does not match row-mode expected result is NULL which means false here" + (columnScalarMode == ColumnScalarMode.SCALAR_COLUMN ? " scalar1 " + scalar1Object.toString() : "") + " row values " + Arrays.toString(randomRows[i]) + (columnScalarMode == ColumnScalarMode.COLUMN_SCALAR ? " scalar2 " + scalar2Object.toString() : ""));
                }
            } else if (expectedResult == null || vectorResult == null) {
                if (expectedResult != null || vectorResult != null) {
                    Assert.fail("Row " + i + " typeName1 " + typeName1 + " typeName2 " + typeName2 + " outputTypeName " + outputTypeInfo.getTypeName() + " " + comparison + " " + filterCompareTestMode + " " + columnScalarMode + " result is NULL " + (vectorResult == null) + " does not match row-mode expected result is NULL " + (expectedResult == null) + (columnScalarMode == ColumnScalarMode.SCALAR_COLUMN ? " scalar1 " + scalar1Object.toString() : "") + " row values " + Arrays.toString(randomRows[i]) + (columnScalarMode == ColumnScalarMode.COLUMN_SCALAR ? " scalar2 " + scalar2Object.toString() : ""));
                }
            } else {
                if (!expectedResult.equals(vectorResult)) {
                    Assert.fail("Row " + i + " typeName1 " + typeName1 + " typeName2 " + typeName2 + " outputTypeName " + outputTypeInfo.getTypeName() + " " + comparison + " " + filterCompareTestMode + " " + columnScalarMode + " result " + vectorResult.toString() + " (" + vectorResult.getClass().getSimpleName() + ")" + " does not match row-mode expected result " + expectedResult.toString() + " (" + expectedResult.getClass().getSimpleName() + ")" + (columnScalarMode == ColumnScalarMode.SCALAR_COLUMN ? " scalar1 " + scalar1Object.toString() : "") + " row values " + Arrays.toString(randomRows[i]) + (columnScalarMode == ColumnScalarMode.COLUMN_SCALAR ? " scalar2 " + scalar2Object.toString() : ""));
                }
            }
        }
    }
}
Also used: GenericUDFOPGreaterThan (org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPGreaterThan), GenericUDFOPEqualOrLessThan (org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrLessThan), ArrayList (java.util.ArrayList), PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo), DataTypePhysicalVariation (org.apache.hadoop.hive.common.type.DataTypePhysicalVariation), GenericUDFOPNotEqual (org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotEqual), ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc), GenericUDFOPEqual (org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual), ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc), PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory), ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc), ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), VectorRandomBatchSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource), ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc), GenericUDFOPLessThan (org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPLessThan), VarcharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo), DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo), TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo), CharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo), GenericUDFOPEqualOrGreaterThan (org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrGreaterThan), SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException), HiveException (org.apache.hadoop.hive.ql.metadata.HiveException), GenerationSpec (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec), GenericUDF (org.apache.hadoop.hive.ql.udf.generic.GenericUDF), BooleanWritable (org.apache.hadoop.io.BooleanWritable), VectorRandomRowSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource)
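For orientation, the doVectorFilterCompareTest helper (not shown here) drains the VectorRandomBatchSource one VectorizedRowBatch at a time. The following is only a hedged sketch of that consumption loop, not the test's actual code: it assumes a VectorizedRowBatchCtx named batchContext (like the one built in Example 20) and an already-compiled VectorExpression named vectorExpression.

    // Sketch: replay the source's "interesting" batches through the expression.
    VectorizedRowBatch batch = batchContext.createVectorizedRowBatch();
    batchSource.resetBatchIteration();
    int rowIndex = 0;
    while (batchSource.fillNextBatch(batch)) {
        // Evaluate the compiled filter/compare over the rows in this batch.
        vectorExpression.evaluate(batch);
        // The real helper copies the output column into resultObjects here.
        rowIndex += batch.size;
    }

The verification loop at the end of the method then compares these vector-mode results row by row against the ROW_MODE results, treating row mode as ground truth.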

Example 17 with VectorRandomBatchSource

Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource in project hive by apache.

From the class TestVectorAggregation, method doMerge:

private void doMerge(GenericUDAFEvaluator.Mode mergeUdafEvaluatorMode, Random random, String aggregationName, TypeInfo typeInfo, GenerationSpec keyGenerationSpec, List<String> columns, String[] columnNames, int dataAggrMaxKeyCount, int reductionFactor, TypeInfo partial1OutputTypeInfo, Object[] partial1ResultsArray) throws Exception {
    List<GenerationSpec> mergeAggrGenerationSpecList = new ArrayList<GenerationSpec>();
    List<DataTypePhysicalVariation> mergeDataTypePhysicalVariationList = new ArrayList<DataTypePhysicalVariation>();
    mergeAggrGenerationSpecList.add(keyGenerationSpec);
    mergeDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
    // Use OMIT for both.  We will fill in the data from the PARTIAL1 results.
    GenerationSpec mergeGenerationSpec = GenerationSpec.createOmitGeneration(partial1OutputTypeInfo);
    mergeAggrGenerationSpecList.add(mergeGenerationSpec);
    mergeDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
    ExprNodeColumnDesc mergeCol1Expr = new ExprNodeColumnDesc(partial1OutputTypeInfo, "col1", "table", false);
    List<ExprNodeDesc> mergeParameters = new ArrayList<ExprNodeDesc>();
    mergeParameters.add(mergeCol1Expr);
    final int mergeParameterCount = mergeParameters.size();
    ObjectInspector[] mergeParameterObjectInspectors = new ObjectInspector[mergeParameterCount];
    for (int i = 0; i < mergeParameterCount; i++) {
        TypeInfo paramTypeInfo = mergeParameters.get(i).getTypeInfo();
        mergeParameterObjectInspectors[i] = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(paramTypeInfo);
    }
    VectorRandomRowSource mergeRowSource = new VectorRandomRowSource();
    mergeRowSource.initGenerationSpecSchema(
        random, mergeAggrGenerationSpecList,
        /* maxComplexDepth */ 0,
        /* allowNull */ false,
        /* isUnicodeOk */ true,
        mergeDataTypePhysicalVariationList);
    Object[][] mergeRandomRows = mergeRowSource.randomRows(TEST_ROW_COUNT);
    // Reduce the key range to cause there to be work for each PARTIAL2 key.
    final int mergeMaxKeyCount = dataAggrMaxKeyCount / reductionFactor;
    Object[] partial1Results = (Object[]) partial1ResultsArray[0];
    short partial1Key = 0;
    for (int i = 0; i < mergeRandomRows.length; i++) {
        // Find a non-NULL entry...
        while (true) {
            if (partial1Key >= dataAggrMaxKeyCount) {
                partial1Key = 0;
            }
            if (partial1Results[partial1Key] != null) {
                break;
            }
            partial1Key++;
        }
        final short mergeKey = (short) (partial1Key % mergeMaxKeyCount);
        mergeRandomRows[i][0] = new ShortWritable(mergeKey);
        mergeRandomRows[i][1] = partial1Results[partial1Key];
        partial1Key++;
    }
    VectorRandomBatchSource mergeBatchSource = VectorRandomBatchSource.createInterestingBatches(random, mergeRowSource, mergeRandomRows, null);
    // We need to pass the original TypeInfo in for initializing the evaluator.
    GenericUDAFEvaluator mergeEvaluator = getEvaluator(aggregationName, typeInfo);
    /*
    System.out.println(
        "*DEBUG* GenericUDAFEvaluator for " + aggregationName + ", " + typeInfo.getTypeName() + ": " +
            mergeEvaluator.getClass().getSimpleName());
    */
    // The only way to get the return object inspector (and its return type) is to
    // initialize it...
    ObjectInspector mergeReturnOI = mergeEvaluator.init(mergeUdafEvaluatorMode, mergeParameterObjectInspectors);
    TypeInfo mergeOutputTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(mergeReturnOI);
    Object[] mergeResultsArray = new Object[AggregationTestMode.count];
    executeAggregationTests(
        aggregationName, partial1OutputTypeInfo, mergeEvaluator, mergeOutputTypeInfo,
        mergeUdafEvaluatorMode, mergeMaxKeyCount, columns, columnNames, mergeParameters,
        mergeRandomRows, mergeRowSource, mergeBatchSource,
        /* tryDecimal64 */ false, mergeResultsArray);
    verifyAggregationResults(partial1OutputTypeInfo, mergeOutputTypeInfo, mergeMaxKeyCount, mergeUdafEvaluatorMode, mergeResultsArray);
}
Also used: WritableShortObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableShortObjectInspector), ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), GenericUDAFEvaluator (org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator), VectorRandomBatchSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource), ArrayList (java.util.ArrayList), ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable), DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo), TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo), VarcharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo), CharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo), GenerationSpec (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec), DataTypePhysicalVariation (org.apache.hadoop.hive.common.type.DataTypePhysicalVariation), ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc), ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc), VectorRandomRowSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource)
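The merge path above exercises the standard GenericUDAFEvaluator contract. As a hedged sketch (the buffer map and loop are illustrative, not the actual code inside executeAggregationTests), the row-mode reference computation looks roughly like this, assuming the surrounding method declares throws HiveException and imports java.util.Map and java.util.HashMap:

    // Sketch: keep one aggregation buffer per merge key and merge each
    // PARTIAL1 result (column 1) into the buffer for its key (column 0).
    Map<Short, GenericUDAFEvaluator.AggregationBuffer> buffers =
        new HashMap<Short, GenericUDAFEvaluator.AggregationBuffer>();
    for (Object[] row : mergeRandomRows) {
        short key = ((ShortWritable) row[0]).get();
        GenericUDAFEvaluator.AggregationBuffer agg = buffers.get(key);
        if (agg == null) {
            agg = mergeEvaluator.getNewAggregationBuffer();
            buffers.put(key, agg);
        }
        mergeEvaluator.merge(agg, row[1]);
    }
    // Under Mode.PARTIAL2 the per-key output is terminatePartial(agg);
    // under Mode.FINAL it is terminate(agg).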

Example 18 with VectorRandomBatchSource

Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource in project hive by apache.

From the class TestVectorCoalesceElt, method doCoalesceOnRandomDataType:

private boolean doCoalesceOnRandomDataType(Random random, int iteration, boolean isCoalesce, boolean isEltIndexConst, int columnCount, int[] constantColumns, int[] nullConstantColumns, boolean allowNulls) throws Exception {
    String typeName;
    if (isCoalesce) {
        typeName = VectorRandomRowSource.getRandomTypeName(
            random, SupportedTypes.PRIMITIVES, /* allowedTypeNameSet */ null);
        typeName = VectorRandomRowSource.getDecoratedTypeName(
            random, typeName, SupportedTypes.PRIMITIVES,
            /* allowedTypeNameSet */ null, /* depth */ 0, /* maxDepth */ 2);
    } else {
        // ELT only chooses between STRINGs.
        typeName = "string";
    }
    TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(typeName);
    // ----------------------------------------------------------------------------------------------
    final TypeInfo intTypeInfo;
    ObjectInspector intObjectInspector;
    if (isCoalesce) {
        intTypeInfo = null;
        intObjectInspector = null;
    } else {
        intTypeInfo = TypeInfoFactory.intTypeInfo;
        intObjectInspector = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(intTypeInfo);
    }
    ObjectInspector objectInspector = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(typeInfo);
    // ----------------------------------------------------------------------------------------------
    List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
    List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList = new ArrayList<DataTypePhysicalVariation>();
    List<String> columns = new ArrayList<String>();
    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
    int columnNum = 1;
    if (!isCoalesce) {
        List<Object> intValueList = new ArrayList<Object>();
        for (int i = -1; i < columnCount + 2; i++) {
            intValueList.add(new IntWritable(i));
        }
        final int intValueListCount = intValueList.size();
        ExprNodeDesc intColExpr;
        if (!isEltIndexConst) {
            generationSpecList.add(GenerationSpec.createValueList(intTypeInfo, intValueList));
            explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
            String columnName = "col" + columnNum++;
            columns.add(columnName);
            intColExpr = new ExprNodeColumnDesc(intTypeInfo, columnName, "table", false);
        } else {
            final Object scalarObject;
            if (random.nextInt(10) != 0) {
                scalarObject = intValueList.get(random.nextInt(intValueListCount));
            } else {
                scalarObject = null;
            }
            // The constant is the ELT index, so it must carry the int type info.
            intColExpr = new ExprNodeConstantDesc(intTypeInfo, scalarObject);
        }
        children.add(intColExpr);
    }
    for (int c = 0; c < columnCount; c++) {
        ExprNodeDesc colExpr;
        if (!contains(constantColumns, c)) {
            generationSpecList.add(GenerationSpec.createSameType(typeInfo));
            explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
            String columnName = "col" + columnNum++;
            columns.add(columnName);
            colExpr = new ExprNodeColumnDesc(typeInfo, columnName, "table", false);
        } else {
            final Object scalarObject;
            if (!contains(nullConstantColumns, c)) {
                scalarObject = VectorRandomRowSource.randomPrimitiveObject(random, (PrimitiveTypeInfo) typeInfo);
            } else {
                scalarObject = null;
            }
            colExpr = new ExprNodeConstantDesc(typeInfo, scalarObject);
        }
        children.add(colExpr);
    }
    VectorRandomRowSource rowSource = new VectorRandomRowSource();
    rowSource.initGenerationSpecSchema(
        random, generationSpecList,
        /* maxComplexDepth */ 0,
        /* allowNull */ allowNulls,
        /* isUnicodeOk */ true,
        explicitDataTypePhysicalVariationList);
    String[] columnNames = columns.toArray(new String[0]);
    Object[][] randomRows = rowSource.randomRows(100000);
    VectorRandomBatchSource batchSource = VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);
    final GenericUDF udf = (isCoalesce ? new GenericUDFCoalesce() : new GenericUDFElt());
    final int start = isCoalesce ? 0 : 1;
    final int end = start + columnCount;
    ObjectInspector[] argumentOIs = new ObjectInspector[end];
    if (!isCoalesce) {
        argumentOIs[0] = intObjectInspector;
    }
    for (int i = start; i < end; i++) {
        argumentOIs[i] = objectInspector;
    }
    final ObjectInspector outputObjectInspector = udf.initialize(argumentOIs);
    TypeInfo outputTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(outputObjectInspector);
    ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc(outputTypeInfo, udf, children);
    final int rowCount = randomRows.length;
    Object[][] resultObjectsArray = new Object[CoalesceEltTestMode.count][];
    for (int i = 0; i < CoalesceEltTestMode.count; i++) {
        Object[] resultObjects = new Object[rowCount];
        resultObjectsArray[i] = resultObjects;
        CoalesceEltTestMode coalesceEltTestMode = CoalesceEltTestMode.values()[i];
        switch(coalesceEltTestMode) {
            case ROW_MODE:
                if (!doRowCastTest(typeInfo, columns, children, udf, exprDesc, randomRows, rowSource.rowStructObjectInspector(), exprDesc.getWritableObjectInspector(), resultObjects)) {
                    return false;
                }
                break;
            case ADAPTOR:
            case VECTOR_EXPRESSION:
                if (!doVectorCastTest(typeInfo, iteration, columns, columnNames, rowSource.typeInfos(), rowSource.dataTypePhysicalVariations(), children, udf, exprDesc, coalesceEltTestMode, batchSource, exprDesc.getWritableObjectInspector(), outputTypeInfo, resultObjects)) {
                    return false;
                }
                break;
            default:
                throw new RuntimeException("Unexpected IF statement test mode " + coalesceEltTestMode);
        }
    }
    for (int i = 0; i < rowCount; i++) {
        // Row-mode is the expected value.
        Object expectedResult = resultObjectsArray[0][i];
        for (int v = 1; v < CoalesceEltTestMode.count; v++) {
            Object vectorResult = resultObjectsArray[v][i];
            CoalesceEltTestMode coalesceEltTestMode = CoalesceEltTestMode.values()[v];
            if (expectedResult == null || vectorResult == null) {
                if (expectedResult != null || vectorResult != null) {
                    Assert.fail("Row " + i + " sourceTypeName " + typeName + " " + coalesceEltTestMode + " iteration " + iteration + " result is NULL " + (vectorResult == null ? "YES" : "NO result " + vectorResult.toString()) + " does not match row-mode expected result is NULL " + (expectedResult == null ? "YES" : "NO result '" + expectedResult.toString()) + "'" + " row values " + Arrays.toString(randomRows[i]) + " exprDesc " + exprDesc.toString());
                }
            } else {
                if (!expectedResult.equals(vectorResult)) {
                    Assert.fail("Row " + i + " sourceTypeName " + typeName + " " + coalesceEltTestMode + " iteration " + iteration + " result '" + vectorResult.toString() + "'" + " (" + vectorResult.getClass().getSimpleName() + ")" + " does not match row-mode expected result '" + expectedResult.toString() + "'" + " (" + expectedResult.getClass().getSimpleName() + ")" + " row values " + Arrays.toString(randomRows[i]) + " exprDesc " + exprDesc.toString());
                }
            }
        }
    }
    return true;
}
Also used: ArrayList (java.util.ArrayList), PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo), DataTypePhysicalVariation (org.apache.hadoop.hive.common.type.DataTypePhysicalVariation), ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc), ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc), IntWritable (org.apache.hadoop.io.IntWritable), ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc), VectorRandomBatchSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource), ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc), TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo), GenerationSpec (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec), GenericUDF (org.apache.hadoop.hive.ql.udf.generic.GenericUDF), GenericUDFCoalesce (org.apache.hadoop.hive.ql.udf.generic.GenericUDFCoalesce), GenericUDFElt (org.apache.hadoop.hive.ql.udf.generic.GenericUDFElt), VectorRandomRowSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource)
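For reference, the row-mode path that doRowCastTest exercises reduces to GenericUDF.evaluate over deferred arguments. A minimal hedged sketch, assuming every argument came from a generated column (the real helper handles constant children separately):

    // Sketch: evaluate one random row through the already-initialized UDF.
    Object[] row = randomRows[0];
    GenericUDF.DeferredObject[] deferredChildren = new GenericUDF.DeferredObject[row.length];
    for (int c = 0; c < row.length; c++) {
        deferredChildren[c] = new GenericUDF.DeferredJavaObject(row[c]);
    }
    // COALESCE returns the first non-null argument; ELT returns the string
    // selected by the leading int index (or null when the index is out of range).
    Object result = udf.evaluate(deferredChildren);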

Example 19 with VectorRandomBatchSource

Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource in project hive by apache.

From the class TestVectorArithmetic, method doTestsWithDiffColumnScalar:

private void doTestsWithDiffColumnScalar(Random random, TypeInfo typeInfo1, TypeInfo typeInfo2, ColumnScalarMode columnScalarMode, Arithmetic arithmetic, boolean tryDecimal64) throws Exception {
    String typeName1 = typeInfo1.getTypeName();
    PrimitiveCategory primitiveCategory1 = ((PrimitiveTypeInfo) typeInfo1).getPrimitiveCategory();
    String typeName2 = typeInfo2.getTypeName();
    PrimitiveCategory primitiveCategory2 = ((PrimitiveTypeInfo) typeInfo2).getPrimitiveCategory();
    List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
    List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList = new ArrayList<DataTypePhysicalVariation>();
    List<String> columns = new ArrayList<String>();
    int columnNum = 1;
    ExprNodeDesc col1Expr;
    Object scalar1Object = null;
    final boolean decimal64Enable1 = checkDecimal64(tryDecimal64, typeInfo1);
    if (columnScalarMode == ColumnScalarMode.COLUMN_COLUMN || columnScalarMode == ColumnScalarMode.COLUMN_SCALAR) {
        generationSpecList.add(GenerationSpec.createSameType(typeInfo1));
        explicitDataTypePhysicalVariationList.add(decimal64Enable1 ? DataTypePhysicalVariation.DECIMAL_64 : DataTypePhysicalVariation.NONE);
        String columnName = "col" + (columnNum++);
        col1Expr = new ExprNodeColumnDesc(typeInfo1, columnName, "table", false);
        columns.add(columnName);
    } else {
        scalar1Object = VectorRandomRowSource.randomPrimitiveObject(random, (PrimitiveTypeInfo) typeInfo1);
        // Adjust the decimal type to the scalar's type...
        if (typeInfo1 instanceof DecimalTypeInfo) {
            typeInfo1 = getDecimalScalarTypeInfo(scalar1Object);
        }
        col1Expr = new ExprNodeConstantDesc(typeInfo1, scalar1Object);
    }
    ExprNodeDesc col2Expr;
    Object scalar2Object = null;
    final boolean decimal64Enable2 = checkDecimal64(tryDecimal64, typeInfo2);
    if (columnScalarMode == ColumnScalarMode.COLUMN_COLUMN || columnScalarMode == ColumnScalarMode.SCALAR_COLUMN) {
        generationSpecList.add(GenerationSpec.createSameType(typeInfo2));
        explicitDataTypePhysicalVariationList.add(decimal64Enable2 ? DataTypePhysicalVariation.DECIMAL_64 : DataTypePhysicalVariation.NONE);
        String columnName = "col" + (columnNum++);
        col2Expr = new ExprNodeColumnDesc(typeInfo2, columnName, "table", false);
        columns.add(columnName);
    } else {
        scalar2Object = VectorRandomRowSource.randomPrimitiveObject(random, (PrimitiveTypeInfo) typeInfo2);
        // Adjust the decimal type to the scalar's type...
        if (typeInfo2 instanceof DecimalTypeInfo) {
            typeInfo2 = getDecimalScalarTypeInfo(scalar2Object);
        }
        col2Expr = new ExprNodeConstantDesc(typeInfo2, scalar2Object);
    }
    List<ObjectInspector> objectInspectorList = new ArrayList<ObjectInspector>();
    objectInspectorList.add(VectorRandomRowSource.getObjectInspector(typeInfo1));
    objectInspectorList.add(VectorRandomRowSource.getObjectInspector(typeInfo2));
    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
    children.add(col1Expr);
    children.add(col2Expr);
    // ----------------------------------------------------------------------------------------------
    String[] columnNames = columns.toArray(new String[0]);
    VectorRandomRowSource rowSource = new VectorRandomRowSource();
    rowSource.initGenerationSpecSchema(
        random, generationSpecList,
        /* maxComplexDepth */ 0,
        /* allowNull */ true,
        /* isUnicodeOk */ true,
        explicitDataTypePhysicalVariationList);
    Object[][] randomRows = rowSource.randomRows(100000);
    VectorRandomBatchSource batchSource = VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);
    GenericUDF genericUdf;
    switch(arithmetic) {
        case ADD:
            genericUdf = new GenericUDFOPPlus();
            break;
        case SUBTRACT:
            genericUdf = new GenericUDFOPMinus();
            break;
        case MULTIPLY:
            genericUdf = new GenericUDFOPMultiply();
            break;
        case DIVIDE:
            genericUdf = new GenericUDFOPDivide();
            break;
        case MODULUS:
            genericUdf = new GenericUDFOPMod();
            break;
        default:
            throw new RuntimeException("Unexpected arithmetic " + arithmetic);
    }
    ObjectInspector[] objectInspectors = objectInspectorList.toArray(new ObjectInspector[objectInspectorList.size()]);
    ObjectInspector outputObjectInspector = null;
    try {
        outputObjectInspector = genericUdf.initialize(objectInspectors);
    } catch (Exception e) {
        Assert.fail(e.toString());
    }
    TypeInfo outputTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(outputObjectInspector);
    ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc(outputTypeInfo, genericUdf, children);
    final int rowCount = randomRows.length;
    Object[][] resultObjectsArray = new Object[ArithmeticTestMode.count][];
    for (int i = 0; i < ArithmeticTestMode.count; i++) {
        Object[] resultObjects = new Object[rowCount];
        resultObjectsArray[i] = resultObjects;
        ArithmeticTestMode arithmeticTestMode = ArithmeticTestMode.values()[i];
        switch(arithmeticTestMode) {
            case ROW_MODE:
                doRowArithmeticTest(typeInfo1, typeInfo2, columns, children, exprDesc, arithmetic, randomRows, columnScalarMode, rowSource.rowStructObjectInspector(), outputTypeInfo, resultObjects);
                break;
            case ADAPTOR:
            case VECTOR_EXPRESSION:
                doVectorArithmeticTest(typeInfo1, typeInfo2, columns, columnNames, rowSource.typeInfos(), rowSource.dataTypePhysicalVariations(), children, exprDesc, arithmetic, arithmeticTestMode, columnScalarMode, batchSource, exprDesc.getWritableObjectInspector(), outputTypeInfo, resultObjects);
                break;
            default:
                throw new RuntimeException("Unexpected IF statement test mode " + arithmeticTestMode);
        }
    }
    for (int i = 0; i < rowCount; i++) {
        // Row-mode is the expected value.
        Object expectedResult = resultObjectsArray[0][i];
        for (int v = 1; v < ArithmeticTestMode.count; v++) {
            Object vectorResult = resultObjectsArray[v][i];
            if (expectedResult == null || vectorResult == null) {
                if (expectedResult != null || vectorResult != null) {
                    Assert.fail("Row " + i + " typeName1 " + typeName1 + " typeName2 " + typeName2 + " outputTypeName " + outputTypeInfo.getTypeName() + " " + arithmetic + " " + ArithmeticTestMode.values()[v] + " " + columnScalarMode + " result is NULL " + (vectorResult == null) + " does not match row-mode expected result is NULL " + (expectedResult == null) + (columnScalarMode == ColumnScalarMode.SCALAR_COLUMN ? " scalar1 " + scalar1Object.toString() : "") + " row values " + Arrays.toString(randomRows[i]) + (columnScalarMode == ColumnScalarMode.COLUMN_SCALAR ? " scalar2 " + scalar2Object.toString() : ""));
                }
            } else {
                if (!expectedResult.equals(vectorResult)) {
                    Assert.fail("Row " + i + " typeName1 " + typeName1 + " typeName2 " + typeName2 + " outputTypeName " + outputTypeInfo.getTypeName() + " " + arithmetic + " " + ArithmeticTestMode.values()[v] + " " + columnScalarMode + " result " + vectorResult.toString() + " (" + vectorResult.getClass().getSimpleName() + ")" + " does not match row-mode expected result " + expectedResult.toString() + " (" + expectedResult.getClass().getSimpleName() + ")" + (columnScalarMode == ColumnScalarMode.SCALAR_COLUMN ? " scalar1 " + scalar1Object.toString() : "") + " row values " + Arrays.toString(randomRows[i]) + (columnScalarMode == ColumnScalarMode.COLUMN_SCALAR ? " scalar2 " + scalar2Object.toString() : ""));
                }
            }
        }
    }
}
Also used: GenericUDFOPMultiply (org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPMultiply), ArrayList (java.util.ArrayList), PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo), DataTypePhysicalVariation (org.apache.hadoop.hive.common.type.DataTypePhysicalVariation), ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc), ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc), PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory), ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc), ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), VectorRandomBatchSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource), GenericUDFOPMod (org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPMod), ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc), DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo), TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo), SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException), HiveException (org.apache.hadoop.hive.ql.metadata.HiveException), GenerationSpec (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec), GenericUDF (org.apache.hadoop.hive.ql.udf.generic.GenericUDF), GenericUDFOPDivide (org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPDivide), GenericUDFOPPlus (org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPPlus), GenericUDFOPMinus (org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPMinus), VectorRandomRowSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource)
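The ADAPTOR and VECTOR_EXPRESSION modes need exprDesc compiled into a VectorExpression first. A hedged sketch of that wiring, modeled on the VectorizationContext usage elsewhere in Hive's vectorization tests (the constructor arguments and the fresh HiveConf are assumptions, not code from this method):

    // Sketch: compile the ExprNodeGenericFuncDesc into a vector expression.
    HiveConf hiveConf = new HiveConf();
    VectorizationContext vectorizationContext = new VectorizationContext(
        "name", columns,
        Arrays.asList(rowSource.typeInfos()),
        Arrays.asList(rowSource.dataTypePhysicalVariations()),
        hiveConf);
    VectorExpression vectorExpression = vectorizationContext.getVectorExpression(exprDesc);

doVectorArithmeticTest then evaluates this expression against the batches from batchSource and collects the output for the comparison loop at the end of the method above.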

Example 20 with VectorRandomBatchSource

Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource in project hive by apache.

From the class TestVectorStringConcat, method doStringConcatTestsWithDiffColumnScalar:

private void doStringConcatTestsWithDiffColumnScalar(Random random, String stringTypeName1, String stringTypeName2, ColumnScalarMode columnScalarMode) throws Exception {
    TypeInfo stringTypeInfo1 = TypeInfoUtils.getTypeInfoFromTypeString(stringTypeName1);
    PrimitiveCategory stringPrimitiveCategory1 = ((PrimitiveTypeInfo) stringTypeInfo1).getPrimitiveCategory();
    TypeInfo stringTypeInfo2 = TypeInfoUtils.getTypeInfoFromTypeString(stringTypeName2);
    PrimitiveCategory stringPrimitiveCategory2 = ((PrimitiveTypeInfo) stringTypeInfo2).getPrimitiveCategory();
    String functionName = "concat";
    List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
    List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList = new ArrayList<DataTypePhysicalVariation>();
    List<String> columns = new ArrayList<String>();
    int columnNum = 1;
    ExprNodeDesc col1Expr;
    if (columnScalarMode == ColumnScalarMode.COLUMN_COLUMN || columnScalarMode == ColumnScalarMode.COLUMN_SCALAR) {
        generationSpecList.add(GenerationSpec.createSameType(stringTypeInfo1));
        explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
        String columnName = "col" + (columnNum++);
        col1Expr = new ExprNodeColumnDesc(stringTypeInfo1, columnName, "table", false);
        columns.add(columnName);
    } else {
        Object scalar1Object = VectorRandomRowSource.randomPrimitiveObject(random, (PrimitiveTypeInfo) stringTypeInfo1);
        col1Expr = new ExprNodeConstantDesc(stringTypeInfo1, scalar1Object);
    }
    ExprNodeDesc col2Expr;
    if (columnScalarMode == ColumnScalarMode.COLUMN_COLUMN || columnScalarMode == ColumnScalarMode.SCALAR_COLUMN) {
        generationSpecList.add(GenerationSpec.createSameType(stringTypeInfo2));
        explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
        String columnName = "col" + (columnNum++);
        col2Expr = new ExprNodeColumnDesc(stringTypeInfo2, columnName, "table", false);
        columns.add(columnName);
    } else {
        Object scalar2Object = VectorRandomRowSource.randomPrimitiveObject(random, (PrimitiveTypeInfo) stringTypeInfo2);
        col2Expr = new ExprNodeConstantDesc(stringTypeInfo2, scalar2Object);
    }
    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
    children.add(col1Expr);
    children.add(col2Expr);
    // ----------------------------------------------------------------------------------------------
    String[] columnNames = columns.toArray(new String[0]);
    VectorRandomRowSource rowSource = new VectorRandomRowSource();
    rowSource.initGenerationSpecSchema(
        random, generationSpecList,
        /* maxComplexDepth */ 0,
        /* allowNull */ true,
        /* isUnicodeOk */ true,
        explicitDataTypePhysicalVariationList);
    Object[][] randomRows = rowSource.randomRows(100000);
    VectorRandomBatchSource batchSource = VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);
    String[] outputScratchTypeNames = new String[] { "string" };
    VectorizedRowBatchCtx batchContext = new VectorizedRowBatchCtx(
        columnNames, rowSource.typeInfos(), rowSource.dataTypePhysicalVariations(),
        /* dataColumnNums */ null,
        /* partitionColumnCount */ 0,
        /* virtualColumnCount */ 0,
        /* neededVirtualColumns */ null,
        outputScratchTypeNames, null);
    GenericUDF genericUdf;
    FunctionInfo funcInfo = null;
    try {
        funcInfo = FunctionRegistry.getFunctionInfo(functionName);
    } catch (SemanticException e) {
        Assert.fail("Failed to load " + functionName + " " + e);
    }
    genericUdf = funcInfo.getGenericUDF();
    final int rowCount = randomRows.length;
    Object[][] resultObjectsArray = new Object[StringConcatTestMode.count][];
    for (int i = 0; i < StringConcatTestMode.count; i++) {
        Object[] resultObjects = new Object[rowCount];
        resultObjectsArray[i] = resultObjects;
        StringConcatTestMode stringConcatTestMode = StringConcatTestMode.values()[i];
        switch(stringConcatTestMode) {
            case ROW_MODE:
                doRowStringConcatTest(stringTypeInfo1, stringTypeInfo2, columns, children, randomRows, columnScalarMode, rowSource.rowStructObjectInspector(), genericUdf, resultObjects);
                break;
            case ADAPTOR:
            case VECTOR_EXPRESSION:
                doVectorStringConcatTest(stringTypeInfo1, stringTypeInfo2, columns, rowSource.typeInfos(), children, stringConcatTestMode, columnScalarMode, batchSource, batchContext, rowSource.rowStructObjectInspector(), genericUdf, resultObjects);
                break;
            default:
                throw new RuntimeException("Unexpected IF statement test mode " + stringConcatTestMode);
        }
    }
    for (int i = 0; i < rowCount; i++) {
        // Row-mode is the expected value.
        Object expectedResult = resultObjectsArray[0][i];
        for (int v = 1; v < StringConcatTestMode.count; v++) {
            Object vectorResult = resultObjectsArray[v][i];
            if (expectedResult == null || vectorResult == null) {
                if (expectedResult != null || vectorResult != null) {
                    Assert.fail("Row " + i + " " + StringConcatTestMode.values()[v] + " " + columnScalarMode + " result is NULL " + (vectorResult == null) + " does not match row-mode expected result is NULL " + (expectedResult == null) + " row values " + Arrays.toString(randomRows[i]));
                }
            } else {
                if (!expectedResult.equals(vectorResult)) {
                    Assert.fail("Row " + i + " " + StringConcatTestMode.values()[v] + " " + columnScalarMode + " result \"" + vectorResult.toString() + "\"" + " (" + vectorResult.getClass().getSimpleName() + ")" + " does not match row-mode expected result \"" + expectedResult.toString() + "\"" + " (" + expectedResult.getClass().getSimpleName() + ")" + " row values " + Arrays.toString(randomRows[i]));
                }
            }
        }
    }
}
Also used: ArrayList (java.util.ArrayList), PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo), DataTypePhysicalVariation (org.apache.hadoop.hive.common.type.DataTypePhysicalVariation), ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc), ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc), PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory), SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException), ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc), VectorRandomBatchSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource), FunctionInfo (org.apache.hadoop.hive.ql.exec.FunctionInfo), VarcharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo), DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo), TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo), CharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo), GenerationSpec (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec), VectorizedRowBatchCtx (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx), GenericUDF (org.apache.hadoop.hive.ql.udf.generic.GenericUDF), VectorRandomRowSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource)
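Unlike the earlier examples, this test resolves its GenericUDF through the FunctionRegistry instead of instantiating one directly. Before row-mode evaluation the resolved UDF still has to be initialized with one ObjectInspector per argument; a minimal sketch of that step (two string-family arguments, mirroring the column setup above):

    // Sketch: initialize the resolved "concat" UDF; initialize() returns the
    // ObjectInspector describing the string result both test modes produce.
    ObjectInspector[] argumentOIs = new ObjectInspector[] {
        VectorRandomRowSource.getObjectInspector(stringTypeInfo1),
        VectorRandomRowSource.getObjectInspector(stringTypeInfo2) };
    ObjectInspector concatOutputOI = genericUdf.initialize(argumentOIs);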

Aggregations

VectorRandomBatchSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource): 22 usages
ArrayList (java.util.ArrayList): 21 usages
DataTypePhysicalVariation (org.apache.hadoop.hive.common.type.DataTypePhysicalVariation): 19 usages
VectorRandomRowSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource): 19 usages
ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc): 19 usages
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc): 19 usages
TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo): 19 usages
GenerationSpec (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec): 18 usages
DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo): 16 usages
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 16 usages
GenericUDF (org.apache.hadoop.hive.ql.udf.generic.GenericUDF): 14 usages
ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc): 11 usages
ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc): 11 usages
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 11 usages
PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory): 8 usages
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 6 usages
CharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo): 6 usages
VarcharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo): 6 usages
VectorizedRowBatchCtx (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx): 5 usages
FunctionInfo (org.apache.hadoop.hive.ql.exec.FunctionInfo): 4 usages