Example 6 with VectorRandomRowSource

Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource in project hive by apache.

From the class TestVectorAggregation, the method doTests:

private void doTests(Random random, String aggregationName, TypeInfo typeInfo, boolean isCountStar, boolean tryDecimal64) throws Exception {
    List<GenerationSpec> dataAggrGenerationSpecList = new ArrayList<GenerationSpec>();
    List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList = new ArrayList<DataTypePhysicalVariation>();
    TypeInfo keyTypeInfo = TypeInfoFactory.shortTypeInfo;
    GenerationSpec keyGenerationSpec = GenerationSpec.createOmitGeneration(keyTypeInfo);
    dataAggrGenerationSpecList.add(keyGenerationSpec);
    explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
    final boolean decimal64Enable = checkDecimal64(tryDecimal64, typeInfo);
    GenerationSpec generationSpec = GenerationSpec.createSameType(typeInfo);
    dataAggrGenerationSpecList.add(generationSpec);
    explicitDataTypePhysicalVariationList.add(decimal64Enable ? DataTypePhysicalVariation.DECIMAL_64 : DataTypePhysicalVariation.NONE);
    List<String> columns = new ArrayList<String>();
    columns.add("col0");
    columns.add("col1");
    ExprNodeColumnDesc dataAggrCol1Expr = new ExprNodeColumnDesc(typeInfo, "col1", "table", false);
    List<ExprNodeDesc> dataAggrParameters = new ArrayList<ExprNodeDesc>();
    if (!isCountStar) {
        dataAggrParameters.add(dataAggrCol1Expr);
    }
    final int dataAggrParameterCount = dataAggrParameters.size();
    ObjectInspector[] dataAggrParameterObjectInspectors = new ObjectInspector[dataAggrParameterCount];
    for (int i = 0; i < dataAggrParameterCount; i++) {
        TypeInfo paramTypeInfo = dataAggrParameters.get(i).getTypeInfo();
        dataAggrParameterObjectInspectors[i] = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(paramTypeInfo);
    }
    String[] columnNames = columns.toArray(new String[0]);
    final int dataAggrMaxKeyCount = 20000;
    final int reductionFactor = 16;
    ObjectInspector keyObjectInspector = VectorRandomRowSource.getObjectInspector(keyTypeInfo);
    /*
     * PARTIAL1.
     */
    VectorRandomRowSource partial1RowSource = new VectorRandomRowSource();
    boolean allowNull = !aggregationName.equals("bloom_filter");
    partial1RowSource.initGenerationSpecSchema(random, dataAggrGenerationSpecList,
        /* maxComplexDepth */ 0, allowNull, /* isUnicodeOk */ true,
        explicitDataTypePhysicalVariationList);
    Object[][] partial1RandomRows = partial1RowSource.randomRows(TEST_ROW_COUNT);
    final int partial1RowCount = partial1RandomRows.length;
    for (int i = 0; i < partial1RowCount; i++) {
        final short shortKey = (short) getLinearRandomNumber(random, dataAggrMaxKeyCount);
        partial1RandomRows[i][0] = ((WritableShortObjectInspector) keyObjectInspector).create(shortKey);
    }
    VectorRandomBatchSource partial1BatchSource = VectorRandomBatchSource.createInterestingBatches(random, partial1RowSource, partial1RandomRows, null);
    GenericUDAFEvaluator partial1Evaluator = getEvaluator(aggregationName, typeInfo);
    if (isCountStar) {
        Assert.assertTrue(partial1Evaluator instanceof GenericUDAFCountEvaluator);
        GenericUDAFCountEvaluator countEvaluator = (GenericUDAFCountEvaluator) partial1Evaluator;
        countEvaluator.setCountAllColumns(true);
    }
    /*
    System.out.println(
        "*DEBUG* GenericUDAFEvaluator for " + aggregationName + ", " + typeInfo.getTypeName() + ": " +
            partial1Evaluator.getClass().getSimpleName());
    */
    // The only way to get the return object inspector (and its return type) is to
    // initialize it...
    final GenericUDAFEvaluator.Mode partial1UdafEvaluatorMode = GenericUDAFEvaluator.Mode.PARTIAL1;
    ObjectInspector partial1ReturnOI = partial1Evaluator.init(partial1UdafEvaluatorMode, dataAggrParameterObjectInspectors);
    TypeInfo partial1OutputTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(partial1ReturnOI);
    Object[] partial1ResultsArray = new Object[AggregationTestMode.count];
    executeAggregationTests(aggregationName, typeInfo, partial1Evaluator, partial1OutputTypeInfo, partial1UdafEvaluatorMode, dataAggrMaxKeyCount, columns, columnNames, dataAggrParameters, partial1RandomRows, partial1RowSource, partial1BatchSource, tryDecimal64, partial1ResultsArray);
    verifyAggregationResults(typeInfo, partial1OutputTypeInfo, dataAggrMaxKeyCount, partial1UdafEvaluatorMode, partial1ResultsArray);
    final boolean hasDifferentCompleteExpr;
    if (varianceNames.contains(aggregationName)) {
        hasDifferentCompleteExpr = true;
    } else {
        switch(aggregationName) {
            case "avg":
                hasDifferentCompleteExpr = true;
                break;
            case "bloom_filter":
            case "count":
            case "max":
            case "min":
            case "sum":
                hasDifferentCompleteExpr = false;
                break;
            default:
                throw new RuntimeException("Unexpected aggregation name " + aggregationName);
        }
    }
    if (hasDifferentCompleteExpr) {
        /*
         * COMPLETE.
         */
        VectorRandomRowSource completeRowSource = new VectorRandomRowSource();
        completeRowSource.initGenerationSpecSchema(random, dataAggrGenerationSpecList,
            /* maxComplexDepth */ 0, /* allowNull */ true, /* isUnicodeOk */ true,
            explicitDataTypePhysicalVariationList);
        Object[][] completeRandomRows = completeRowSource.randomRows(TEST_ROW_COUNT);
        final int completeRowCount = completeRandomRows.length;
        for (int i = 0; i < completeRowCount; i++) {
            final short shortKey = (short) getLinearRandomNumber(random, dataAggrMaxKeyCount);
            completeRandomRows[i][0] = ((WritableShortObjectInspector) keyObjectInspector).create(shortKey);
        }
        VectorRandomBatchSource completeBatchSource = VectorRandomBatchSource.createInterestingBatches(random, completeRowSource, completeRandomRows, null);
        GenericUDAFEvaluator completeEvaluator = getEvaluator(aggregationName, typeInfo);
        /*
        System.out.println(
            "*DEBUG* GenericUDAFEvaluator for " + aggregationName + ", " + typeInfo.getTypeName() + ": " +
                completeEvaluator.getClass().getSimpleName());
        */
        // The only way to get the return object inspector (and its return type) is to
        // initialize it...
        final GenericUDAFEvaluator.Mode completeUdafEvaluatorMode = GenericUDAFEvaluator.Mode.COMPLETE;
        ObjectInspector completeReturnOI = completeEvaluator.init(completeUdafEvaluatorMode, dataAggrParameterObjectInspectors);
        TypeInfo completeOutputTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(completeReturnOI);
        Object[] completeResultsArray = new Object[AggregationTestMode.count];
        executeAggregationTests(aggregationName, typeInfo, completeEvaluator, completeOutputTypeInfo, completeUdafEvaluatorMode, dataAggrMaxKeyCount, columns, columnNames, dataAggrParameters, completeRandomRows, completeRowSource, completeBatchSource, tryDecimal64, completeResultsArray);
        verifyAggregationResults(typeInfo, completeOutputTypeInfo, dataAggrMaxKeyCount, completeUdafEvaluatorMode, completeResultsArray);
    }
    final boolean hasDifferentPartial2Expr;
    if (varianceNames.contains(aggregationName)) {
        hasDifferentPartial2Expr = true;
    } else {
        switch(aggregationName) {
            case "avg":
                hasDifferentPartial2Expr = true;
                break;
            case "bloom_filter":
            case "count":
            case "max":
            case "min":
            case "sum":
                hasDifferentPartial2Expr = false;
                break;
            default:
                throw new RuntimeException("Unexpected aggregation name " + aggregationName);
        }
    }
    if (hasDifferentPartial2Expr) {
        /*
         * PARTIAL2.
         */
        final GenericUDAFEvaluator.Mode mergeUdafEvaluatorMode = GenericUDAFEvaluator.Mode.PARTIAL2;
        doMerge(mergeUdafEvaluatorMode, random, aggregationName, typeInfo, keyGenerationSpec, columns, columnNames, dataAggrMaxKeyCount, reductionFactor, partial1OutputTypeInfo, partial1ResultsArray);
    }
    final boolean hasDifferentFinalExpr;
    if (varianceNames.contains(aggregationName)) {
        hasDifferentFinalExpr = true;
    } else {
        switch(aggregationName) {
            case "avg":
                hasDifferentFinalExpr = true;
                break;
            case "bloom_filter":
            case "count":
                hasDifferentFinalExpr = true;
                break;
            case "max":
            case "min":
            case "sum":
                hasDifferentFinalExpr = false;
                break;
            default:
                throw new RuntimeException("Unexpected aggregation name " + aggregationName);
        }
    }
    if (hasDifferentFinalExpr) {
        /*
         * FINAL.
         */
        final GenericUDAFEvaluator.Mode mergeUdafEvaluatorMode = GenericUDAFEvaluator.Mode.FINAL;
        doMerge(mergeUdafEvaluatorMode, random, aggregationName, typeInfo, keyGenerationSpec, columns, columnNames, dataAggrMaxKeyCount, reductionFactor, partial1OutputTypeInfo, partial1ResultsArray);
    }
}
Also used : GenericUDAFEvaluator(org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator) ArrayList(java.util.ArrayList) DataTypePhysicalVariation(org.apache.hadoop.hive.common.type.DataTypePhysicalVariation) ExprNodeColumnDesc(org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) WritableShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableShortObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) GenericUDAFCountEvaluator(org.apache.hadoop.hive.ql.udf.generic.GenericUDAFCount.GenericUDAFCountEvaluator) VectorRandomBatchSource(org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) GenerationSpec(org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec) VectorRandomRowSource(org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource)
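
All of these examples share the same setup: build a VectorRandomRowSource over a generation-spec schema, draw random rows, and wrap them in a VectorRandomBatchSource. Below is a minimal sketch of that pattern, assuming only the calls shown in the examples; the class RandomRowSetupSketch and its method are hypothetical, not part of the Hive tests.

import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
import org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource;
import org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource;
import org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;

public class RandomRowSetupSketch {

    // Build a batch source over random rows of a single column of the given type.
    public static VectorRandomBatchSource buildBatchSource(Random random, TypeInfo typeInfo) {
        List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
        List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList =
            new ArrayList<DataTypePhysicalVariation>();
        generationSpecList.add(GenerationSpec.createSameType(typeInfo));
        explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);

        VectorRandomRowSource rowSource = new VectorRandomRowSource();
        // Same argument order as in the tests above.
        rowSource.initGenerationSpecSchema(random, generationSpecList,
            /* maxComplexDepth */ 0, /* allowNull */ true, /* isUnicodeOk */ true,
            explicitDataTypePhysicalVariationList);

        Object[][] randomRows = rowSource.randomRows(1000);
        return VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);
    }
}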

Example 7 with VectorRandomRowSource

Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource in project hive by apache.

From the class TestVectorDateAddSub, the method doDateAddSubTestsWithDiffColumnScalar:

private void doDateAddSubTestsWithDiffColumnScalar(Random random, String dateTimeStringTypeName, String integerTypeName, ColumnScalarMode columnScalarMode, boolean isAdd) throws Exception {
    TypeInfo dateTimeStringTypeInfo = TypeInfoUtils.getTypeInfoFromTypeString(dateTimeStringTypeName);
    PrimitiveCategory dateTimeStringPrimitiveCategory = ((PrimitiveTypeInfo) dateTimeStringTypeInfo).getPrimitiveCategory();
    boolean isStringFamily = (dateTimeStringPrimitiveCategory == PrimitiveCategory.STRING || dateTimeStringPrimitiveCategory == PrimitiveCategory.CHAR || dateTimeStringPrimitiveCategory == PrimitiveCategory.VARCHAR);
    TypeInfo integerTypeInfo = TypeInfoUtils.getTypeInfoFromTypeString(integerTypeName);
    PrimitiveCategory integerPrimitiveCategory = ((PrimitiveTypeInfo) integerTypeInfo).getPrimitiveCategory();
    List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
    List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList = new ArrayList<DataTypePhysicalVariation>();
    List<String> columns = new ArrayList<String>();
    int columnNum = 1;
    ExprNodeDesc col1Expr;
    if (columnScalarMode == ColumnScalarMode.COLUMN_COLUMN || columnScalarMode == ColumnScalarMode.COLUMN_SCALAR) {
        if (!isStringFamily) {
            generationSpecList.add(GenerationSpec.createSameType(dateTimeStringTypeInfo));
        } else {
            generationSpecList.add(GenerationSpec.createStringFamilyOtherTypeValue(dateTimeStringTypeInfo, TypeInfoFactory.dateTypeInfo));
        }
        explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
        String columnName = "col" + (columnNum++);
        col1Expr = new ExprNodeColumnDesc(dateTimeStringTypeInfo, columnName, "table", false);
        columns.add(columnName);
    } else {
        Object scalar1Object;
        if (!isStringFamily) {
            scalar1Object = VectorRandomRowSource.randomPrimitiveObject(random, (PrimitiveTypeInfo) dateTimeStringTypeInfo);
        } else {
            scalar1Object = VectorRandomRowSource.randomStringFamilyOtherTypeValue(random, dateTimeStringTypeInfo, TypeInfoFactory.dateTypeInfo, false);
        }
        col1Expr = new ExprNodeConstantDesc(dateTimeStringTypeInfo, scalar1Object);
    }
    ExprNodeDesc col2Expr;
    if (columnScalarMode == ColumnScalarMode.COLUMN_COLUMN || columnScalarMode == ColumnScalarMode.SCALAR_COLUMN) {
        generationSpecList.add(GenerationSpec.createSameType(integerTypeInfo));
        explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
        String columnName = "col" + (columnNum++);
        col2Expr = new ExprNodeColumnDesc(integerTypeInfo, columnName, "table", false);
        columns.add(columnName);
    } else {
        // Limit the scalar to a small range so the date arithmetic stays within bounds.
        Object scalar2Object = smallerRange(random, integerPrimitiveCategory, /* wantWritable */ false);
        col2Expr = new ExprNodeConstantDesc(integerTypeInfo, scalar2Object);
    }
    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
    children.add(col1Expr);
    children.add(col2Expr);
    // ----------------------------------------------------------------------------------------------
    String[] columnNames = columns.toArray(new String[0]);
    VectorRandomRowSource rowSource = new VectorRandomRowSource();
    rowSource.initGenerationSpecSchema(random, generationSpecList,
        /* maxComplexDepth */ 0, /* allowNull */ true, /* isUnicodeOk */ true,
        explicitDataTypePhysicalVariationList);
    Object[][] randomRows = rowSource.randomRows(100000);
    if (columnScalarMode == ColumnScalarMode.COLUMN_COLUMN || columnScalarMode == ColumnScalarMode.SCALAR_COLUMN) {
        // Fixup numbers to limit the range to 0 ... N-1.
        for (int i = 0; i < randomRows.length; i++) {
            Object[] row = randomRows[i];
            if (row[columnNum - 2] != null) {
                row[columnNum - 2] = smallerRange(random, integerPrimitiveCategory, /* wantWritable */ true);
            }
        }
    }
    VectorRandomBatchSource batchSource = VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);
    String[] outputScratchTypeNames = new String[] { "date" };
    VectorizedRowBatchCtx batchContext = new VectorizedRowBatchCtx(columnNames,
        rowSource.typeInfos(), rowSource.dataTypePhysicalVariations(),
        /* dataColumnNums */ null, /* partitionColumnCount */ 0, /* virtualColumnCount */ 0,
        /* neededVirtualColumns */ null, outputScratchTypeNames, null);
    final int rowCount = randomRows.length;
    Object[][] resultObjectsArray = new Object[DateAddSubTestMode.count][];
    for (int i = 0; i < DateAddSubTestMode.count; i++) {
        Object[] resultObjects = new Object[rowCount];
        resultObjectsArray[i] = resultObjects;
        GenericUDF udf = (isAdd ? new GenericUDFDateAdd() : new GenericUDFDateSub());
        ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc(TypeInfoFactory.dateTypeInfo, udf, children);
        DateAddSubTestMode dateAddSubTestMode = DateAddSubTestMode.values()[i];
        switch(dateAddSubTestMode) {
            case ROW_MODE:
                doRowDateAddSubTest(dateTimeStringTypeInfo, integerTypeInfo, columns, children, isAdd, exprDesc, randomRows, columnScalarMode, rowSource.rowStructObjectInspector(), resultObjects);
                break;
            case ADAPTOR:
            case VECTOR_EXPRESSION:
                doVectorDateAddSubTest(dateTimeStringTypeInfo, integerTypeInfo, columns, rowSource.typeInfos(), children, isAdd, exprDesc, dateAddSubTestMode, columnScalarMode, batchSource, batchContext, resultObjects);
                break;
            default:
                throw new RuntimeException("Unexpected IF statement test mode " + dateAddSubTestMode);
        }
    }
    for (int i = 0; i < rowCount; i++) {
        // Row-mode is the expected value.
        Object expectedResult = resultObjectsArray[0][i];
        for (int v = 1; v < DateAddSubTestMode.count; v++) {
            Object vectorResult = resultObjectsArray[v][i];
            if (expectedResult == null || vectorResult == null) {
                if (expectedResult != null || vectorResult != null) {
                    Assert.fail("Row " + i + " " + DateAddSubTestMode.values()[v] + " isAdd " + isAdd + " " + columnScalarMode + " result is NULL " + (vectorResult == null) + " does not match row-mode expected result is NULL " + (expectedResult == null) + " row values " + Arrays.toString(randomRows[i]));
                }
            } else {
                if (!expectedResult.equals(vectorResult)) {
                    Assert.fail("Row " + i + " " + DateAddSubTestMode.values()[v] + " isAdd " + isAdd + " " + columnScalarMode + " result " + vectorResult.toString() + " (" + vectorResult.getClass().getSimpleName() + ")" + " does not match row-mode expected result " + expectedResult.toString() + " (" + expectedResult.getClass().getSimpleName() + ")" + " row values " + Arrays.toString(randomRows[i]));
                }
            }
        }
    }
}
Also used : ArrayList(java.util.ArrayList) GenericUDFDateAdd(org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateAdd) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) DataTypePhysicalVariation(org.apache.hadoop.hive.common.type.DataTypePhysicalVariation) ExprNodeColumnDesc(org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) PrimitiveCategory(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory) ExprNodeConstantDesc(org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc) VectorRandomBatchSource(org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource) GenericUDFDateSub(org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateSub) ExprNodeGenericFuncDesc(org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) GenerationSpec(org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec) VectorizedRowBatchCtx(org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx) GenericUDF(org.apache.hadoop.hive.ql.udf.generic.GenericUDF) VectorRandomRowSource(org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource)
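
To make the expression under test concrete: the snippet below assembles the same kind of date_add expression tree from one date column and one constant day count. It is a hypothetical sketch (the class name, column name "col1", and table alias are assumptions), built only from the plan and UDF classes listed above.

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateAdd;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class DateAddExprSketch {

    // date_add(col1, days): a date column plus a constant day count.
    public static ExprNodeGenericFuncDesc dateAddColumnScalar(int days) {
        ExprNodeDesc dateCol = new ExprNodeColumnDesc(TypeInfoFactory.dateTypeInfo, "col1", "table", false);
        ExprNodeDesc dayScalar = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, Integer.valueOf(days));
        List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
        children.add(dateCol);
        children.add(dayScalar);
        GenericUDF udf = new GenericUDFDateAdd();
        // The test declares the function's return type as date.
        return new ExprNodeGenericFuncDesc(TypeInfoFactory.dateTypeInfo, udf, children);
    }
}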

Example 8 with VectorRandomRowSource

Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource in project hive by apache.

From the class TestVectorStructField, the method doOneStructFieldTest:

private void doOneStructFieldTest(Random random, StructTypeInfo structTypeInfo, String structTypeName, int fieldIndex) throws Exception {
    List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
    List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList = new ArrayList<DataTypePhysicalVariation>();
    List<String> columns = new ArrayList<String>();
    int columnNum = 1;
    generationSpecList.add(GenerationSpec.createSameType(structTypeInfo));
    explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
    ExprNodeDesc col1Expr;
    String columnName = "col" + (columnNum++);
    col1Expr = new ExprNodeColumnDesc(structTypeInfo, columnName, "table", false);
    columns.add(columnName);
    ObjectInspector structObjectInspector = VectorRandomRowSource.getObjectInspector(structTypeInfo);
    List<ObjectInspector> objectInspectorList = new ArrayList<ObjectInspector>();
    objectInspectorList.add(structObjectInspector);
    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
    children.add(col1Expr);
    // ----------------------------------------------------------------------------------------------
    String[] columnNames = columns.toArray(new String[0]);
    VectorRandomRowSource rowSource = new VectorRandomRowSource();
    rowSource.initGenerationSpecSchema(random, generationSpecList,
        /* maxComplexDepth */ 0, /* allowNull */ true, /* isUnicodeOk */ true,
        explicitDataTypePhysicalVariationList);
    Object[][] randomRows = rowSource.randomRows(100000);
    VectorRandomBatchSource batchSource = VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);
    List<String> fieldNameList = structTypeInfo.getAllStructFieldNames();
    List<TypeInfo> fieldTypeInfoList = structTypeInfo.getAllStructFieldTypeInfos();
    String randomFieldName = fieldNameList.get(fieldIndex);
    TypeInfo outputTypeInfo = fieldTypeInfoList.get(fieldIndex);
    ExprNodeFieldDesc exprNodeFieldDesc = new ExprNodeFieldDesc(outputTypeInfo, col1Expr, randomFieldName, /* isList */ false);
    final int rowCount = randomRows.length;
    Object[][] resultObjectsArray = new Object[StructFieldTestMode.count][];
    for (int i = 0; i < StructFieldTestMode.count; i++) {
        Object[] resultObjects = new Object[rowCount];
        resultObjectsArray[i] = resultObjects;
        StructFieldTestMode structFieldTestMode = StructFieldTestMode.values()[i];
        switch(structFieldTestMode) {
            case ROW_MODE:
                doRowStructFieldTest(structTypeInfo, columns, children, exprNodeFieldDesc, randomRows, rowSource.rowStructObjectInspector(), outputTypeInfo, resultObjects);
                break;
            case VECTOR_EXPRESSION:
                doVectorStructFieldTest(structTypeInfo, columns, columnNames, rowSource.typeInfos(), rowSource.dataTypePhysicalVariations(), children, exprNodeFieldDesc, structFieldTestMode, batchSource, exprNodeFieldDesc.getWritableObjectInspector(), outputTypeInfo, resultObjects);
                break;
            default:
                throw new RuntimeException("Unexpected struct field test mode " + structFieldTestMode);
        }
    }
    for (int i = 0; i < rowCount; i++) {
        // Row-mode is the expected value.
        Object expectedResult = resultObjectsArray[0][i];
        for (int v = 1; v < StructFieldTestMode.count; v++) {
            Object vectorResult = resultObjectsArray[v][i];
            if (expectedResult == null || vectorResult == null) {
                if (expectedResult != null || vectorResult != null) {
                    Assert.fail("Row " + i + " structTypeName " + structTypeName + " outputTypeName " + outputTypeInfo.getTypeName() + " " + StructFieldTestMode.values()[v] + " result is NULL " + (vectorResult == null) + " does not match row-mode expected result is NULL " + (expectedResult == null) + " row values " + Arrays.toString(randomRows[i]));
                }
            } else {
                if (!expectedResult.equals(vectorResult)) {
                    Assert.fail("Row " + i + " structTypeName " + structTypeName + " outputTypeName " + outputTypeInfo.getTypeName() + " " + StructFieldTestMode.values()[v] + " result " + vectorResult.toString() + " (" + vectorResult.getClass().getSimpleName() + ")" + " does not match row-mode expected result " + expectedResult.toString() + " (" + expectedResult.getClass().getSimpleName() + ")" + " row values " + Arrays.toString(randomRows[i]));
                }
            }
        }
    }
}
Also used : ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) VectorRandomBatchSource(org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource) ArrayList(java.util.ArrayList) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) GenerationSpec(org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec) DataTypePhysicalVariation(org.apache.hadoop.hive.common.type.DataTypePhysicalVariation) ExprNodeFieldDesc(org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc) ExprNodeColumnDesc(org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) VectorRandomRowSource(org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource)
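
The heart of this test is the ExprNodeFieldDesc that selects one field out of a struct column. Below is a minimal sketch of that construction, with a hypothetical class name and the column name "col1" assumed; it uses only calls visible in the example above.

import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;
import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;

public class StructFieldExprSketch {

    // Select field number fieldIndex from a struct column, as the test does for one random field.
    public static ExprNodeFieldDesc fieldAccess(StructTypeInfo structTypeInfo, int fieldIndex) {
        ExprNodeDesc structCol = new ExprNodeColumnDesc(structTypeInfo, "col1", "table", false);
        String fieldName = structTypeInfo.getAllStructFieldNames().get(fieldIndex);
        TypeInfo fieldTypeInfo = structTypeInfo.getAllStructFieldTypeInfos().get(fieldIndex);
        // isList is false because a single struct field is selected, not a list element.
        return new ExprNodeFieldDesc(fieldTypeInfo, structCol, fieldName, /* isList */ false);
    }
}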

Example 9 with VectorRandomRowSource

Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource in project hive by apache.

From the class TestVectorDateDiff, the method doDateDiffTestsWithDiffColumnScalar:

private void doDateDiffTestsWithDiffColumnScalar(Random random, String dateTimeStringTypeName1, String dateTimeStringTypeName2, ColumnScalarMode columnScalarMode) throws Exception {
    TypeInfo dateTimeStringTypeInfo1 = TypeInfoUtils.getTypeInfoFromTypeString(dateTimeStringTypeName1);
    PrimitiveCategory dateTimeStringPrimitiveCategory1 = ((PrimitiveTypeInfo) dateTimeStringTypeInfo1).getPrimitiveCategory();
    boolean isStringFamily1 = (dateTimeStringPrimitiveCategory1 == PrimitiveCategory.STRING || dateTimeStringPrimitiveCategory1 == PrimitiveCategory.CHAR || dateTimeStringPrimitiveCategory1 == PrimitiveCategory.VARCHAR);
    TypeInfo dateTimeStringTypeInfo2 = TypeInfoUtils.getTypeInfoFromTypeString(dateTimeStringTypeName2);
    PrimitiveCategory dateTimeStringPrimitiveCategory2 = ((PrimitiveTypeInfo) dateTimeStringTypeInfo2).getPrimitiveCategory();
    boolean isStringFamily2 = (dateTimeStringPrimitiveCategory2 == PrimitiveCategory.STRING || dateTimeStringPrimitiveCategory2 == PrimitiveCategory.CHAR || dateTimeStringPrimitiveCategory2 == PrimitiveCategory.VARCHAR);
    List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
    List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList = new ArrayList<DataTypePhysicalVariation>();
    List<String> columns = new ArrayList<String>();
    int columnNum = 1;
    ExprNodeDesc col1Expr;
    if (columnScalarMode == ColumnScalarMode.COLUMN_COLUMN || columnScalarMode == ColumnScalarMode.COLUMN_SCALAR) {
        if (!isStringFamily1) {
            generationSpecList.add(GenerationSpec.createSameType(dateTimeStringTypeInfo1));
        } else {
            generationSpecList.add(GenerationSpec.createStringFamilyOtherTypeValue(dateTimeStringTypeInfo1, TypeInfoFactory.dateTypeInfo));
        }
        explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
        String columnName = "col" + (columnNum++);
        col1Expr = new ExprNodeColumnDesc(dateTimeStringTypeInfo1, columnName, "table", false);
        columns.add(columnName);
    } else {
        Object scalar1Object;
        if (!isStringFamily1) {
            scalar1Object = VectorRandomRowSource.randomPrimitiveObject(random, (PrimitiveTypeInfo) dateTimeStringTypeInfo1);
        } else {
            scalar1Object = VectorRandomRowSource.randomStringFamilyOtherTypeValue(random, dateTimeStringTypeInfo1, TypeInfoFactory.dateTypeInfo, false);
        }
        col1Expr = new ExprNodeConstantDesc(dateTimeStringTypeInfo1, scalar1Object);
    }
    ExprNodeDesc col2Expr;
    if (columnScalarMode == ColumnScalarMode.COLUMN_COLUMN || columnScalarMode == ColumnScalarMode.SCALAR_COLUMN) {
        if (!isStringFamily2) {
            generationSpecList.add(GenerationSpec.createSameType(dateTimeStringTypeInfo2));
        } else {
            generationSpecList.add(GenerationSpec.createStringFamilyOtherTypeValue(dateTimeStringTypeInfo2, TypeInfoFactory.dateTypeInfo));
        }
        explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
        String columnName = "col" + (columnNum++);
        col2Expr = new ExprNodeColumnDesc(dateTimeStringTypeInfo2, columnName, "table", false);
        columns.add(columnName);
    } else {
        Object scalar2Object;
        if (!isStringFamily2) {
            scalar2Object = VectorRandomRowSource.randomPrimitiveObject(random, (PrimitiveTypeInfo) dateTimeStringTypeInfo2);
        } else {
            scalar2Object = VectorRandomRowSource.randomStringFamilyOtherTypeValue(random, dateTimeStringTypeInfo2, TypeInfoFactory.dateTypeInfo, false);
        }
        col2Expr = new ExprNodeConstantDesc(dateTimeStringTypeInfo2, scalar2Object);
    }
    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
    children.add(col1Expr);
    children.add(col2Expr);
    // ----------------------------------------------------------------------------------------------
    String[] columnNames = columns.toArray(new String[0]);
    VectorRandomRowSource rowSource = new VectorRandomRowSource();
    rowSource.initGenerationSpecSchema(random, generationSpecList,
        /* maxComplexDepth */ 0, /* allowNull */ true, /* isUnicodeOk */ true,
        explicitDataTypePhysicalVariationList);
    Object[][] randomRows = rowSource.randomRows(100000);
    VectorRandomBatchSource batchSource = VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);
    String[] outputScratchTypeNames = new String[] { "date" };
    VectorizedRowBatchCtx batchContext = new VectorizedRowBatchCtx(columnNames,
        rowSource.typeInfos(), rowSource.dataTypePhysicalVariations(),
        /* dataColumnNums */ null, /* partitionColumnCount */ 0, /* virtualColumnCount */ 0,
        /* neededVirtualColumns */ null, outputScratchTypeNames, null);
    final int rowCount = randomRows.length;
    Object[][] resultObjectsArray = new Object[DateDiffTestMode.count][];
    for (int i = 0; i < DateDiffTestMode.count; i++) {
        Object[] resultObjects = new Object[rowCount];
        resultObjectsArray[i] = resultObjects;
        GenericUDF udf = new GenericUDFDateDiff();
        ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc(TypeInfoFactory.intTypeInfo, udf, children);
        DateDiffTestMode dateDiffTestMode = DateDiffTestMode.values()[i];
        switch(dateDiffTestMode) {
            case ROW_MODE:
                doRowDateAddSubTest(dateTimeStringTypeInfo1, dateTimeStringTypeInfo2, columns, children, exprDesc, randomRows, columnScalarMode, rowSource.rowStructObjectInspector(), resultObjects);
                break;
            case ADAPTOR:
            case VECTOR_EXPRESSION:
                doVectorDateAddSubTest(dateTimeStringTypeInfo1, dateTimeStringTypeInfo2, columns, rowSource.typeInfos(), children, exprDesc, dateDiffTestMode, columnScalarMode, batchSource, batchContext, resultObjects);
                break;
            default:
                throw new RuntimeException("Unexpected IF statement test mode " + dateDiffTestMode);
        }
    }
    for (int i = 0; i < rowCount; i++) {
        // Row-mode is the expected value.
        Object expectedResult = resultObjectsArray[0][i];
        for (int v = 1; v < DateDiffTestMode.count; v++) {
            Object vectorResult = resultObjectsArray[v][i];
            if (expectedResult == null || vectorResult == null) {
                if (expectedResult != null || vectorResult != null) {
                    Assert.fail("Row " + i + " " + DateDiffTestMode.values()[v] + " " + columnScalarMode + " result is NULL " + (vectorResult == null) + " does not match row-mode expected result is NULL " + (expectedResult == null) + " row values " + Arrays.toString(randomRows[i]));
                }
            } else {
                if (!expectedResult.equals(vectorResult)) {
                    Assert.fail("Row " + i + " " + DateDiffTestMode.values()[v] + " " + columnScalarMode + " result " + vectorResult.toString() + " (" + vectorResult.getClass().getSimpleName() + ")" + " does not match row-mode expected result " + expectedResult.toString() + " (" + expectedResult.getClass().getSimpleName() + ")" + " row values " + Arrays.toString(randomRows[i]));
                }
            }
        }
    }
}
Also used : ArrayList(java.util.ArrayList) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) DataTypePhysicalVariation(org.apache.hadoop.hive.common.type.DataTypePhysicalVariation) ExprNodeColumnDesc(org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) PrimitiveCategory(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory) ExprNodeConstantDesc(org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc) VectorRandomBatchSource(org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource) ExprNodeGenericFuncDesc(org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) GenerationSpec(org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec) GenericUDFDateDiff(org.apache.hadoop.hive.ql.udf.generic.GenericUDFDateDiff) VectorizedRowBatchCtx(org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx) GenericUDF(org.apache.hadoop.hive.ql.udf.generic.GenericUDF) VectorRandomRowSource(org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource)
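
Each example ends with the same verification loop: index 0 of resultObjectsArray holds the row-mode (reference) results, and every later index holds the results of a vectorized mode that must match them. Below is a stripped-down sketch of that check; the class name is hypothetical and a plain AssertionError stands in for JUnit's Assert.fail.

public class ResultComparisonSketch {

    // resultObjectsArray[0] is the row-mode reference; later slots are vectorized modes.
    public static void assertAllModesMatch(Object[][] resultObjectsArray, int rowCount) {
        for (int i = 0; i < rowCount; i++) {
            Object expectedResult = resultObjectsArray[0][i];
            for (int v = 1; v < resultObjectsArray.length; v++) {
                Object vectorResult = resultObjectsArray[v][i];
                if (expectedResult == null || vectorResult == null) {
                    // Fail only if exactly one side is NULL.
                    if (expectedResult != vectorResult) {
                        throw new AssertionError("Row " + i + " mode " + v
                            + " NULL mismatch: expected " + expectedResult + ", got " + vectorResult);
                    }
                } else if (!expectedResult.equals(vectorResult)) {
                    throw new AssertionError("Row " + i + " mode " + v
                        + " result " + vectorResult + " does not match expected " + expectedResult);
                }
            }
        }
    }
}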

Example 10 with VectorRandomRowSource

Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource in project hive by apache.

From the class TestVectorNull, the method doIsNullOnRandomDataType:

private boolean doIsNullOnRandomDataType(Random random, String functionName, boolean isFilter) throws Exception {
    String typeName;
    if (functionName.equals("not")) {
        typeName = "boolean";
    } else {
        typeName = VectorRandomRowSource.getRandomTypeName(random, SupportedTypes.ALL,
            /* allowedTypeNameSet */ null);
        typeName = VectorRandomRowSource.getDecoratedTypeName(random, typeName, SupportedTypes.ALL,
            /* allowedTypeNameSet */ null, /* depth */ 0, /* maxDepth */ 2);
    }
    TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(typeName);
    // ----------------------------------------------------------------------------------------------
    ObjectInspector objectInspector = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(typeInfo);
    // ----------------------------------------------------------------------------------------------
    GenerationSpec generationSpec = GenerationSpec.createSameType(typeInfo);
    List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
    List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList = new ArrayList<DataTypePhysicalVariation>();
    generationSpecList.add(generationSpec);
    explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
    VectorRandomRowSource rowSource = new VectorRandomRowSource();
    rowSource.initGenerationSpecSchema(random, generationSpecList,
        /* maxComplexDepth */ 0, /* allowNull */ true, /* isUnicodeOk */ true,
        explicitDataTypePhysicalVariationList);
    List<String> columns = new ArrayList<String>();
    columns.add("col1");
    ExprNodeColumnDesc col1Expr = new ExprNodeColumnDesc(typeInfo, "col1", "table", false);
    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
    children.add(col1Expr);
    String[] columnNames = columns.toArray(new String[0]);
    Object[][] randomRows = rowSource.randomRows(100000);
    VectorRandomBatchSource batchSource = VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);
    final GenericUDF udf;
    final ObjectInspector outputObjectInspector;
    switch(functionName) {
        case "isnull":
            udf = new GenericUDFOPNull();
            break;
        case "isnotnull":
            udf = new GenericUDFOPNotNull();
            break;
        case "not":
            udf = new GenericUDFOPNot();
            break;
        default:
            throw new RuntimeException("Unexpected function name " + functionName);
    }
    ObjectInspector[] argumentOIs = new ObjectInspector[] { objectInspector };
    outputObjectInspector = udf.initialize(argumentOIs);
    TypeInfo outputTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(outputObjectInspector);
    ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, udf, children);
    final int rowCount = randomRows.length;
    Object[][] resultObjectsArray = new Object[NullTestMode.count][];
    for (int i = 0; i < NullTestMode.count; i++) {
        Object[] resultObjects = new Object[rowCount];
        resultObjectsArray[i] = resultObjects;
        NullTestMode nullTestMode = NullTestMode.values()[i];
        switch(nullTestMode) {
            case ROW_MODE:
                if (!doRowCastTest(typeInfo, isFilter, columns, children, udf, exprDesc, randomRows, rowSource.rowStructObjectInspector(), resultObjects)) {
                    return false;
                }
                break;
            case ADAPTOR:
            case VECTOR_EXPRESSION:
                if (!doVectorCastTest(typeInfo, isFilter, columns, columnNames, rowSource.typeInfos(), rowSource.dataTypePhysicalVariations(), children, udf, exprDesc, nullTestMode, batchSource, exprDesc.getWritableObjectInspector(), outputTypeInfo, resultObjects)) {
                    return false;
                }
                break;
            default:
                throw new RuntimeException("Unexpected IF statement test mode " + nullTestMode);
        }
    }
    for (int i = 0; i < rowCount; i++) {
        // Row-mode is the expected value.
        Object expectedResult = resultObjectsArray[0][i];
        for (int v = 1; v < NullTestMode.count; v++) {
            Object vectorResult = resultObjectsArray[v][i];
            NullTestMode nullTestMode = NullTestMode.values()[v];
            if (isFilter && expectedResult == null && vectorResult != null) {
                // For a filter, row mode returns NULL for a filtered-out row, so a non-null
                // vector result is acceptable only if it is false.
                boolean vectorBoolean = ((BooleanWritable) vectorResult).get();
                if (vectorBoolean) {
                    Assert.fail("Row " + i + " typeName " + typeName + " outputTypeName " + outputTypeInfo.getTypeName() + " isFilter " + isFilter + " " + nullTestMode + " result is NOT NULL and true" + " does not match row-mode expected result is NULL which means false here" + " row values " + Arrays.toString(randomRows[i]) + " exprDesc " + exprDesc.toString());
                }
            } else if (expectedResult == null || vectorResult == null) {
                if (expectedResult != null || vectorResult != null) {
                    Assert.fail("Row " + i + " sourceTypeName " + typeName + " isFilter " + isFilter + " " + nullTestMode + " result is NULL " + (vectorResult == null ? "YES" : "NO result " + vectorResult.toString()) + " does not match row-mode expected result is NULL " + (expectedResult == null ? "YES" : "NO result " + expectedResult.toString()) + " row values " + Arrays.toString(randomRows[i]) + " exprDesc " + exprDesc.toString());
                }
            } else {
                if (!expectedResult.equals(vectorResult)) {
                    Assert.fail("Row " + i + " sourceTypeName " + typeName + " isFilter " + isFilter + " " + nullTestMode + " result " + vectorResult.toString() + " (" + vectorResult.getClass().getSimpleName() + ")" + " does not match row-mode expected result " + expectedResult.toString() + " (" + expectedResult.getClass().getSimpleName() + ")" + " row values " + Arrays.toString(randomRows[i]) + " exprDesc " + exprDesc.toString());
                }
            }
        }
    }
    return true;
}
Also used : ArrayList(java.util.ArrayList) GenericUDFOPNotNull(org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotNull) DataTypePhysicalVariation(org.apache.hadoop.hive.common.type.DataTypePhysicalVariation) ExprNodeColumnDesc(org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) ConstantObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector) GenericUDFOPNull(org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull) VectorRandomBatchSource(org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource) ExprNodeGenericFuncDesc(org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) GenerationSpec(org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec) GenericUDF(org.apache.hadoop.hive.ql.udf.generic.GenericUDF) BooleanWritable(org.apache.hadoop.io.BooleanWritable) GenericUDFOPNot(org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNot) VectorRandomRowSource(org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource)
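
A recurring step in these tests is mapping a function name to its GenericUDF and calling initialize(), which is the only way to obtain the return object inspector. Below is a minimal sketch of that step, with hypothetical class and method names, using only the UDF classes listed above.

import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNot;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotNull;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class NullUdfInitSketch {

    // Map the function name to its UDF, initialize it, and derive the output TypeInfo.
    public static TypeInfo initAndGetOutputType(String functionName, ObjectInspector argumentOI)
            throws Exception {
        final GenericUDF udf;
        switch (functionName) {
            case "isnull":
                udf = new GenericUDFOPNull();
                break;
            case "isnotnull":
                udf = new GenericUDFOPNotNull();
                break;
            case "not":
                udf = new GenericUDFOPNot();
                break;
            default:
                throw new IllegalArgumentException("Unexpected function name " + functionName);
        }
        // initialize() is the only way to obtain the return object inspector.
        ObjectInspector outputObjectInspector = udf.initialize(new ObjectInspector[] { argumentOI });
        return TypeInfoUtils.getTypeInfoFromObjectInspector(outputObjectInspector);
    }
}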

Aggregations

VectorRandomRowSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource): 50 uses
Random (java.util.Random): 24 uses
VerifyFastRowHashMap (org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.CheckFastRowHashMap.VerifyFastRowHashMap): 24 uses
Test (org.junit.Test): 24 uses
TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo): 20 uses
ArrayList (java.util.ArrayList): 19 uses
DataTypePhysicalVariation (org.apache.hadoop.hive.common.type.DataTypePhysicalVariation): 19 uses
VectorRandomBatchSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource): 19 uses
ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc): 19 uses
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc): 19 uses
GenerationSpec (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec): 18 uses
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 17 uses
DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo): 16 uses
GenericUDF (org.apache.hadoop.hive.ql.udf.generic.GenericUDF): 15 uses
VectorizedRowBatchCtx (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx): 12 uses
ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc): 12 uses
ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc): 11 uses
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 11 uses
PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory): 8 uses
HiveConf (org.apache.hadoop.hive.conf.HiveConf): 7 uses