Example 11 with VectorRandomRowSource

Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource in project hive by apache.

From the class TestVectorNull, the method doVectorCastTest:

private boolean doVectorCastTest(TypeInfo typeInfo, boolean isFilter, List<String> columns, String[] columnNames, TypeInfo[] typeInfos, DataTypePhysicalVariation[] dataTypePhysicalVariations, List<ExprNodeDesc> children, GenericUDF udf, ExprNodeGenericFuncDesc exprDesc, NullTestMode nullTestMode, VectorRandomBatchSource batchSource, ObjectInspector objectInspector, TypeInfo outputTypeInfo, Object[] resultObjects) throws Exception {
    HiveConf hiveConf = new HiveConf();
    if (nullTestMode == NullTestMode.ADAPTOR) {
        hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_TEST_VECTOR_ADAPTOR_OVERRIDE, true);
    }
    VectorizationContext vectorizationContext = new VectorizationContext("name", columns, Arrays.asList(typeInfos), Arrays.asList(dataTypePhysicalVariations), hiveConf);
    VectorExpression vectorExpression = vectorizationContext.getVectorExpression(exprDesc, (isFilter ? VectorExpressionDescriptor.Mode.FILTER : VectorExpressionDescriptor.Mode.PROJECTION));
    vectorExpression.transientInit(hiveConf);
    if (nullTestMode == NullTestMode.VECTOR_EXPRESSION && vectorExpression instanceof VectorUDFAdaptor) {
        System.out.println("*NO NATIVE VECTOR EXPRESSION* typeInfo " + typeInfo.toString() + " nullTestMode " + nullTestMode + " isFilter " + isFilter + " vectorExpression " + vectorExpression.toString());
    }
    // System.out.println("*VECTOR EXPRESSION* " + vectorExpression.getClass().getSimpleName());
    /*
    System.out.println(
        "*DEBUG* typeInfo " + typeInfo.toString() +
        " nullTestMode " + nullTestMode +
        " isFilter " + isFilter +
        " vectorExpression " + vectorExpression.toString());
    */
    VectorRandomRowSource rowSource = batchSource.getRowSource();
    VectorizedRowBatchCtx batchContext = new VectorizedRowBatchCtx(
        columnNames,
        rowSource.typeInfos(),
        rowSource.dataTypePhysicalVariations(),
        /* dataColumnNums */ null,
        /* partitionColumnCount */ 0,
        /* virtualColumnCount */ 0,
        /* neededVirtualColumns */ null,
        vectorizationContext.getScratchColumnTypeNames(),
        vectorizationContext.getScratchDataTypePhysicalVariations());
    VectorizedRowBatch batch = batchContext.createVectorizedRowBatch();
    VectorExtractRow resultVectorExtractRow = null;
    Object[] scratchRow = null;
    if (!isFilter) {
        resultVectorExtractRow = new VectorExtractRow();
        final int outputColumnNum = vectorExpression.getOutputColumnNum();
        resultVectorExtractRow.init(new TypeInfo[] { outputTypeInfo }, new int[] { outputColumnNum });
        scratchRow = new Object[1];
    }
    boolean copySelectedInUse = false;
    int[] copySelected = new int[VectorizedRowBatch.DEFAULT_SIZE];
    batchSource.resetBatchIteration();
    int rowIndex = 0;
    while (true) {
        if (!batchSource.fillNextBatch(batch)) {
            break;
        }
        final int originalBatchSize = batch.size;
        if (isFilter) {
            copySelectedInUse = batch.selectedInUse;
            if (batch.selectedInUse) {
                System.arraycopy(batch.selected, 0, copySelected, 0, originalBatchSize);
            }
        }
        // In filter mode, the batch size can be made smaller.
        vectorExpression.evaluate(batch);
        if (!isFilter) {
            extractResultObjects(batch, rowIndex, resultVectorExtractRow, scratchRow, objectInspector, resultObjects);
        } else {
            final int currentBatchSize = batch.size;
            if (copySelectedInUse && batch.selectedInUse) {
                int selectIndex = 0;
                for (int i = 0; i < originalBatchSize; i++) {
                    final int originalBatchIndex = copySelected[i];
                    final boolean booleanResult;
                    if (selectIndex < currentBatchSize && batch.selected[selectIndex] == originalBatchIndex) {
                        booleanResult = true;
                        selectIndex++;
                    } else {
                        booleanResult = false;
                    }
                    resultObjects[rowIndex + i] = new BooleanWritable(booleanResult);
                }
            } else if (batch.selectedInUse) {
                int selectIndex = 0;
                for (int i = 0; i < originalBatchSize; i++) {
                    final boolean booleanResult;
                    if (selectIndex < currentBatchSize && batch.selected[selectIndex] == i) {
                        booleanResult = true;
                        selectIndex++;
                    } else {
                        booleanResult = false;
                    }
                    resultObjects[rowIndex + i] = new BooleanWritable(booleanResult);
                }
            } else if (currentBatchSize == 0) {
                // Whole batch got zapped.
                for (int i = 0; i < originalBatchSize; i++) {
                    resultObjects[rowIndex + i] = new BooleanWritable(false);
                }
            } else {
                // Every row kept.
                for (int i = 0; i < originalBatchSize; i++) {
                    resultObjects[rowIndex + i] = new BooleanWritable(true);
                }
            }
        }
        rowIndex += originalBatchSize;
    }
    return true;
}
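
The method above shows the standard compile-and-evaluate skeleton: an ExprNodeGenericFuncDesc is turned into a VectorExpression by a VectorizationContext, then evaluated one VectorizedRowBatch at a time. A minimal sketch of just that skeleton, using only calls that appear in this example (columns, typeInfos, dataTypePhysicalVariations, exprDesc, batchContext, and batchSource are assumed to be prepared as above):

// Sketch: compile the expression plan node into a vectorized expression.
HiveConf hiveConf = new HiveConf();
VectorizationContext vContext = new VectorizationContext(
    "name", columns, Arrays.asList(typeInfos),
    Arrays.asList(dataTypePhysicalVariations), hiveConf);
// PROJECTION mode writes results into the scratch column reported by
// getOutputColumnNum(); FILTER mode instead shrinks batch.selected.
VectorExpression vExpr = vContext.getVectorExpression(
    exprDesc, VectorExpressionDescriptor.Mode.PROJECTION);
vExpr.transientInit(hiveConf);

VectorizedRowBatch batch = batchContext.createVectorizedRowBatch();
batchSource.resetBatchIteration();
while (batchSource.fillNextBatch(batch)) {
    vExpr.evaluate(batch);  // results land in column vExpr.getOutputColumnNum()
}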
Also used: VectorizationContext (org.apache.hadoop.hive.ql.exec.vector.VectorizationContext), VectorUDFAdaptor (org.apache.hadoop.hive.ql.exec.vector.udf.VectorUDFAdaptor), VectorExtractRow (org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow), VectorizedRowBatchCtx (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx), VectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch), BooleanWritable (org.apache.hadoop.io.BooleanWritable), HiveConf (org.apache.hadoop.hive.conf.HiveConf), VectorExpression (org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression), VectorRandomRowSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource)

Example 12 with VectorRandomRowSource

Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource in project hive by apache.

From the class TestVectorSubStr, the method doTests:

private void doTests(Random random, boolean useLength) throws Exception {
    String typeName = "string";
    TypeInfo typeInfo = TypeInfoFactory.stringTypeInfo;
    TypeInfo targetTypeInfo = typeInfo;
    String functionName = "substr";
    List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
    List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList = new ArrayList<DataTypePhysicalVariation>();
    List<String> columns = new ArrayList<String>();
    int columnNum = 1;
    ExprNodeDesc col1Expr;
    StringGenerationOption stringGenerationOption = new StringGenerationOption(true, true);
    generationSpecList.add(GenerationSpec.createStringFamily(typeInfo, stringGenerationOption));
    explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
    String columnName = "col" + (columnNum++);
    col1Expr = new ExprNodeColumnDesc(typeInfo, columnName, "table", false);
    columns.add(columnName);
    VectorRandomRowSource rowSource = new VectorRandomRowSource();
    rowSource.initGenerationSpecSchema(
        random, generationSpecList,
        /* maxComplexDepth */ 0,
        /* allowNull */ true,
        /* isUnicodeOk */ true,
        explicitDataTypePhysicalVariationList);
    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
    children.add(col1Expr);
    final int position = 10 - random.nextInt(21);
    Object scalar2Object = Integer.valueOf(position);
    ExprNodeDesc col2Expr = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, scalar2Object);
    children.add(col2Expr);
    if (useLength) {
        Object scalar3Object = random.nextInt(12);
        ExprNodeDesc col3Expr = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, scalar3Object);
        children.add(col3Expr);
    }
    // ----------------------------------------------------------------------------------------------
    String[] columnNames = columns.toArray(new String[0]);
    String[] outputScratchTypeNames = new String[] { targetTypeInfo.getTypeName() };
    DataTypePhysicalVariation[] outputDataTypePhysicalVariations = new DataTypePhysicalVariation[] { DataTypePhysicalVariation.NONE };
    VectorizedRowBatchCtx batchContext = new VectorizedRowBatchCtx(
        columnNames,
        rowSource.typeInfos(),
        rowSource.dataTypePhysicalVariations(),
        /* dataColumnNums */ null,
        /* partitionColumnCount */ 0,
        /* virtualColumnCount */ 0,
        /* neededVirtualColumns */ null,
        outputScratchTypeNames,
        outputDataTypePhysicalVariations);
    Object[][] randomRows = rowSource.randomRows(100000);
    VectorRandomBatchSource batchSource = VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);
    GenericUDF genericUdf;
    FunctionInfo funcInfo = null;
    try {
        funcInfo = FunctionRegistry.getFunctionInfo(functionName);
    } catch (SemanticException e) {
        Assert.fail("Failed to load " + functionName + " " + e);
    }
    genericUdf = funcInfo.getGenericUDF();
    final int rowCount = randomRows.length;
    Object[][] resultObjectsArray = new Object[SubStrTestMode.count][];
    for (int i = 0; i < SubStrTestMode.count; i++) {
        Object[] resultObjects = new Object[rowCount];
        resultObjectsArray[i] = resultObjects;
        SubStrTestMode subStrTestMode = SubStrTestMode.values()[i];
        switch(subStrTestMode) {
            case ROW_MODE:
                doRowIfTest(typeInfo, targetTypeInfo, columns, children, randomRows, rowSource.rowStructObjectInspector(), genericUdf, resultObjects);
                break;
            case ADAPTOR:
            case VECTOR_EXPRESSION:
                doVectorIfTest(typeInfo, targetTypeInfo, columns, rowSource.typeInfos(), rowSource.dataTypePhysicalVariations(), children, subStrTestMode, batchSource, batchContext, genericUdf, resultObjects);
                break;
            default:
                throw new RuntimeException("Unexpected STRING Unary test mode " + subStrTestMode);
        }
    }
    for (int i = 0; i < rowCount; i++) {
        // Row-mode is the expected value.
        Object expectedResult = resultObjectsArray[0][i];
        for (int v = 1; v < SubStrTestMode.count; v++) {
            Object vectorResult = resultObjectsArray[v][i];
            if (expectedResult == null || vectorResult == null) {
                if (expectedResult != null || vectorResult != null) {
                    Assert.fail("Row " + i + " " + SubStrTestMode.values()[v] + " result is NULL " + (vectorResult == null ? "YES" : "NO result " + vectorResult.toString()) + " does not match row-mode expected result is NULL " + (expectedResult == null ? "YES" : "NO result " + expectedResult.toString()) + " row values " + Arrays.toString(randomRows[i]));
                }
            } else {
                if (!expectedResult.equals(vectorResult)) {
                    Assert.fail("Row " + i + " " + SubStrTestMode.values()[v] + " result " + vectorResult.toString() + " (" + vectorResult.getClass().getSimpleName() + ")" + " does not match row-mode expected result " + expectedResult.toString() + " (" + expectedResult.getClass().getSimpleName() + ")" + " row values " + Arrays.toString(randomRows[i]));
                }
            }
        }
    }
}
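
The schema setup here generalizes to any column mix: declare one GenerationSpec per column, initialize a VectorRandomRowSource from the spec list, and derive both the random rows and the batch source from that single object. A condensed, hypothetical setup for one random string column (the fixed seed and the smaller row count are illustrative only):

// Sketch: one random string column driving a VectorRandomRowSource.
Random random = new Random(12345);  // illustrative seed
List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
List<DataTypePhysicalVariation> variationList = new ArrayList<DataTypePhysicalVariation>();
generationSpecList.add(GenerationSpec.createStringFamily(
    TypeInfoFactory.stringTypeInfo, new StringGenerationOption(true, true)));
variationList.add(DataTypePhysicalVariation.NONE);

VectorRandomRowSource rowSource = new VectorRandomRowSource();
rowSource.initGenerationSpecSchema(
    random, generationSpecList,
    /* maxComplexDepth */ 0,
    /* allowNull */ true,
    /* isUnicodeOk */ true,
    variationList);

// Rows and batches both come from the same row source.
Object[][] randomRows = rowSource.randomRows(1000);  // smaller count for illustration
VectorRandomBatchSource batchSource =
    VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);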
Also used: ArrayList (java.util.ArrayList), DataTypePhysicalVariation (org.apache.hadoop.hive.common.type.DataTypePhysicalVariation), ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc), ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc), SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException), ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc), VectorRandomBatchSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource), FunctionInfo (org.apache.hadoop.hive.ql.exec.FunctionInfo), PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo), DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo), TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo), GenerationSpec (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec), VectorizedRowBatchCtx (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx), GenericUDF (org.apache.hadoop.hive.ql.udf.generic.GenericUDF), StringGenerationOption (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.StringGenerationOption), VectorRandomRowSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource)

Example 13 with VectorRandomRowSource

Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource in project hive by apache.

From the class TestVectorIndex, the method doIndexOnRandomDataType:

private boolean doIndexOnRandomDataType(Random random, boolean isList, String keyTypeName, String elementRootTypeName, boolean allowNulls, boolean isScalarIndex) throws Exception {
    String elementTypeName = VectorRandomRowSource.getDecoratedTypeName(
        random, elementRootTypeName, SupportedTypes.ALL,
        /* allowedTypeNameSet */ null,
        /* depth */ 0,
        /* maxDepth */ 3);
    TypeInfo elementTypeInfo = TypeInfoUtils.getTypeInfoFromTypeString(elementTypeName);
    ObjectInspector elementObjectInspector = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(elementTypeInfo);
    // ----------------------------------------------------------------------------------------------
    final TypeInfo keyTypeInfo;
    if (isList) {
        keyTypeInfo = TypeInfoFactory.intTypeInfo;
    } else {
        keyTypeInfo = TypeInfoUtils.getTypeInfoFromTypeString(keyTypeName);
    }
    final ObjectInspector keyObjectInspector = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(keyTypeInfo);
    Object exampleObject = (isList
        ? ((WritableIntObjectInspector) keyObjectInspector).create(0)
        : VectorRandomRowSource.randomWritable(random, keyTypeInfo, keyObjectInspector,
            DataTypePhysicalVariation.NONE, /* allowNull */ false));
    WritableComparator writableComparator = WritableComparator.get((Class<? extends WritableComparable>) exampleObject.getClass());
    final int allKeyCount = 10 + random.nextInt(10);
    final int keyCount = 5 + random.nextInt(allKeyCount / 2);
    List<Object> allKeyList = new ArrayList<Object>(allKeyCount);
    Set<Object> allKeyTreeSet = new TreeSet<Object>(writableComparator);
    int fillAllKeyCount = 0;
    while (fillAllKeyCount < allKeyCount) {
        Object object;
        if (isList) {
            WritableIntObjectInspector writableOI = (WritableIntObjectInspector) keyObjectInspector;
            int index = random.nextInt(keyCount);
            object = writableOI.create(index);
            while (allKeyTreeSet.contains(object)) {
                index = (random.nextBoolean() ? random.nextInt() : (random.nextBoolean() ? -1 : keyCount));
                object = writableOI.create(index);
            }
        } else {
            do {
                object = VectorRandomRowSource.randomWritable(random, keyTypeInfo,
                    keyObjectInspector, DataTypePhysicalVariation.NONE, /* allowNull */ false);
            } while (allKeyTreeSet.contains(object));
        }
        allKeyList.add(object);
        allKeyTreeSet.add(object);
        fillAllKeyCount++;
    }
    List<Object> keyList = new ArrayList<Object>();
    Set<Object> keyTreeSet = new TreeSet<Object>(writableComparator);
    int fillKeyCount = 0;
    while (fillKeyCount < keyCount) {
        Object newKey = allKeyList.get(random.nextInt(allKeyCount));
        if (keyTreeSet.contains(newKey)) {
            continue;
        }
        keyList.add(newKey);
        keyTreeSet.add(newKey);
        fillKeyCount++;
    }
    // ----------------------------------------------------------------------------------------------
    final TypeInfo typeInfo;
    if (isList) {
        ListTypeInfo listTypeInfo = new ListTypeInfo();
        listTypeInfo.setListElementTypeInfo(elementTypeInfo);
        typeInfo = listTypeInfo;
    } else {
        MapTypeInfo mapTypeInfo = new MapTypeInfo();
        mapTypeInfo.setMapKeyTypeInfo(keyTypeInfo);
        mapTypeInfo.setMapValueTypeInfo(elementTypeInfo);
        typeInfo = mapTypeInfo;
    }
    final String typeName = typeInfo.getTypeName();
    final ObjectInspector objectInspector = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(typeInfo);
    // ----------------------------------------------------------------------------------------------
    GenerationSpec generationSpec = GenerationSpec.createSameType(typeInfo);
    List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
    List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList = new ArrayList<DataTypePhysicalVariation>();
    List<String> columns = new ArrayList<String>();
    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
    int columnNum = 1;
    ExprNodeDesc keyColExpr;
    if (!isScalarIndex) {
        generationSpecList.add(GenerationSpec.createValueList(keyTypeInfo, keyList));
        explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
        String columnName = "col" + columnNum++;
        columns.add(columnName);
        keyColExpr = new ExprNodeColumnDesc(keyTypeInfo, columnName, "table", false);
    } else {
        Object scalarWritable = keyList.get(random.nextInt(keyCount));
        final Object scalarObject = VectorRandomRowSource.getNonWritableObject(scalarWritable, keyTypeInfo, keyObjectInspector);
        keyColExpr = new ExprNodeConstantDesc(keyTypeInfo, scalarObject);
    }
    /*
    System.out.println("*DEBUG* typeName " + typeName);
    System.out.println("*DEBUG* keyColExpr " + keyColExpr.toString());
    System.out.println("*DEBUG* keyList " + keyList.toString());
    System.out.println("*DEBUG* allKeyList " + allKeyList.toString());
    */
    generationSpecList.add(GenerationSpec.createValueList(typeInfo, keyList));
    explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
    String columnName = "col" + columnNum++;
    columns.add(columnName);
    ExprNodeDesc listOrMapColExpr;
    listOrMapColExpr = new ExprNodeColumnDesc(typeInfo, columnName, "table", false);
    children.add(listOrMapColExpr);
    children.add(keyColExpr);
    VectorRandomRowSource rowSource = new VectorRandomRowSource();
    rowSource.initGenerationSpecSchema(
        random, generationSpecList,
        /* maxComplexDepth */ 0,
        /* allowNull */ allowNulls,
        /* isUnicodeOk */ true,
        explicitDataTypePhysicalVariationList);
    String[] columnNames = columns.toArray(new String[0]);
    Object[][] randomRows = rowSource.randomRows(100000);
    VectorRandomBatchSource batchSource = VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);
    final GenericUDF udf = new GenericUDFIndex();
    ObjectInspector[] argumentOIs = new ObjectInspector[2];
    argumentOIs[0] = objectInspector;
    argumentOIs[1] = keyObjectInspector;
    final ObjectInspector outputObjectInspector = udf.initialize(argumentOIs);
    TypeInfo outputTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(outputObjectInspector);
    ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc(elementTypeInfo, udf, children);
    System.out.println("here");
    final int rowCount = randomRows.length;
    Object[][] resultObjectsArray = new Object[IndexTestMode.count][];
    for (int i = 0; i < IndexTestMode.count; i++) {
        Object[] resultObjects = new Object[rowCount];
        resultObjectsArray[i] = resultObjects;
        IndexTestMode indexTestMode = IndexTestMode.values()[i];
        switch(indexTestMode) {
            case ROW_MODE:
                if (!doRowCastTest(typeInfo, columns, children, udf, exprDesc, randomRows, rowSource.rowStructObjectInspector(), elementObjectInspector, outputTypeInfo, resultObjects)) {
                    return false;
                }
                break;
            case ADAPTOR:
            case VECTOR_EXPRESSION:
                if (!doVectorCastTest(typeInfo, columns, columnNames, rowSource.typeInfos(), rowSource.dataTypePhysicalVariations(), children, udf, exprDesc, indexTestMode, batchSource, exprDesc.getWritableObjectInspector(), outputTypeInfo, resultObjects)) {
                    return false;
                }
                break;
            default:
                throw new RuntimeException("Unexpected IF statement test mode " + indexTestMode);
        }
    }
    for (int i = 0; i < rowCount; i++) {
        // Row-mode is the expected value.
        Object expectedResult = resultObjectsArray[0][i];
        for (int v = 1; v < IndexTestMode.count; v++) {
            Object vectorResult = resultObjectsArray[v][i];
            IndexTestMode indexTestMode = IndexTestMode.values()[v];
            if (expectedResult == null || vectorResult == null) {
                if (expectedResult != null || vectorResult != null) {
                    Assert.fail("Row " + i + " sourceTypeName " + typeName + " " + indexTestMode + " result is NULL " + (vectorResult == null ? "YES" : "NO result " + vectorResult.toString()) + " does not match row-mode expected result is NULL " + (expectedResult == null ? "YES" : "NO result " + expectedResult.toString()) + " row values " + Arrays.toString(randomRows[i]) + " exprDesc " + exprDesc.toString());
                }
            } else {
                if (!expectedResult.equals(vectorResult)) {
                    Assert.fail("Row " + i + " sourceTypeName " + typeName + " " + indexTestMode + " result " + vectorResult.toString() + " (" + vectorResult.getClass().getSimpleName() + ")" + " does not match row-mode expected result " + expectedResult.toString() + " (" + expectedResult.getClass().getSimpleName() + ")" + " row values " + Arrays.toString(randomRows[i]) + " exprDesc " + exprDesc.toString());
                }
            }
        }
    }
    return true;
}
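
Rather than hard-coding the result type, the test lets the UDF report it: initializing the GenericUDF against its argument ObjectInspectors yields the output ObjectInspector, from which the TypeInfo is recovered. A minimal sketch of that step, assuming objectInspector, keyObjectInspector, and children are built as above (for GenericUDFIndex the derived type matches the element type, so passing it to the plan node is equivalent to the elementTypeInfo used above):

// Sketch: derive the output type from the UDF itself.
GenericUDF udf = new GenericUDFIndex();
ObjectInspector[] argumentOIs = new ObjectInspector[] { objectInspector, keyObjectInspector };
ObjectInspector outputObjectInspector = udf.initialize(argumentOIs);
TypeInfo outputTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(outputObjectInspector);
// The plan node carries the derived type plus the column/key children.
ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc(outputTypeInfo, udf, children);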
Also used: ArrayList (java.util.ArrayList), DataTypePhysicalVariation (org.apache.hadoop.hive.common.type.DataTypePhysicalVariation), TreeSet (java.util.TreeSet), ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc), ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc), WritableIntObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableIntObjectInspector), ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), ConstantObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector), ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc), WritableComparator (org.apache.hadoop.io.WritableComparator), VectorRandomBatchSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource), ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc), MapTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo), ListTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo), DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo), TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo), GenerationSpec (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec), GenericUDFIndex (org.apache.hadoop.hive.ql.udf.generic.GenericUDFIndex), GenericUDF (org.apache.hadoop.hive.ql.udf.generic.GenericUDF), VectorRandomRowSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource)

Example 14 with VectorRandomRowSource

Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource in project hive by apache.

From the class TestVectorIfStatement, the method doIfTestsWithDiffColumnScalar:

private void doIfTestsWithDiffColumnScalar(Random random, String typeName, ColumnScalarMode columnScalarMode, IfVariation ifVariation, DataTypePhysicalVariation dataTypePhysicalVariation, boolean isNullScalar1, boolean isNullScalar2) throws Exception {
    /*
    System.out.println("*DEBUG* typeName " + typeName +
        " columnScalarMode " + columnScalarMode +
        " isNullScalar1 " + isNullScalar1 +
        " isNullScalar2 " + isNullScalar2);
    */
    TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(typeName);
    boolean isDecimal64 = (dataTypePhysicalVariation == DataTypePhysicalVariation.DECIMAL_64);
    final int decimal64Scale = (isDecimal64 ? ((DecimalTypeInfo) typeInfo).getScale() : 0);
    List<String> explicitTypeNameList = new ArrayList<String>();
    List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList = new ArrayList<DataTypePhysicalVariation>();
    explicitTypeNameList.add("boolean");
    explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
    if (columnScalarMode != ColumnScalarMode.SCALAR_SCALAR) {
        explicitTypeNameList.add(typeName);
        explicitDataTypePhysicalVariationList.add(dataTypePhysicalVariation);
        if (columnScalarMode == ColumnScalarMode.COLUMN_COLUMN) {
            explicitTypeNameList.add(typeName);
            explicitDataTypePhysicalVariationList.add(dataTypePhysicalVariation);
        }
    }
    VectorRandomRowSource rowSource = new VectorRandomRowSource();
    rowSource.initExplicitSchema(
        random, explicitTypeNameList,
        /* maxComplexDepth */ 0,
        /* allowNull */ true,
        /* isUnicodeOk */ true,
        explicitDataTypePhysicalVariationList);
    List<String> columns = new ArrayList<String>();
    // The boolean predicate.
    columns.add("col1");
    ExprNodeColumnDesc col1Expr = new ExprNodeColumnDesc(Boolean.class, "col1", "table", false);
    int columnNum = 2;
    ExprNodeDesc col2Expr;
    if (columnScalarMode == ColumnScalarMode.COLUMN_COLUMN || columnScalarMode == ColumnScalarMode.COLUMN_SCALAR) {
        String columnName = "col" + (columnNum++);
        col2Expr = new ExprNodeColumnDesc(typeInfo, columnName, "table", false);
        columns.add(columnName);
    } else {
        Object scalar1Object;
        if (isNullScalar1) {
            scalar1Object = null;
        } else {
            scalar1Object = VectorRandomRowSource.randomPrimitiveObject(random, (PrimitiveTypeInfo) typeInfo);
        }
        col2Expr = new ExprNodeConstantDesc(typeInfo, scalar1Object);
    }
    ExprNodeDesc col3Expr;
    if (columnScalarMode == ColumnScalarMode.COLUMN_COLUMN || columnScalarMode == ColumnScalarMode.SCALAR_COLUMN) {
        String columnName = "col" + (columnNum++);
        col3Expr = new ExprNodeColumnDesc(typeInfo, columnName, "table", false);
        columns.add(columnName);
    } else {
        Object scalar2Object;
        if (isNullScalar2) {
            scalar2Object = null;
        } else {
            scalar2Object = VectorRandomRowSource.randomPrimitiveObject(random, (PrimitiveTypeInfo) typeInfo);
        }
        col3Expr = new ExprNodeConstantDesc(typeInfo, scalar2Object);
    }
    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
    children.add(col1Expr);
    children.add(col2Expr);
    children.add(col3Expr);
    // ----------------------------------------------------------------------------------------------
    String[] columnNames = columns.toArray(new String[0]);
    Object[][] randomRows = rowSource.randomRows(100000);
    VectorRandomBatchSource batchSource = VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);
    final int rowCount = randomRows.length;
    Object[][] resultObjectsArray = new Object[IfStmtTestMode.count][];
    for (int i = 0; i < IfStmtTestMode.count; i++) {
        Object[] resultObjects = new Object[rowCount];
        resultObjectsArray[i] = resultObjects;
        IfStmtTestMode ifStmtTestMode = IfStmtTestMode.values()[i];
        switch(ifStmtTestMode) {
            case ROW_MODE:
                doRowIfTest(typeInfo, columns, children, randomRows, rowSource.rowStructObjectInspector(), resultObjects);
                break;
            case ADAPTOR_WHEN:
            case VECTOR_EXPRESSION:
                doVectorIfTest(typeInfo, ifVariation, columns, columnNames, rowSource.typeInfos(), rowSource.dataTypePhysicalVariations(), children, ifStmtTestMode, columnScalarMode, batchSource, resultObjects);
                break;
            default:
                throw new RuntimeException("Unexpected IF statement test mode " + ifStmtTestMode);
        }
    }
    for (int i = 0; i < rowCount; i++) {
        // Row-mode is the expected value.
        Object expectedResult = resultObjectsArray[0][i];
        for (int v = 1; v < IfStmtTestMode.count; v++) {
            Object vectorResult = resultObjectsArray[v][i];
            if (ifVariation.isFilter && expectedResult == null && vectorResult != null) {
                // This is OK.
                boolean vectorBoolean = ((BooleanWritable) vectorResult).get();
                if (vectorBoolean) {
                    Assert.fail("Row " + i + " typeName " + typeInfo.getTypeName() + " " + ifVariation + " result is NOT NULL and true" + " does not match row-mode expected result is NULL which means false here" + " row values " + Arrays.toString(randomRows[i]));
                }
            } else if (expectedResult == null || vectorResult == null) {
                if (expectedResult != null || vectorResult != null) {
                    Assert.fail("Row " + i + " " + IfStmtTestMode.values()[v] + " " + columnScalarMode + " result is NULL " + (vectorResult == null) + " does not match row-mode expected result is NULL " + (expectedResult == null));
                }
            } else {
                if (!expectedResult.equals(vectorResult)) {
                    Assert.fail("Row " + i + " " + IfStmtTestMode.values()[v] + " " + columnScalarMode + " result " + vectorResult.toString() + " (" + vectorResult.getClass().getSimpleName() + ")" + " does not match row-mode expected result " + expectedResult.toString() + " (" + expectedResult.getClass().getSimpleName() + ")");
                }
            }
        }
    }
}
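
Every example on this page ends with the same verification idiom: index 0 of resultObjectsArray holds the row-mode results, which act as the oracle, and each vectorized mode must agree value-for-value, with NULLs handled explicitly. Distilled to a generic skeleton (the detailed per-test failure messages above are dropped for brevity):

// Generic verification skeleton shared by these tests.
for (int i = 0; i < rowCount; i++) {
    Object expected = resultObjectsArray[0][i];  // row mode is the oracle
    for (int v = 1; v < resultObjectsArray.length; v++) {
        Object actual = resultObjectsArray[v][i];
        if (expected == null || actual == null) {
            // Both must be NULL together.
            Assert.assertTrue("Row " + i + " NULL mismatch", expected == actual);
        } else {
            Assert.assertEquals("Row " + i + " value mismatch", expected, actual);
        }
    }
}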
Also used: ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc), VectorRandomBatchSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource), ArrayList (java.util.ArrayList), PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo), DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo), TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo), DataTypePhysicalVariation (org.apache.hadoop.hive.common.type.DataTypePhysicalVariation), BooleanWritable (org.apache.hadoop.io.BooleanWritable), ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc), ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc), VectorRandomRowSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource)

Example 15 with VectorRandomRowSource

Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource in project hive by apache.

From the class TestVectorNegative, the method doTests:

private void doTests(Random random, TypeInfo typeInfo) throws Exception {
    String typeName = typeInfo.getTypeName();
    PrimitiveCategory primitiveCategory1 = ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory();
    List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
    List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList = new ArrayList<DataTypePhysicalVariation>();
    List<String> columns = new ArrayList<String>();
    int columnNum = 1;
    generationSpecList.add(GenerationSpec.createSameType(typeInfo));
    explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
    ExprNodeDesc col1Expr;
    String columnName = "col" + (columnNum++);
    col1Expr = new ExprNodeColumnDesc(typeInfo, columnName, "table", false);
    columns.add(columnName);
    List<ObjectInspector> objectInspectorList = new ArrayList<ObjectInspector>();
    objectInspectorList.add(VectorRandomRowSource.getObjectInspector(typeInfo));
    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
    children.add(col1Expr);
    // ----------------------------------------------------------------------------------------------
    String[] columnNames = columns.toArray(new String[0]);
    VectorRandomRowSource rowSource = new VectorRandomRowSource();
    rowSource.initGenerationSpecSchema(
        random, generationSpecList,
        /* maxComplexDepth */ 0,
        /* allowNull */ true,
        /* isUnicodeOk */ true,
        explicitDataTypePhysicalVariationList);
    Object[][] randomRows = rowSource.randomRows(100000);
    VectorRandomBatchSource batchSource = VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);
    GenericUDF genericUdf = new GenericUDFOPNegative();
    ObjectInspector[] objectInspectors = objectInspectorList.toArray(new ObjectInspector[objectInspectorList.size()]);
    ObjectInspector outputObjectInspector = null;
    try {
        outputObjectInspector = genericUdf.initialize(objectInspectors);
    } catch (Exception e) {
        Assert.fail(e.toString());
    }
    TypeInfo outputTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(outputObjectInspector);
    ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc(outputTypeInfo, genericUdf, children);
    final int rowCount = randomRows.length;
    Object[][] resultObjectsArray = new Object[NegativeTestMode.count][];
    for (int i = 0; i < NegativeTestMode.count; i++) {
        Object[] resultObjects = new Object[rowCount];
        resultObjectsArray[i] = resultObjects;
        NegativeTestMode negativeTestMode = NegativeTestMode.values()[i];
        switch(negativeTestMode) {
            case ROW_MODE:
                doRowArithmeticTest(typeInfo, columns, children, exprDesc, randomRows, rowSource.rowStructObjectInspector(), outputTypeInfo, resultObjects);
                break;
            case ADAPTOR:
            case VECTOR_EXPRESSION:
                doVectorArithmeticTest(typeInfo, columns, columnNames, rowSource.typeInfos(), rowSource.dataTypePhysicalVariations(), children, exprDesc, negativeTestMode, batchSource, exprDesc.getWritableObjectInspector(), outputTypeInfo, resultObjects);
                break;
            default:
                throw new RuntimeException("Unexpected Negative operator test mode " + negativeTestMode);
        }
    }
    for (int i = 0; i < rowCount; i++) {
        // Row-mode is the expected value.
        Object expectedResult = resultObjectsArray[0][i];
        for (int v = 1; v < NegativeTestMode.count; v++) {
            Object vectorResult = resultObjectsArray[v][i];
            if (expectedResult == null || vectorResult == null) {
                if (expectedResult != null || vectorResult != null) {
                    Assert.fail("Row " + i + " typeName " + typeName + " outputTypeName " + outputTypeInfo.getTypeName() + " " + NegativeTestMode.values()[v] + " result is NULL " + (vectorResult == null) + " does not match row-mode expected result is NULL " + (expectedResult == null) + " row values " + Arrays.toString(randomRows[i]));
                }
            } else {
                if (!expectedResult.equals(vectorResult)) {
                    Assert.fail("Row " + i + " typeName " + typeName + " outputTypeName " + outputTypeInfo.getTypeName() + " " + NegativeTestMode.values()[v] + " result " + vectorResult.toString() + " (" + vectorResult.getClass().getSimpleName() + ")" + " does not match row-mode expected result " + expectedResult.toString() + " (" + expectedResult.getClass().getSimpleName() + ")" + " row values " + Arrays.toString(randomRows[i]));
                }
            }
        }
    }
}
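
These tests obtain their GenericUDF in one of two ways: direct instantiation, as with new GenericUDFOPNegative() above, or a registry lookup by SQL function name, as TestVectorSubStr does for "substr". A small sketch of the registry route, built from the calls shown in example 12 (the function name "negative" for unary minus is an assumption):

// Sketch: resolve the UDF through the FunctionRegistry instead of new-ing it.
GenericUDF genericUdf;
try {
    // "negative" is assumed to be the registered name for GenericUDFOPNegative.
    FunctionInfo funcInfo = FunctionRegistry.getFunctionInfo("negative");
    genericUdf = funcInfo.getGenericUDF();
} catch (SemanticException e) {
    throw new RuntimeException("Failed to load function 'negative'", e);
}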
Also used: ArrayList (java.util.ArrayList), PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo), DataTypePhysicalVariation (org.apache.hadoop.hive.common.type.DataTypePhysicalVariation), ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc), ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc), PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory), ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), VectorRandomBatchSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource), ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc), DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo), TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo), SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException), HiveException (org.apache.hadoop.hive.ql.metadata.HiveException), GenerationSpec (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec), GenericUDF (org.apache.hadoop.hive.ql.udf.generic.GenericUDF), GenericUDFOPNegative (org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNegative), VectorRandomRowSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource)

Aggregations

VectorRandomRowSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource): 50 uses
Random (java.util.Random): 24 uses
VerifyFastRowHashMap (org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.CheckFastRowHashMap.VerifyFastRowHashMap): 24 uses
Test (org.junit.Test): 24 uses
TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo): 20 uses
ArrayList (java.util.ArrayList): 19 uses
DataTypePhysicalVariation (org.apache.hadoop.hive.common.type.DataTypePhysicalVariation): 19 uses
VectorRandomBatchSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource): 19 uses
ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc): 19 uses
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc): 19 uses
GenerationSpec (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec): 18 uses
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 17 uses
DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo): 16 uses
GenericUDF (org.apache.hadoop.hive.ql.udf.generic.GenericUDF): 15 uses
VectorizedRowBatchCtx (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx): 12 uses
ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc): 12 uses
ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc): 11 uses
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 11 uses
PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory): 8 uses
HiveConf (org.apache.hadoop.hive.conf.HiveConf): 7 uses