Example 6 with VectorExtractRow

Use of org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow in project hive by apache.

From the class TestMapJoinOperator, method generateBigAndSmallTableRowLogLines.

private void generateBigAndSmallTableRowLogLines(MapJoinTestDescription testDesc, MapJoinTestData testData) throws HiveException {
    // Generate Big Table rows log lines...
    VectorExtractRow vectorExtractRow = new VectorExtractRow();
    vectorExtractRow.init(testDesc.bigTableTypeInfos);
    final int bigTableColumnCount = testDesc.bigTableTypeInfos.length;
    Object[] bigTableRowObjects = new Object[bigTableColumnCount];
    /*
    PrintStream big_ps;
    try {
      big_ps = new PrintStream("/Users/mmccline/VecFullOuterRefresh/out_big");
    } catch (Exception e) {
      throw new HiveException(e);
    }
    */
    VectorRandomBatchSource bigTableBatchSource = testData.getBigTableBatchSource();
    VectorizedRowBatch batch = testData.getBigTableBatch();
    bigTableBatchSource.resetBatchIteration();
    while (bigTableBatchSource.fillNextBatch(batch)) {
        final int size = testData.bigTableBatch.size;
        for (int r = 0; r < size; r++) {
            vectorExtractRow.extractRow(testData.bigTableBatch, r, bigTableRowObjects);
            // big_ps.println(rowToCsvString(bigTableRowObjects));
        }
    }
    // big_ps.close();
    /*
    PrintStream small_ps;
    try {
      small_ps = new PrintStream("/Users/mmccline/VecFullOuterRefresh/out_small");
    } catch (Exception e) {
      throw new HiveException(e);
    }
    */
    // Generate Small Table rows log lines...
    final int keyKeyColumnNumsLength = testDesc.bigTableKeyColumnNums.length;
    final int smallTableRetainValueLength = testDesc.smallTableRetainValueColumnNums.length;
    final int smallTableLength = keyKeyColumnNumsLength + smallTableRetainValueLength;
    for (Entry<RowTestObjects, Integer> entry : testData.smallTableKeyHashMap.entrySet()) {
        if (smallTableRetainValueLength == 0) {
            Object[] smallTableRowObjects = entry.getKey().getRow();
            // small_ps.println(rowToCsvString(smallTableRowObjects));
        } else {
            Integer valueIndex = entry.getValue();
            ArrayList<RowTestObjects> valueList = testData.smallTableValues.get(valueIndex);
            final int valueCount = valueList.size();
            for (int v = 0; v < valueCount; v++) {
                Object[] smallTableRowObjects = new Object[smallTableLength];
                System.arraycopy(entry.getKey().getRow(), 0, smallTableRowObjects, 0, keyKeyColumnNumsLength);
                int outputColumnNum = keyKeyColumnNumsLength;
                Object[] valueRow = valueList.get(v).getRow();
                for (int o = 0; o < smallTableRetainValueLength; o++) {
                    smallTableRowObjects[outputColumnNum++] = valueRow[testDesc.smallTableRetainValueColumnNums[o]];
                }
                // small_ps.println(rowToCsvString(smallTableRowObjects));
            }
        }
    }
    // small_ps.close();
}
Also used : VectorizedRowBatch(org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch) VectorRandomBatchSource(org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource) VectorExtractRow(org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow) RowTestObjects(org.apache.hadoop.hive.ql.exec.util.rowobjects.RowTestObjects)
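
Distilled from the example above, the core extraction pattern is compact: initialize a VectorExtractRow with the column TypeInfos, then drain each VectorizedRowBatch row by row into a reusable Object[]. A minimal sketch of that loop, with drainBatches as a hypothetical helper name (not a Hive API):

import org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow;
import org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;

// Hypothetical helper: drain every row of every batch into a reusable
// Object[] via VectorExtractRow, the same loop shape as the test above.
static void drainBatches(TypeInfo[] typeInfos, VectorRandomBatchSource source,
        VectorizedRowBatch batch) throws HiveException {
    VectorExtractRow extractRow = new VectorExtractRow();
    // One-arg init: extract columns 0..n-1 in order, typed by typeInfos.
    extractRow.init(typeInfos);
    Object[] rowObjects = new Object[typeInfos.length];
    source.resetBatchIteration();
    while (source.fillNextBatch(batch)) {
        for (int r = 0; r < batch.size; r++) {
            // Materializes row r of the current batch as Java objects.
            extractRow.extractRow(batch, r, rowObjects);
            // ... consume rowObjects (the test's commented-out code prints CSV) ...
        }
    }
}

Reusing one rowObjects array across all rows mirrors the test; extraction does no per-row allocation beyond the extracted values themselves.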

Example 7 with VectorExtractRow

use of org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow in project hive by apache.

the class VectorReduceSinkObjectHashOperator method initializeOp.

@Override
protected void initializeOp(Configuration hconf) throws HiveException {
    super.initializeOp(hconf);
    VectorExpression.doTransientInit(reduceSinkBucketExpressions, hconf);
    VectorExpression.doTransientInit(reduceSinkPartitionExpressions, hconf);
    if (!isEmptyKey) {
        // For this variation, we serialize the key without caring if it is a
        // single Long, single String, multi-key, etc.
        keyOutput = new Output();
        keyBinarySortableSerializeWrite.set(keyOutput);
        keyVectorSerializeRow = new VectorSerializeRow<BinarySortableSerializeWrite>(keyBinarySortableSerializeWrite);
        keyVectorSerializeRow.init(reduceSinkKeyTypeInfos, reduceSinkKeyColumnMap);
    }
    if (isEmptyBuckets) {
        numBuckets = 0;
    } else {
        numBuckets = conf.getNumBuckets();
        bucketObjectInspectors = getObjectInspectorArray(reduceSinkBucketTypeInfos);
        bucketVectorExtractRow = new VectorExtractRow();
        bucketVectorExtractRow.init(reduceSinkBucketTypeInfos, reduceSinkBucketColumnMap);
        bucketFieldValues = new Object[reduceSinkBucketTypeInfos.length];
    }
    if (isEmptyPartitions) {
        nonPartitionRandom = new Random(12345);
    } else {
        partitionObjectInspectors = getObjectInspectorArray(reduceSinkPartitionTypeInfos);
        partitionVectorExtractRow = new VectorExtractRow();
        partitionVectorExtractRow.init(reduceSinkPartitionTypeInfos, reduceSinkPartitionColumnMap);
        partitionFieldValues = new Object[reduceSinkPartitionTypeInfos.length];
    }
    // Set hashFunc
    hashFunc = getConf().getBucketingVersion() == 2 && !vectorDesc.getIsAcidChange() ? ObjectInspectorUtils::getBucketHashCode : ObjectInspectorUtils::getBucketHashCodeOld;
    // Set function to evaluate _bucket_number if needed.
    if (reduceSinkKeyExpressions != null) {
        for (VectorExpression ve : reduceSinkKeyExpressions) {
            if (ve instanceof BucketNumExpression) {
                bucketExpr = (BucketNumExpression) ve;
                break;
            }
        }
    }
}
Also used : Random(java.util.Random) Output(org.apache.hadoop.hive.serde2.ByteStream.Output) BucketNumExpression(org.apache.hadoop.hive.ql.exec.vector.expressions.BucketNumExpression) VectorExpression(org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression) BinarySortableSerializeWrite(org.apache.hadoop.hive.serde2.binarysortable.fast.BinarySortableSerializeWrite) VectorExtractRow(org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow)
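
Both non-empty branches above (buckets and partitions) repeat the same three-step setup: construct a VectorExtractRow, call the two-argument init with the field TypeInfos plus a column-number map, and size a reusable field array to the types. A minimal sketch of the pattern, with extractMappedFields as a hypothetical helper name (not a Hive API):

import org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;

// Hypothetical helper mirroring the bucket/partition setup: an extractor
// over a subset of batch columns, plus a scratch array for one row's fields.
static Object[] extractMappedFields(TypeInfo[] typeInfos, int[] columnMap,
        VectorizedRowBatch batch, int batchIndex) throws HiveException {
    VectorExtractRow extractRow = new VectorExtractRow();
    // Two-arg init: typeInfos give the logical field types; columnMap says
    // which physical batch columns those fields live in.
    extractRow.init(typeInfos, columnMap);
    Object[] fieldValues = new Object[typeInfos.length];
    extractRow.extractRow(batch, batchIndex, fieldValues);
    return fieldValues;
}

In the operator, the extractor and the field array are of course built once in initializeOp and reused for every row; the one-shot form above only keeps the sketch self-contained.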

Example 8 with VectorExtractRow

Use of org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow in project hive by apache.

From the class TestVectorDateDiff, method doVectorDateAddSubTest.

private void doVectorDateAddSubTest(TypeInfo dateTimeStringTypeInfo1, TypeInfo dateTimeStringTypeInfo2, List<String> columns, TypeInfo[] typeInfos, List<ExprNodeDesc> children, ExprNodeGenericFuncDesc exprDesc, DateDiffTestMode dateDiffTestMode, ColumnScalarMode columnScalarMode, VectorRandomBatchSource batchSource, VectorizedRowBatchCtx batchContext, Object[] resultObjects) throws Exception {
    HiveConf hiveConf = new HiveConf();
    if (dateDiffTestMode == DateDiffTestMode.ADAPTOR) {
        hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_TEST_VECTOR_ADAPTOR_OVERRIDE, true);
    }
    DataTypePhysicalVariation[] dataTypePhysicalVariations = new DataTypePhysicalVariation[2];
    Arrays.fill(dataTypePhysicalVariations, DataTypePhysicalVariation.NONE);
    VectorizationContext vectorizationContext = new VectorizationContext("name", columns, Arrays.asList(typeInfos), Arrays.asList(dataTypePhysicalVariations), hiveConf);
    VectorExpression vectorExpression = vectorizationContext.getVectorExpression(exprDesc);
    vectorExpression.transientInit(hiveConf);
    if (dateDiffTestMode == DateDiffTestMode.VECTOR_EXPRESSION && vectorExpression instanceof VectorUDFAdaptor) {
        System.out.println("*NO NATIVE VECTOR EXPRESSION* dateTimeStringTypeInfo1 " + dateTimeStringTypeInfo1.toString() + " dateTimeStringTypeInfo2 " + dateTimeStringTypeInfo2.toString() + " dateDiffTestMode " + dateDiffTestMode + " columnScalarMode " + columnScalarMode + " vectorExpression " + vectorExpression.toString());
    }
    VectorizedRowBatch batch = batchContext.createVectorizedRowBatch();
    VectorExtractRow resultVectorExtractRow = new VectorExtractRow();
    resultVectorExtractRow.init(new TypeInfo[] { TypeInfoFactory.intTypeInfo }, new int[] { columns.size() });
    Object[] scratchRow = new Object[1];
    // System.out.println("*VECTOR EXPRESSION* " + vectorExpression.getClass().getSimpleName());
    /*
    System.out.println(
        "*DEBUG* dateTimeStringTypeInfo1 " + dateTimeStringTypeInfo1.toString() +
        " dateTimeStringTypeInfo2 " + dateTimeStringTypeInfo2.toString() +
        " dateDiffTestMode " + dateDiffTestMode +
        " columnScalarMode " + columnScalarMode +
        " vectorExpression " + vectorExpression.toString());
    */
    batchSource.resetBatchIteration();
    int rowIndex = 0;
    while (true) {
        if (!batchSource.fillNextBatch(batch)) {
            break;
        }
        vectorExpression.evaluate(batch);
        extractResultObjects(batch, rowIndex, resultVectorExtractRow, scratchRow, resultObjects);
        rowIndex += batch.size;
    }
}
Also used : VectorizedRowBatch(org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch) DataTypePhysicalVariation(org.apache.hadoop.hive.common.type.DataTypePhysicalVariation) HiveConf(org.apache.hadoop.hive.conf.HiveConf) VectorizationContext(org.apache.hadoop.hive.ql.exec.vector.VectorizationContext) VectorExpression(org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression) VectorUDFAdaptor(org.apache.hadoop.hive.ql.exec.vector.udf.VectorUDFAdaptor) VectorExtractRow(org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow)
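
The single-column init here is how these tests read an expression's result: the vectorized output lands in a scratch column (columns.size() is the first slot past the inputs), and the extractor is pointed at just that column. The extractResultObjects helper itself is not shown on this page; a hypothetical reconstruction of its core loop, assuming it simply copies the one extracted field per row:

import org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;

// Hypothetical reconstruction: copy the single extracted output column of
// each batch row into the flat resultObjects array.
static void copyBatchResults(VectorizedRowBatch batch, int rowIndex,
        VectorExtractRow resultExtractRow, Object[] scratchRow,
        Object[] resultObjects) {
    for (int i = 0; i < batch.size; i++) {
        // Honor the selected[] indirection when the batch is filtered.
        final int batchIndex = batch.selectedInUse ? batch.selected[i] : i;
        resultExtractRow.extractRow(batch, batchIndex, scratchRow);
        resultObjects[rowIndex + i] = scratchRow[0];
    }
}

The variants in Examples 9 and 10 also take an ObjectInspector, presumably so the helper can deep-copy each value rather than alias the scratch slot.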

Example 9 with VectorExtractRow

Use of org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow in project hive by apache.

From the class TestVectorNull, method doVectorCastTest.

private boolean doVectorCastTest(TypeInfo typeInfo, boolean isFilter, List<String> columns, String[] columnNames, TypeInfo[] typeInfos, DataTypePhysicalVariation[] dataTypePhysicalVariations, List<ExprNodeDesc> children, GenericUDF udf, ExprNodeGenericFuncDesc exprDesc, NullTestMode nullTestMode, VectorRandomBatchSource batchSource, ObjectInspector objectInspector, TypeInfo outputTypeInfo, Object[] resultObjects) throws Exception {
    HiveConf hiveConf = new HiveConf();
    if (nullTestMode == NullTestMode.ADAPTOR) {
        hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_TEST_VECTOR_ADAPTOR_OVERRIDE, true);
    }
    VectorizationContext vectorizationContext = new VectorizationContext("name", columns, Arrays.asList(typeInfos), Arrays.asList(dataTypePhysicalVariations), hiveConf);
    VectorExpression vectorExpression = vectorizationContext.getVectorExpression(exprDesc, (isFilter ? VectorExpressionDescriptor.Mode.FILTER : VectorExpressionDescriptor.Mode.PROJECTION));
    vectorExpression.transientInit(hiveConf);
    if (nullTestMode == NullTestMode.VECTOR_EXPRESSION && vectorExpression instanceof VectorUDFAdaptor) {
        System.out.println("*NO NATIVE VECTOR EXPRESSION* typeInfo " + typeInfo.toString() + " nullTestMode " + nullTestMode + " isFilter " + isFilter + " vectorExpression " + vectorExpression.toString());
    }
    // System.out.println("*VECTOR EXPRESSION* " + vectorExpression.getClass().getSimpleName());
    /*
    System.out.println(
        "*DEBUG* typeInfo " + typeInfo.toString() +
        " nullTestMode " + nullTestMode +
        " isFilter " + isFilter +
        " vectorExpression " + vectorExpression.toString());
    */
    VectorRandomRowSource rowSource = batchSource.getRowSource();
    VectorizedRowBatchCtx batchContext = new VectorizedRowBatchCtx(
        columnNames,
        rowSource.typeInfos(),
        rowSource.dataTypePhysicalVariations(),
        /* dataColumnNums */ null,
        /* partitionColumnCount */ 0,
        /* virtualColumnCount */ 0,
        /* neededVirtualColumns */ null,
        vectorizationContext.getScratchColumnTypeNames(),
        vectorizationContext.getScratchDataTypePhysicalVariations());
    VectorizedRowBatch batch = batchContext.createVectorizedRowBatch();
    VectorExtractRow resultVectorExtractRow = null;
    Object[] scratchRow = null;
    if (!isFilter) {
        resultVectorExtractRow = new VectorExtractRow();
        final int outputColumnNum = vectorExpression.getOutputColumnNum();
        resultVectorExtractRow.init(new TypeInfo[] { outputTypeInfo }, new int[] { outputColumnNum });
        scratchRow = new Object[1];
    }
    boolean copySelectedInUse = false;
    int[] copySelected = new int[VectorizedRowBatch.DEFAULT_SIZE];
    batchSource.resetBatchIteration();
    int rowIndex = 0;
    while (true) {
        if (!batchSource.fillNextBatch(batch)) {
            break;
        }
        final int originalBatchSize = batch.size;
        if (isFilter) {
            copySelectedInUse = batch.selectedInUse;
            if (batch.selectedInUse) {
                System.arraycopy(batch.selected, 0, copySelected, 0, originalBatchSize);
            }
        }
        // In filter mode, the batch size can be made smaller.
        vectorExpression.evaluate(batch);
        if (!isFilter) {
            extractResultObjects(batch, rowIndex, resultVectorExtractRow, scratchRow, objectInspector, resultObjects);
        } else {
            final int currentBatchSize = batch.size;
            if (copySelectedInUse && batch.selectedInUse) {
                int selectIndex = 0;
                for (int i = 0; i < originalBatchSize; i++) {
                    final int originalBatchIndex = copySelected[i];
                    final boolean booleanResult;
                    if (selectIndex < currentBatchSize && batch.selected[selectIndex] == originalBatchIndex) {
                        booleanResult = true;
                        selectIndex++;
                    } else {
                        booleanResult = false;
                    }
                    resultObjects[rowIndex + i] = new BooleanWritable(booleanResult);
                }
            } else if (batch.selectedInUse) {
                int selectIndex = 0;
                for (int i = 0; i < originalBatchSize; i++) {
                    final boolean booleanResult;
                    if (selectIndex < currentBatchSize && batch.selected[selectIndex] == i) {
                        booleanResult = true;
                        selectIndex++;
                    } else {
                        booleanResult = false;
                    }
                    resultObjects[rowIndex + i] = new BooleanWritable(booleanResult);
                }
            } else if (currentBatchSize == 0) {
                // Whole batch got zapped.
                for (int i = 0; i < originalBatchSize; i++) {
                    resultObjects[rowIndex + i] = new BooleanWritable(false);
                }
            } else {
                // Every row kept.
                for (int i = 0; i < originalBatchSize; i++) {
                    resultObjects[rowIndex + i] = new BooleanWritable(true);
                }
            }
        }
        rowIndex += originalBatchSize;
    }
    return true;
}
Also used : VectorizationContext(org.apache.hadoop.hive.ql.exec.vector.VectorizationContext) VectorUDFAdaptor(org.apache.hadoop.hive.ql.exec.vector.udf.VectorUDFAdaptor) VectorExtractRow(org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow) VectorizedRowBatchCtx(org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx) VectorizedRowBatch(org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch) BooleanWritable(org.apache.hadoop.io.BooleanWritable) HiveConf(org.apache.hadoop.hive.conf.HiveConf) VectorExpression(org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression) VectorRandomRowSource(org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource)
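
The filter branch above is the subtle part of this test: a filter expression shrinks batch.size and rewrites selected[], so the test recovers a per-row boolean by walking the pre- and post-evaluation selected arrays in lockstep. A row passed the filter exactly when it still appears, in order, in the post-filter list. The same reconciliation on plain arrays, as a self-contained sketch:

// Sketch of the selected[] reconciliation used in the filter branch: rows
// in `before` that also appear, in order, in `after` evaluated to true.
static boolean[] survived(int[] before, int beforeSize, int[] after, int afterSize) {
    boolean[] results = new boolean[beforeSize];
    int selectIndex = 0;
    for (int i = 0; i < beforeSize; i++) {
        if (selectIndex < afterSize && after[selectIndex] == before[i]) {
            // Row kept by the filter.
            results[i] = true;
            selectIndex++;
        } else {
            // Row zapped by the filter.
            results[i] = false;
        }
    }
    return results;
}

For example, survived(new int[] {0, 2, 5}, 3, new int[] {2}, 1) yields {false, true, false}. The test's two simpler branches (whole batch zapped, every row kept) are the degenerate cases of this walk.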

Example 10 with VectorExtractRow

Use of org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow in project hive by apache.

From the class TestVectorStringConcat, method doVectorStringConcatTest.

private void doVectorStringConcatTest(TypeInfo stringTypeInfo1, TypeInfo stringTypeInfo2, List<String> columns, TypeInfo[] typeInfos, List<ExprNodeDesc> children, StringConcatTestMode stringConcatTestMode, ColumnScalarMode columnScalarMode, VectorRandomBatchSource batchSource, VectorizedRowBatchCtx batchContext, ObjectInspector rowInspector, GenericUDF genericUdf, Object[] resultObjects) throws Exception {
    HiveConf hiveConf = new HiveConf();
    if (stringConcatTestMode == StringConcatTestMode.ADAPTOR) {
        hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_TEST_VECTOR_ADAPTOR_OVERRIDE, true);
    }
    DataTypePhysicalVariation[] dataTypePhysicalVariations = new DataTypePhysicalVariation[2];
    Arrays.fill(dataTypePhysicalVariations, DataTypePhysicalVariation.NONE);
    ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, genericUdf, children);
    // ---------------------------------------
    // Just so we can get the output type...
    ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(exprDesc, hiveConf);
    evaluator.initialize(rowInspector);
    ObjectInspector objectInspector = evaluator.getOutputOI();
    TypeInfo outputTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(objectInspector);
    /*
     * Again with correct output type...
     */
    exprDesc = new ExprNodeGenericFuncDesc(outputTypeInfo, genericUdf, children);
    // ---------------------------------------
    VectorizationContext vectorizationContext = new VectorizationContext("name", columns, Arrays.asList(typeInfos), Arrays.asList(dataTypePhysicalVariations), hiveConf);
    VectorExpression vectorExpression = vectorizationContext.getVectorExpression(exprDesc);
    vectorExpression.transientInit(hiveConf);
    if (stringConcatTestMode == StringConcatTestMode.VECTOR_EXPRESSION && vectorExpression instanceof VectorUDFAdaptor) {
        System.out.println("*NO NATIVE VECTOR EXPRESSION* stringTypeInfo1 " + stringTypeInfo1.toString() + " stringTypeInfo2 " + stringTypeInfo2.toString() + " stringConcatTestMode " + stringConcatTestMode + " columnScalarMode " + columnScalarMode + " vectorExpression " + vectorExpression.toString());
    }
    VectorizedRowBatch batch = batchContext.createVectorizedRowBatch();
    VectorExtractRow resultVectorExtractRow = new VectorExtractRow();
    resultVectorExtractRow.init(new TypeInfo[] { outputTypeInfo }, new int[] { columns.size() });
    Object[] scratchRow = new Object[1];
    // System.out.println("*VECTOR EXPRESSION* " + vectorExpression.getClass().getSimpleName());
    /*
    System.out.println(
        "*DEBUG* stringTypeInfo1 " + stringTypeInfo1.toString() +
        " stringTypeInfo2 " + stringTypeInfo2.toString() +
        " stringConcatTestMode " + stringConcatTestMode +
        " columnScalarMode " + columnScalarMode +
        " vectorExpression " + vectorExpression.toString());
    */
    batchSource.resetBatchIteration();
    int rowIndex = 0;
    while (true) {
        if (!batchSource.fillNextBatch(batch)) {
            break;
        }
        vectorExpression.evaluate(batch);
        extractResultObjects(batch, rowIndex, resultVectorExtractRow, scratchRow, objectInspector, resultObjects);
        rowIndex += batch.size;
    }
}
Also used : ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) ExprNodeEvaluator(org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator) ExprNodeGenericFuncDesc(org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc) VectorizationContext(org.apache.hadoop.hive.ql.exec.vector.VectorizationContext) VectorUDFAdaptor(org.apache.hadoop.hive.ql.exec.vector.udf.VectorUDFAdaptor) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) VectorExtractRow(org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow) VectorizedRowBatch(org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch) DataTypePhysicalVariation(org.apache.hadoop.hive.common.type.DataTypePhysicalVariation) HiveConf(org.apache.hadoop.hive.conf.HiveConf) VectorExpression(org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression)
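
The "just so we can get the output type" step near the top of this example is a reusable pattern: build the ExprNodeGenericFuncDesc with a provisional type, run it once through the row-mode evaluator to learn the real output ObjectInspector, then rebuild the desc with the correct TypeInfo before handing it to the vectorizer. A condensed sketch using only the calls that appear in the example (withRealOutputType is a hypothetical helper name):

import java.util.List;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

// Hypothetical helper: one row-mode dry run to discover the UDF's true
// output type, then a rebuilt descriptor carrying that type.
static ExprNodeGenericFuncDesc withRealOutputType(GenericUDF udf,
        List<ExprNodeDesc> children, ObjectInspector rowInspector,
        HiveConf hiveConf) throws Exception {
    // Pass 1: provisional string type, used only to obtain an evaluator.
    ExprNodeGenericFuncDesc provisional =
        new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, udf, children);
    ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(provisional, hiveConf);
    evaluator.initialize(rowInspector);
    TypeInfo outputTypeInfo =
        TypeInfoUtils.getTypeInfoFromObjectInspector(evaluator.getOutputOI());
    // Pass 2: the descriptor the vectorizer actually sees.
    return new ExprNodeGenericFuncDesc(outputTypeInfo, udf, children);
}

The provisional stringTypeInfo never reaches the vectorizer; only the rebuilt desc does.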

Aggregations

VectorExtractRow (org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow) 23
VectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch) 19
VectorizationContext (org.apache.hadoop.hive.ql.exec.vector.VectorizationContext) 18
HiveConf (org.apache.hadoop.hive.conf.HiveConf) 17
VectorUDFAdaptor (org.apache.hadoop.hive.ql.exec.vector.udf.VectorUDFAdaptor) 16
VectorExpression (org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression) 14
VectorizedRowBatchCtx (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx) 12
VectorRandomRowSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource) 7
DataTypePhysicalVariation (org.apache.hadoop.hive.common.type.DataTypePhysicalVariation) 6
ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc) 5
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) 4
TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) 4
BooleanWritable (org.apache.hadoop.io.BooleanWritable) 4
RowTestObjects (org.apache.hadoop.hive.ql.exec.util.rowobjects.RowTestObjects) 2
VectorRandomBatchSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource) 2
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException) 2
GenericUDF (org.apache.hadoop.hive.ql.udf.generic.GenericUDF) 2
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) 2
DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) 2
ArrayList (java.util.ArrayList) 1