Search in sources :

Example 36 with VectorRandomRowSource

use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource in project hive by apache.

From the class TestVectorTimestampExtract, method doVectorCastTest.

private boolean doVectorCastTest(TypeInfo dateTimeStringTypeInfo, List<String> columns, String[] columnNames, TypeInfo[] typeInfos, DataTypePhysicalVariation[] dataTypePhysicalVariations, List<ExprNodeDesc> children, ExprNodeGenericFuncDesc exprDesc, TimestampExtractTestMode timestampExtractTestMode, VectorRandomBatchSource batchSource, Object[] resultObjects) throws Exception {
    // Evaluates exprDesc through the vectorized execution path and stores one
    // result object per input row into resultObjects. Always returns true.
    final HiveConf conf = new HiveConf();
    if (timestampExtractTestMode == TimestampExtractTestMode.ADAPTOR) {
        // Force the VectorUDFAdaptor path instead of a native vector expression.
        conf.setBoolVar(HiveConf.ConfVars.HIVE_TEST_VECTOR_ADAPTOR_OVERRIDE, true);
    }
    final VectorizationContext vectorizationContext = new VectorizationContext("name", columns, Arrays.asList(typeInfos), Arrays.asList(dataTypePhysicalVariations), conf);
    final VectorExpression vectorExpression = vectorizationContext.getVectorExpression(exprDesc);
    vectorExpression.transientInit(conf);
    // Report when we asked for a native vector expression but only got the adaptor.
    if (timestampExtractTestMode == TimestampExtractTestMode.VECTOR_EXPRESSION && vectorExpression instanceof VectorUDFAdaptor) {
        System.out.println("*NO NATIVE VECTOR EXPRESSION* dateTimeStringTypeInfo " + dateTimeStringTypeInfo.toString() + " timestampExtractTestMode " + timestampExtractTestMode + " vectorExpression " + vectorExpression.toString());
    }
    final VectorRandomRowSource rowSource = batchSource.getRowSource();
    final VectorizedRowBatchCtx batchContext = new VectorizedRowBatchCtx(columnNames, rowSource.typeInfos(), rowSource.dataTypePhysicalVariations(), /* dataColumnNums */
    null, /* partitionColumnCount */
    0, /* virtualColumnCount */
    0, /* neededVirtualColumns */
    null, vectorizationContext.getScratchColumnTypeNames(), vectorizationContext.getScratchDataTypePhysicalVariations());
    final VectorizedRowBatch batch = batchContext.createVectorizedRowBatch();
    // The extract result is an int; read it from the expression's output column.
    final VectorExtractRow resultVectorExtractRow = new VectorExtractRow();
    resultVectorExtractRow.init(new TypeInfo[] { TypeInfoFactory.intTypeInfo }, new int[] { vectorExpression.getOutputColumnNum() });
    final Object[] scratchRow = new Object[1];
    batchSource.resetBatchIteration();
    int rowIndex = 0;
    // Drain every batch from the source, evaluating and collecting results as we go.
    while (batchSource.fillNextBatch(batch)) {
        vectorExpression.evaluate(batch);
        extractResultObjects(batch, rowIndex, resultVectorExtractRow, scratchRow, TypeInfoFactory.intTypeInfo, resultObjects);
        rowIndex += batch.size;
    }
    return true;
}
Also used : VectorizedRowBatchCtx(org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx) VectorizedRowBatch(org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch) HiveConf(org.apache.hadoop.hive.conf.HiveConf) VectorizationContext(org.apache.hadoop.hive.ql.exec.vector.VectorizationContext) VectorUDFAdaptor(org.apache.hadoop.hive.ql.exec.vector.udf.VectorUDFAdaptor) VectorExtractRow(org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow) VectorRandomRowSource(org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource)

Example 37 with VectorRandomRowSource

use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource in project hive by apache.

From the class TestVectorTimestampExtract, method doIfTestOneTimestampExtract.

/**
 * Runs one timestamp-extract function (day, hour, year, ...) over randomly generated
 * rows of the given date/time-ish column type in every TimestampExtractTestMode
 * (row mode, adaptor, native vector expression) and asserts that all modes agree
 * with the row-mode result.
 */
private void doIfTestOneTimestampExtract(Random random, String dateTimeStringTypeName, String extractFunctionName) throws Exception {
    TypeInfo dateTimeStringTypeInfo = TypeInfoUtils.getTypeInfoFromTypeString(dateTimeStringTypeName);
    PrimitiveCategory dateTimeStringPrimitiveCategory = ((PrimitiveTypeInfo) dateTimeStringTypeInfo).getPrimitiveCategory();
    boolean isStringFamily = (dateTimeStringPrimitiveCategory == PrimitiveCategory.STRING || dateTimeStringPrimitiveCategory == PrimitiveCategory.CHAR || dateTimeStringPrimitiveCategory == PrimitiveCategory.VARCHAR);
    List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
    List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList = new ArrayList<DataTypePhysicalVariation>();
    List<String> columns = new ArrayList<String>();
    int columnNum = 1;
    ExprNodeDesc col1Expr;
    if (!isStringFamily) {
        generationSpecList.add(GenerationSpec.createSameType(dateTimeStringTypeInfo));
    } else {
        // For string-family columns, generate strings that hold timestamp-formatted values.
        generationSpecList.add(GenerationSpec.createStringFamilyOtherTypeValue(dateTimeStringTypeInfo, TypeInfoFactory.timestampTypeInfo));
    }
    explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
    String columnName = "col" + (columnNum++);
    col1Expr = new ExprNodeColumnDesc(dateTimeStringTypeInfo, columnName, "table", false);
    columns.add(columnName);
    VectorRandomRowSource rowSource = new VectorRandomRowSource();
    rowSource.initGenerationSpecSchema(random, generationSpecList, /* maxComplexDepth */
    0, /* allowNull */
    true, /* isUnicodeOk */
    true, explicitDataTypePhysicalVariationList);
    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
    children.add(col1Expr);
    // ----------------------------------------------------------------------------------------------
    String[] columnNames = columns.toArray(new String[0]);
    Object[][] randomRows = rowSource.randomRows(100000);
    VectorRandomBatchSource batchSource = VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);
    // DATE columns carry no time-of-day, so hour/minute/second extraction is skipped.
    // NOTE(review): this early return sits after the (expensive) 100000-row generation;
    // moving it earlier would also change how much of the Random stream is consumed,
    // which could perturb data generated by later calls sharing the same Random.
    if (dateTimeStringPrimitiveCategory == PrimitiveCategory.DATE && (extractFunctionName.equals("hour") || extractFunctionName.equals("minute") || extractFunctionName.equals("second"))) {
        return;
    }
    // Map the extract function name to its UDF; legacy UDFs go through GenericUDFBridge.
    final GenericUDF udf;
    switch(extractFunctionName) {
        case "day":
            udf = new UDFDayOfMonth();
            break;
        case "dayofweek":
            GenericUDFBridge dayOfWeekUDFBridge = new GenericUDFBridge();
            dayOfWeekUDFBridge.setUdfClassName(UDFDayOfWeek.class.getName());
            udf = dayOfWeekUDFBridge;
            break;
        case "hour":
            udf = new UDFHour();
            break;
        case "minute":
            udf = new UDFMinute();
            break;
        case "month":
            udf = new UDFMonth();
            break;
        case "second":
            udf = new UDFSecond();
            break;
        // NOTE(review): label "yearweek" maps to UDFWeekOfYear (variable names say
        // weekOfYear) — confirm the intended case label against the callers.
        case "yearweek":
            GenericUDFBridge weekOfYearUDFBridge = new GenericUDFBridge();
            weekOfYearUDFBridge.setUdfClassName(UDFWeekOfYear.class.getName());
            udf = weekOfYearUDFBridge;
            break;
        case "year":
            udf = new UDFYear();
            break;
        default:
            throw new RuntimeException("Unexpected extract function name " + extractFunctionName);
    }
    ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc(TypeInfoFactory.intTypeInfo, udf, children);
    final int rowCount = randomRows.length;
    // One result array per test mode; index 0 (ROW_MODE) is the reference result.
    Object[][] resultObjectsArray = new Object[TimestampExtractTestMode.count][];
    for (int i = 0; i < TimestampExtractTestMode.count; i++) {
        Object[] resultObjects = new Object[rowCount];
        resultObjectsArray[i] = resultObjects;
        TimestampExtractTestMode timestampExtractTestMode = TimestampExtractTestMode.values()[i];
        switch(timestampExtractTestMode) {
            case ROW_MODE:
                if (!doRowCastTest(dateTimeStringTypeInfo, columns, children, exprDesc, randomRows, rowSource.rowStructObjectInspector(), resultObjects)) {
                    return;
                }
                break;
            case ADAPTOR:
            case VECTOR_EXPRESSION:
                if (!doVectorCastTest(dateTimeStringTypeInfo, columns, columnNames, rowSource.typeInfos(), rowSource.dataTypePhysicalVariations(), children, exprDesc, timestampExtractTestMode, batchSource, resultObjects)) {
                    return;
                }
                break;
            default:
                throw new RuntimeException("Unexpected IF statement test mode " + timestampExtractTestMode);
        }
    }
    // Compare each vectorized mode's per-row results against the row-mode reference,
    // treating null/non-null mismatches and value mismatches as failures.
    for (int i = 0; i < rowCount; i++) {
        // Row-mode is the expected value.
        Object expectedResult = resultObjectsArray[0][i];
        for (int v = 1; v < TimestampExtractTestMode.count; v++) {
            Object vectorResult = resultObjectsArray[v][i];
            if (expectedResult == null || vectorResult == null) {
                if (expectedResult != null || vectorResult != null) {
                    Assert.fail("Row " + i + " dateTimeStringTypeName " + dateTimeStringTypeName + " extractFunctionName " + extractFunctionName + " " + TimestampExtractTestMode.values()[v] + " result is NULL " + (vectorResult == null ? "YES" : "NO result " + vectorResult.toString()) + " does not match row-mode expected result is NULL " + (expectedResult == null ? "YES" : "NO result " + expectedResult.toString()) + " row values " + Arrays.toString(randomRows[i]));
                }
            } else {
                if (!expectedResult.equals(vectorResult)) {
                    Assert.fail("Row " + i + " dateTimeStringTypeName " + dateTimeStringTypeName + " extractFunctionName " + extractFunctionName + " " + TimestampExtractTestMode.values()[v] + " result " + vectorResult.toString() + " (" + vectorResult.getClass().getSimpleName() + ")" + " does not match row-mode expected result " + expectedResult.toString() + " (" + expectedResult.getClass().getSimpleName() + ")" + " row values " + Arrays.toString(randomRows[i]));
                }
            }
        }
    }
}
Also used : UDFDayOfMonth(org.apache.hadoop.hive.ql.udf.UDFDayOfMonth) ArrayList(java.util.ArrayList) UDFYear(org.apache.hadoop.hive.ql.udf.UDFYear) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) GenericUDFBridge(org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge) DataTypePhysicalVariation(org.apache.hadoop.hive.common.type.DataTypePhysicalVariation) UDFMinute(org.apache.hadoop.hive.ql.udf.UDFMinute) ExprNodeColumnDesc(org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) PrimitiveCategory(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory) UDFHour(org.apache.hadoop.hive.ql.udf.UDFHour) VectorRandomBatchSource(org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource) UDFDayOfWeek(org.apache.hadoop.hive.ql.udf.UDFDayOfWeek) UDFWeekOfYear(org.apache.hadoop.hive.ql.udf.UDFWeekOfYear) ExprNodeGenericFuncDesc(org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) GenerationSpec(org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec) GenericUDF(org.apache.hadoop.hive.ql.udf.generic.GenericUDF) UDFSecond(org.apache.hadoop.hive.ql.udf.UDFSecond) UDFMonth(org.apache.hadoop.hive.ql.udf.UDFMonth) VectorRandomRowSource(org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource)

Example 38 with VectorRandomRowSource

use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource in project hive by apache.

From the class TestVectorMapJoinFastRowHashMap, method testMultiKeyRowsClipped1.

@Test
public void testMultiKeyRowsClipped1() throws Exception {
    random = new Random(2331);
    // A large initial capacity keeps the hash table from expanding during this test.
    VectorMapJoinFastMultiKeyHashMapContainer hashMapContainer =
        new VectorMapJoinFastMultiKeyHashMapContainer(false, LARGE_CAPACITY, LOAD_FACTOR, LARGE_WB_SIZE, -1, 4);
    VerifyFastRowHashMap verify = new VerifyFastRowHashMap();
    VectorRandomRowSource rowSource = new VectorRandomRowSource();
    rowSource.init(random, VectorRandomRowSource.SupportedTypes.ALL, 4,
        /* allowNulls */ false, /* isUnicodeOk */ false);
    final int rowCount = 1000;
    Object[][] randomRows = rowSource.randomRows(rowCount);
    // Multi-column keys (varchar/date/interval) with value clipping enabled.
    addAndVerifyRows(rowSource, randomRows, hashMapContainer, HashTableKeyType.MULTI_KEY, verify,
        new String[] { "varchar(20)", "date", "interval_day_time" },
        /* doClipping */ true, /* useExactBytes */ false);
}
Also used : Random(java.util.Random) VerifyFastRowHashMap(org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.CheckFastRowHashMap.VerifyFastRowHashMap) VectorRandomRowSource(org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource) Test(org.junit.Test)

Example 39 with VectorRandomRowSource

use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource in project hive by apache.

From the class TestVectorMapJoinFastRowHashMap, method testIntRows.

@Test
public void testIntRows() throws Exception {
    random = new Random(927337);
    // A large initial capacity keeps the hash table from expanding during this test.
    VectorMapJoinFastLongHashMapContainer hashMapContainer =
        new VectorMapJoinFastLongHashMapContainer(false, false, HashTableKeyType.INT,
            LARGE_CAPACITY, LOAD_FACTOR, LARGE_WB_SIZE, -1, tableDesc, 4);
    VerifyFastRowHashMap verify = new VerifyFastRowHashMap();
    VectorRandomRowSource rowSource = new VectorRandomRowSource();
    rowSource.init(random, VectorRandomRowSource.SupportedTypes.ALL, 4,
        /* allowNulls */ false, /* isUnicodeOk */ false);
    final int rowCount = 1000;
    Object[][] randomRows = rowSource.randomRows(rowCount);
    // Single int key column; no clipping, no exact-bytes mode.
    addAndVerifyRows(rowSource, randomRows, hashMapContainer, HashTableKeyType.INT, verify,
        new String[] { "int" },
        /* doClipping */ false, /* useExactBytes */ false);
}
Also used : Random(java.util.Random) VerifyFastRowHashMap(org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.CheckFastRowHashMap.VerifyFastRowHashMap) VectorRandomRowSource(org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource) Test(org.junit.Test)

Example 40 with VectorRandomRowSource

use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource in project hive by apache.

From the class TestVectorMapJoinFastRowHashMap, method testStringRowsExact.

@Test
public void testStringRowsExact() throws Exception {
    random = new Random(8235);
    // A large initial capacity keeps the hash table from expanding during this test.
    VectorMapJoinFastStringHashMapContainer hashMapContainer =
        new VectorMapJoinFastStringHashMapContainer(false, LARGE_CAPACITY, LOAD_FACTOR, LARGE_WB_SIZE, -1, tableDesc, 4);
    VerifyFastRowHashMap verify = new VerifyFastRowHashMap();
    VectorRandomRowSource rowSource = new VectorRandomRowSource();
    rowSource.init(random, VectorRandomRowSource.SupportedTypes.ALL, 4,
        /* allowNulls */ false, /* isUnicodeOk */ false);
    final int rowCount = 1000;
    Object[][] randomRows = rowSource.randomRows(rowCount);
    // Single string key column with exact-bytes mode enabled; no clipping.
    addAndVerifyRows(rowSource, randomRows, hashMapContainer, HashTableKeyType.STRING, verify,
        new String[] { "string" },
        /* doClipping */ false, /* useExactBytes */ true);
}
Also used : Random(java.util.Random) VerifyFastRowHashMap(org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.CheckFastRowHashMap.VerifyFastRowHashMap) VectorRandomRowSource(org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource) Test(org.junit.Test)

Aggregations

VectorRandomRowSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource)50 Random (java.util.Random)24 VerifyFastRowHashMap (org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.CheckFastRowHashMap.VerifyFastRowHashMap)24 Test (org.junit.Test)24 TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo)20 ArrayList (java.util.ArrayList)19 DataTypePhysicalVariation (org.apache.hadoop.hive.common.type.DataTypePhysicalVariation)19 VectorRandomBatchSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource)19 ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc)19 ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc)19 GenerationSpec (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec)18 PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)17 DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo)16 GenericUDF (org.apache.hadoop.hive.ql.udf.generic.GenericUDF)15 VectorizedRowBatchCtx (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx)12 ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc)12 ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc)11 ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector)11 PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory)8 HiveConf (org.apache.hadoop.hive.conf.HiveConf)7