
Example 21 with TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo in project hive by apache.

From the class GenericUDFJsonRead, the method initialize:

@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    checkArgsSize(arguments, 2, 2);
    checkArgPrimitive(arguments, 0);
    checkArgPrimitive(arguments, 1);
    if (!ObjectInspectorUtils.isConstantObjectInspector(arguments[1])) {
        throw new UDFArgumentTypeException(1, getFuncName() + " argument 2 may only be a constant");
    }
    inputConverter = new TextConverter((PrimitiveObjectInspector) arguments[0]);
    String typeStr = getConstantStringValue(arguments, 1);
    try {
        final TypeInfo t = TypeInfoUtils.getTypeInfoFromTypeString(typeStr);
        final ObjectInspector oi = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(t);
        jsonReader = new HiveJsonReader(oi);
        jsonReader.enable(Feature.PRIMITIVE_TO_WRITABLE);
    } catch (Exception e) {
        throw new UDFArgumentException(getFuncName() + ": Error parsing typestring: " + e.getMessage());
    }
    return jsonReader.getObjectInspector();
}
Also used: UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException), PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector), ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), HiveJsonReader (org.apache.hadoop.hive.serde2.json.HiveJsonReader), UDFArgumentTypeException (org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException), TextConverter (org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TextConverter), TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo), HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)
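
The core pattern above, parsing a user-supplied type string and resolving it to a standard writable inspector, can be exercised on its own. A minimal sketch, using only the TypeInfoUtils calls shown in this example; the class name and the struct type string are illustrative, not from the Hive sources:

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class TypeStringToInspectorSketch {
    public static void main(String[] args) {
        // Parse a Hive type string into a TypeInfo, as initialize() does with its constant argument.
        TypeInfo t = TypeInfoUtils.getTypeInfoFromTypeString("struct<a:int,b:string>");
        // Resolve the TypeInfo to its standard writable ObjectInspector.
        ObjectInspector oi = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(t);
        // The inspector reports the canonical type name for the parsed type.
        System.out.println(oi.getTypeName());
    }
}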

Example 22 with TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo in project hive by apache.

From the class TestVectorDateDiff, the method doRowDateAddSubTest:

private void doRowDateAddSubTest(TypeInfo dateTimeStringTypeInfo1, TypeInfo dateTimeStringTypeInfo2, List<String> columns, List<ExprNodeDesc> children, ExprNodeGenericFuncDesc exprDesc, Object[][] randomRows, ColumnScalarMode columnScalarMode, ObjectInspector rowInspector, Object[] resultObjects) throws Exception {
    /*
    System.out.println(
        "*DEBUG* dateTimeStringTypeInfo " + dateTimeStringTypeInfo1.toString() +
        " dateTimeStringTypeInfo2 " + dateTimeStringTypeInfo2 +
        " dateDiffTestMode ROW_MODE" +
        " columnScalarMode " + columnScalarMode +
        " exprDesc " + exprDesc.toString());
    */
    HiveConf hiveConf = new HiveConf();
    ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(exprDesc, hiveConf);
    evaluator.initialize(rowInspector);
    ObjectInspector objectInspector = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(TypeInfoFactory.intTypeInfo);
    final int rowCount = randomRows.length;
    for (int i = 0; i < rowCount; i++) {
        Object[] row = randomRows[i];
        Object result = evaluator.evaluate(row);
        Object copyResult = ObjectInspectorUtils.copyToStandardObject(result, objectInspector, ObjectInspectorCopyOption.WRITABLE);
        resultObjects[i] = copyResult;
    }
}
Also used: ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), ExprNodeEvaluator (org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator), HiveConf (org.apache.hadoop.hive.conf.HiveConf)
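
The copy step inside the loop matters because Hive evaluators may reuse their output objects between rows; copying each result to an independent standard writable keeps earlier results from being overwritten. A minimal sketch of just that step, with a hypothetical IntWritable standing in for evaluator.evaluate(row):

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.io.IntWritable;

public class CopyToWritableSketch {
    public static void main(String[] args) {
        // datediff returns int, so the test builds a writable int inspector up front.
        ObjectInspector intOI =
            TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(TypeInfoFactory.intTypeInfo);
        // Stand-in for a result returned by evaluator.evaluate(row).
        Object result = new IntWritable(42);
        // Copy to a fresh standard writable so the stored value survives later rows.
        Object copy = ObjectInspectorUtils.copyToStandardObject(
            result, intOI, ObjectInspectorCopyOption.WRITABLE);
        System.out.println(copy);
    }
}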

Example 23 with TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo in project hive by apache.

From the class TestVectorNull, the method doIsNullOnRandomDataType:

private boolean doIsNullOnRandomDataType(Random random, String functionName, boolean isFilter) throws Exception {
    String typeName;
    if (functionName.equals("not")) {
        typeName = "boolean";
    } else {
        typeName = VectorRandomRowSource.getRandomTypeName(
            random, SupportedTypes.ALL, /* allowedTypeNameSet */ null);
        typeName = VectorRandomRowSource.getDecoratedTypeName(
            random, typeName, SupportedTypes.ALL,
            /* allowedTypeNameSet */ null, /* depth */ 0, /* maxDepth */ 2);
    }
    TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(typeName);
    // ----------------------------------------------------------------------------------------------
    ObjectInspector objectInspector = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(typeInfo);
    // ----------------------------------------------------------------------------------------------
    GenerationSpec generationSpec = GenerationSpec.createSameType(typeInfo);
    List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
    List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList = new ArrayList<DataTypePhysicalVariation>();
    generationSpecList.add(generationSpec);
    explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
    VectorRandomRowSource rowSource = new VectorRandomRowSource();
    rowSource.initGenerationSpecSchema(
        random, generationSpecList, /* maxComplexDepth */ 0,
        /* allowNull */ true, /* isUnicodeOk */ true,
        explicitDataTypePhysicalVariationList);
    List<String> columns = new ArrayList<String>();
    columns.add("col1");
    ExprNodeColumnDesc col1Expr = new ExprNodeColumnDesc(typeInfo, "col1", "table", false);
    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
    children.add(col1Expr);
    String[] columnNames = columns.toArray(new String[0]);
    Object[][] randomRows = rowSource.randomRows(100000);
    VectorRandomBatchSource batchSource = VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);
    final GenericUDF udf;
    final ObjectInspector outputObjectInspector;
    switch(functionName) {
        case "isnull":
            udf = new GenericUDFOPNull();
            break;
        case "isnotnull":
            udf = new GenericUDFOPNotNull();
            break;
        case "not":
            udf = new GenericUDFOPNot();
            break;
        default:
            throw new RuntimeException("Unexpected function name " + functionName);
    }
    ObjectInspector[] argumentOIs = new ObjectInspector[] { objectInspector };
    outputObjectInspector = udf.initialize(argumentOIs);
    TypeInfo outputTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(outputObjectInspector);
    ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, udf, children);
    final int rowCount = randomRows.length;
    Object[][] resultObjectsArray = new Object[NullTestMode.count][];
    for (int i = 0; i < NullTestMode.count; i++) {
        Object[] resultObjects = new Object[rowCount];
        resultObjectsArray[i] = resultObjects;
        NullTestMode nullTestMode = NullTestMode.values()[i];
        switch(nullTestMode) {
            case ROW_MODE:
                if (!doRowCastTest(typeInfo, isFilter, columns, children, udf, exprDesc, randomRows, rowSource.rowStructObjectInspector(), resultObjects)) {
                    return false;
                }
                break;
            case ADAPTOR:
            case VECTOR_EXPRESSION:
                if (!doVectorCastTest(typeInfo, isFilter, columns, columnNames, rowSource.typeInfos(), rowSource.dataTypePhysicalVariations(), children, udf, exprDesc, nullTestMode, batchSource, exprDesc.getWritableObjectInspector(), outputTypeInfo, resultObjects)) {
                    return false;
                }
                break;
            default:
                throw new RuntimeException("Unexpected IF statement test mode " + nullTestMode);
        }
    }
    for (int i = 0; i < rowCount; i++) {
        // Row-mode is the expected value.
        Object expectedResult = resultObjectsArray[0][i];
        for (int v = 1; v < NullTestMode.count; v++) {
            Object vectorResult = resultObjectsArray[v][i];
            NullTestMode nullTestMode = NullTestMode.values()[v];
            if (isFilter && expectedResult == null && vectorResult != null) {
                // For a filter, a NULL row-mode result means the row is filtered out,
                // which is equivalent to a non-NULL vector result of false.
                boolean vectorBoolean = ((BooleanWritable) vectorResult).get();
                if (vectorBoolean) {
                    Assert.fail("Row " + i + " typeName " + typeName + " outputTypeName " + outputTypeInfo.getTypeName() + " isFilter " + isFilter + " " + nullTestMode + " result is NOT NULL and true" + " does not match row-mode expected result is NULL which means false here" + " row values " + Arrays.toString(randomRows[i]) + " exprDesc " + exprDesc.toString());
                }
            } else if (expectedResult == null || vectorResult == null) {
                if (expectedResult != null || vectorResult != null) {
                    Assert.fail("Row " + i + " sourceTypeName " + typeName + " isFilter " + isFilter + " " + nullTestMode + " result is NULL " + (vectorResult == null ? "YES" : "NO result " + vectorResult.toString()) + " does not match row-mode expected result is NULL " + (expectedResult == null ? "YES" : "NO result " + expectedResult.toString()) + " row values " + Arrays.toString(randomRows[i]) + " exprDesc " + exprDesc.toString());
                }
            } else {
                if (!expectedResult.equals(vectorResult)) {
                    Assert.fail("Row " + i + " sourceTypeName " + typeName + " isFilter " + isFilter + " " + nullTestMode + " result " + vectorResult.toString() + " (" + vectorResult.getClass().getSimpleName() + ")" + " does not match row-mode expected result " + expectedResult.toString() + " (" + expectedResult.getClass().getSimpleName() + ")" + " row values " + Arrays.toString(randomRows[i]) + " exprDesc " + exprDesc.toString());
                }
            }
        }
    }
    return true;
}
Also used: ArrayList (java.util.ArrayList), GenericUDFOPNotNull (org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotNull), DataTypePhysicalVariation (org.apache.hadoop.hive.common.type.DataTypePhysicalVariation), ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc), ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc), ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), ConstantObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector), GenericUDFOPNull (org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull), VectorRandomBatchSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource), ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc), PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo), TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo), GenerationSpec (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec), GenericUDF (org.apache.hadoop.hive.ql.udf.generic.GenericUDF), BooleanWritable (org.apache.hadoop.io.BooleanWritable), GenericUDFOPNot (org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNot), VectorRandomRowSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource)
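
Stripped of the randomized schema machinery, the inspector-related steps in this test reduce to: build a writable inspector for the column type, initialize the UDF with it, and map the output inspector back to a TypeInfo. A minimal sketch of those steps, with an illustrative array<string> type (the class name is hypothetical):

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class IsNullInitializeSketch {
    public static void main(String[] args) throws UDFArgumentException {
        // Any type works here; isnull accepts every category, which is why the
        // test can draw random types.
        TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString("array<string>");
        ObjectInspector oi = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(typeInfo);
        GenericUDF udf = new GenericUDFOPNull();
        ObjectInspector outputOI = udf.initialize(new ObjectInspector[] { oi });
        // Round-trip the output inspector back to a TypeInfo, as the test does.
        TypeInfo outputTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(outputOI);
        System.out.println(outputTypeInfo.getTypeName());
    }
}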

Example 24 with TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo in project hive by apache.

From the class TestVectorTimestampExtract, the method doRowCastTest:

private boolean doRowCastTest(TypeInfo dateTimeStringTypeInfo, List<String> columns, List<ExprNodeDesc> children, ExprNodeGenericFuncDesc exprDesc, Object[][] randomRows, ObjectInspector rowInspector, Object[] resultObjects) throws Exception {
    /*
    System.out.println(
        "*DEBUG* dateTimeStringTypeInfo " + dateTimeStringTypeInfo.toString() +
        " timestampExtractTestMode ROW_MODE" +
        " exprDesc " + exprDesc.toString());
    */
    HiveConf hiveConf = new HiveConf();
    ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(exprDesc, hiveConf);
    try {
        evaluator.initialize(rowInspector);
    } catch (HiveException e) {
        return false;
    }
    ObjectInspector objectInspector = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(TypeInfoFactory.intTypeInfo);
    PrimitiveCategory dateTimeStringPrimitiveCategory = ((PrimitiveTypeInfo) dateTimeStringTypeInfo).getPrimitiveCategory();
    final int rowCount = randomRows.length;
    for (int i = 0; i < rowCount; i++) {
        Object[] row = randomRows[i];
        Object object = row[0];
        Object result;
        switch(dateTimeStringPrimitiveCategory) {
            case TIMESTAMP:
                result = evaluator.evaluate((TimestampWritableV2) object);
                break;
            case DATE:
                result = evaluator.evaluate((DateWritableV2) object);
                break;
            case STRING:
                {
                    Text text;
                    if (object == null) {
                        text = null;
                    } else if (object instanceof String) {
                        text = new Text();
                        text.set((String) object);
                    } else {
                        text = (Text) object;
                    }
                    result = evaluator.evaluate(text);
                }
                break;
            default:
                throw new RuntimeException("Unexpected date timestamp string primitive category " + dateTimeStringPrimitiveCategory);
        }
        Object copyResult = ObjectInspectorUtils.copyToStandardObject(result, objectInspector, ObjectInspectorCopyOption.WRITABLE);
        resultObjects[i] = copyResult;
    }
    return true;
}
Also used: ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), HiveException (org.apache.hadoop.hive.ql.metadata.HiveException), ExprNodeEvaluator (org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator), DateWritableV2 (org.apache.hadoop.hive.serde2.io.DateWritableV2), Text (org.apache.hadoop.io.Text), TimestampWritableV2 (org.apache.hadoop.hive.serde2.io.TimestampWritableV2), PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo), HiveConf (org.apache.hadoop.hive.conf.HiveConf), PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory)
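
The switch in this test dispatches on the primitive category of the input TypeInfo to pick the right writable wrapper. A minimal sketch of that dispatch, assuming an illustrative timestamp type string and a hypothetical class name:

import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class PrimitiveCategorySketch {
    public static void main(String[] args) {
        // Illustrative input; the test receives TIMESTAMP, DATE, or STRING type infos.
        TypeInfo t = TypeInfoUtils.getTypeInfoFromTypeString("timestamp");
        // Primitive type infos expose their category for this kind of dispatch.
        PrimitiveCategory category = ((PrimitiveTypeInfo) t).getPrimitiveCategory();
        switch (category) {
            case TIMESTAMP:
                System.out.println("would evaluate a TimestampWritableV2");
                break;
            case DATE:
                System.out.println("would evaluate a DateWritableV2");
                break;
            case STRING:
                System.out.println("would evaluate a Text");
                break;
            default:
                System.out.println("unexpected category: " + category);
        }
    }
}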

Example 25 with TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo in project hive by apache.

From the class TestVectorIndex, the method doIndexOnRandomDataType:

private boolean doIndexOnRandomDataType(Random random, boolean isList, String keyTypeName, String elementRootTypeName, boolean allowNulls, boolean isScalarIndex) throws Exception {
    String elementTypeName = VectorRandomRowSource.getDecoratedTypeName(
        random, elementRootTypeName, SupportedTypes.ALL,
        /* allowedTypeNameSet */ null, /* depth */ 0, /* maxDepth */ 3);
    TypeInfo elementTypeInfo = TypeInfoUtils.getTypeInfoFromTypeString(elementTypeName);
    ObjectInspector elementObjectInspector = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(elementTypeInfo);
    // ----------------------------------------------------------------------------------------------
    final TypeInfo keyTypeInfo;
    if (isList) {
        keyTypeInfo = TypeInfoFactory.intTypeInfo;
    } else {
        keyTypeInfo = TypeInfoUtils.getTypeInfoFromTypeString(keyTypeName);
    }
    final ObjectInspector keyObjectInspector = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(keyTypeInfo);
    Object exampleObject =
        (isList
            ? ((WritableIntObjectInspector) keyObjectInspector).create(0)
            : VectorRandomRowSource.randomWritable(
                random, keyTypeInfo, keyObjectInspector,
                DataTypePhysicalVariation.NONE, /* allowNull */ false));
    WritableComparator writableComparator = WritableComparator.get((Class<? extends WritableComparable>) exampleObject.getClass());
    final int allKeyCount = 10 + random.nextInt(10);
    final int keyCount = 5 + random.nextInt(allKeyCount / 2);
    List<Object> allKeyList = new ArrayList<Object>(allKeyCount);
    Set<Object> allKeyTreeSet = new TreeSet<Object>(writableComparator);
    int fillAllKeyCount = 0;
    while (fillAllKeyCount < allKeyCount) {
        Object object;
        if (isList) {
            WritableIntObjectInspector writableOI = (WritableIntObjectInspector) keyObjectInspector;
            int index = random.nextInt(keyCount);
            object = writableOI.create(index);
            while (allKeyTreeSet.contains(object)) {
                index = (random.nextBoolean() ? random.nextInt() : (random.nextBoolean() ? -1 : keyCount));
                object = writableOI.create(index);
            }
        } else {
            do {
                object = VectorRandomRowSource.randomWritable(
                    random, keyTypeInfo, keyObjectInspector,
                    DataTypePhysicalVariation.NONE, /* allowNull */ false);
            } while (allKeyTreeSet.contains(object));
        }
        allKeyList.add(object);
        allKeyTreeSet.add(object);
        fillAllKeyCount++;
    }
    List<Object> keyList = new ArrayList<Object>();
    Set<Object> keyTreeSet = new TreeSet<Object>(writableComparator);
    int fillKeyCount = 0;
    while (fillKeyCount < keyCount) {
        Object newKey = allKeyList.get(random.nextInt(allKeyCount));
        if (keyTreeSet.contains(newKey)) {
            continue;
        }
        keyList.add(newKey);
        keyTreeSet.add(newKey);
        fillKeyCount++;
    }
    // ----------------------------------------------------------------------------------------------
    final TypeInfo typeInfo;
    if (isList) {
        ListTypeInfo listTypeInfo = new ListTypeInfo();
        listTypeInfo.setListElementTypeInfo(elementTypeInfo);
        typeInfo = listTypeInfo;
    } else {
        MapTypeInfo mapTypeInfo = new MapTypeInfo();
        mapTypeInfo.setMapKeyTypeInfo(keyTypeInfo);
        mapTypeInfo.setMapValueTypeInfo(elementTypeInfo);
        typeInfo = mapTypeInfo;
    }
    final String typeName = typeInfo.getTypeName();
    final ObjectInspector objectInspector = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(typeInfo);
    // ----------------------------------------------------------------------------------------------
    GenerationSpec generationSpec = GenerationSpec.createSameType(typeInfo);
    List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
    List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList = new ArrayList<DataTypePhysicalVariation>();
    List<String> columns = new ArrayList<String>();
    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
    int columnNum = 1;
    ExprNodeDesc keyColExpr;
    if (!isScalarIndex) {
        generationSpecList.add(GenerationSpec.createValueList(keyTypeInfo, keyList));
        explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
        String columnName = "col" + columnNum++;
        columns.add(columnName);
        keyColExpr = new ExprNodeColumnDesc(keyTypeInfo, columnName, "table", false);
    } else {
        Object scalarWritable = keyList.get(random.nextInt(keyCount));
        final Object scalarObject = VectorRandomRowSource.getNonWritableObject(scalarWritable, keyTypeInfo, keyObjectInspector);
        keyColExpr = new ExprNodeConstantDesc(keyTypeInfo, scalarObject);
    }
    /*
    System.out.println("*DEBUG* typeName " + typeName);
    System.out.println("*DEBUG* keyColExpr " + keyColExpr.toString());
    System.out.println("*DEBUG* keyList " + keyList.toString());
    System.out.println("*DEBUG* allKeyList " + allKeyList.toString());
    */
    generationSpecList.add(GenerationSpec.createValueList(typeInfo, keyList));
    explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
    String columnName = "col" + columnNum++;
    columns.add(columnName);
    ExprNodeDesc listOrMapColExpr = new ExprNodeColumnDesc(typeInfo, columnName, "table", false);
    children.add(listOrMapColExpr);
    children.add(keyColExpr);
    VectorRandomRowSource rowSource = new VectorRandomRowSource();
    rowSource.initGenerationSpecSchema(
        random, generationSpecList, /* maxComplexDepth */ 0,
        /* allowNull */ allowNulls, /* isUnicodeOk */ true,
        explicitDataTypePhysicalVariationList);
    String[] columnNames = columns.toArray(new String[0]);
    Object[][] randomRows = rowSource.randomRows(100000);
    VectorRandomBatchSource batchSource = VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);
    final GenericUDF udf = new GenericUDFIndex();
    ObjectInspector[] argumentOIs = new ObjectInspector[2];
    argumentOIs[0] = objectInspector;
    argumentOIs[1] = keyObjectInspector;
    final ObjectInspector outputObjectInspector = udf.initialize(argumentOIs);
    TypeInfo outputTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(outputObjectInspector);
    ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc(elementTypeInfo, udf, children);
    System.out.println("here");
    final int rowCount = randomRows.length;
    Object[][] resultObjectsArray = new Object[IndexTestMode.count][];
    for (int i = 0; i < IndexTestMode.count; i++) {
        Object[] resultObjects = new Object[rowCount];
        resultObjectsArray[i] = resultObjects;
        IndexTestMode indexTestMode = IndexTestMode.values()[i];
        switch(indexTestMode) {
            case ROW_MODE:
                if (!doRowCastTest(typeInfo, columns, children, udf, exprDesc, randomRows, rowSource.rowStructObjectInspector(), elementObjectInspector, outputTypeInfo, resultObjects)) {
                    return false;
                }
                break;
            case ADAPTOR:
            case VECTOR_EXPRESSION:
                if (!doVectorCastTest(typeInfo, columns, columnNames, rowSource.typeInfos(), rowSource.dataTypePhysicalVariations(), children, udf, exprDesc, indexTestMode, batchSource, exprDesc.getWritableObjectInspector(), outputTypeInfo, resultObjects)) {
                    return false;
                }
                break;
            default:
                throw new RuntimeException("Unexpected IF statement test mode " + indexTestMode);
        }
    }
    for (int i = 0; i < rowCount; i++) {
        // Row-mode is the expected value.
        Object expectedResult = resultObjectsArray[0][i];
        for (int v = 1; v < IndexTestMode.count; v++) {
            Object vectorResult = resultObjectsArray[v][i];
            IndexTestMode indexTestMode = IndexTestMode.values()[v];
            if (expectedResult == null || vectorResult == null) {
                if (expectedResult != null || vectorResult != null) {
                    Assert.fail("Row " + i + " sourceTypeName " + typeName + " " + indexTestMode + " result is NULL " + (vectorResult == null ? "YES" : "NO result " + vectorResult.toString()) + " does not match row-mode expected result is NULL " + (expectedResult == null ? "YES" : "NO result " + expectedResult.toString()) + " row values " + Arrays.toString(randomRows[i]) + " exprDesc " + exprDesc.toString());
                }
            } else {
                if (!expectedResult.equals(vectorResult)) {
                    Assert.fail("Row " + i + " sourceTypeName " + typeName + " " + indexTestMode + " result " + vectorResult.toString() + " (" + vectorResult.getClass().getSimpleName() + ")" + " does not match row-mode expected result " + expectedResult.toString() + " (" + expectedResult.getClass().getSimpleName() + ")" + " row values " + Arrays.toString(randomRows[i]) + " exprDesc " + exprDesc.toString());
                }
            }
        }
    }
    return true;
}
Also used: ArrayList (java.util.ArrayList), DataTypePhysicalVariation (org.apache.hadoop.hive.common.type.DataTypePhysicalVariation), TreeSet (java.util.TreeSet), ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc), ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc), WritableIntObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableIntObjectInspector), ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), ConstantObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector), ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc), WritableComparator (org.apache.hadoop.io.WritableComparator), VectorRandomBatchSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource), ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc), MapTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo), ListTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo), DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo), TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo), GenerationSpec (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec), GenericUDFIndex (org.apache.hadoop.hive.ql.udf.generic.GenericUDFIndex), GenericUDF (org.apache.hadoop.hive.ql.udf.generic.GenericUDF), VectorRandomRowSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource)
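
Unlike the earlier examples, this one builds complex TypeInfos programmatically before asking for the standard writable inspector. A minimal sketch of just that construction, using the same ListTypeInfo/MapTypeInfo setters as the test (the class name is hypothetical):

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class ComplexTypeInspectorSketch {
    public static void main(String[] args) {
        // Assemble a list<int> TypeInfo by hand, as the isList branch above does.
        ListTypeInfo listTypeInfo = new ListTypeInfo();
        listTypeInfo.setListElementTypeInfo(TypeInfoFactory.intTypeInfo);
        ObjectInspector listOI =
            TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(listTypeInfo);
        System.out.println(listOI.getTypeName());

        // And a map<string,int> for the map branch.
        MapTypeInfo mapTypeInfo = new MapTypeInfo();
        mapTypeInfo.setMapKeyTypeInfo(TypeInfoFactory.stringTypeInfo);
        mapTypeInfo.setMapValueTypeInfo(TypeInfoFactory.intTypeInfo);
        ObjectInspector mapOI =
            TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(mapTypeInfo);
        System.out.println(mapOI.getTypeName());
    }
}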

Aggregations

ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 44
TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo): 31
ArrayList (java.util.ArrayList): 22
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 17
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 13
HiveConf (org.apache.hadoop.hive.conf.HiveConf): 12
DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo): 11
ExprNodeEvaluator (org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator): 10
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc): 10
ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc): 9
StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector): 9
DataTypePhysicalVariation (org.apache.hadoop.hive.common.type.DataTypePhysicalVariation): 8
ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc): 8
GenericUDF (org.apache.hadoop.hive.ql.udf.generic.GenericUDF): 8
VectorRandomBatchSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource): 7
VectorRandomRowSource (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource): 7
GenerationSpec (org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec): 7
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 7
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector): 6
CharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo): 6