Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource in project hive by apache.
From the class TestVectorArithmetic, method doTestsWithDiffColumnScalar:
private void doTestsWithDiffColumnScalar(Random random, TypeInfo typeInfo1, TypeInfo typeInfo2,
    ColumnScalarMode columnScalarMode, Arithmetic arithmetic, boolean tryDecimal64) throws Exception {
  String typeName1 = typeInfo1.getTypeName();
  PrimitiveCategory primitiveCategory1 = ((PrimitiveTypeInfo) typeInfo1).getPrimitiveCategory();
  String typeName2 = typeInfo2.getTypeName();
  PrimitiveCategory primitiveCategory2 = ((PrimitiveTypeInfo) typeInfo2).getPrimitiveCategory();
  List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
  List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList =
      new ArrayList<DataTypePhysicalVariation>();
  List<String> columns = new ArrayList<String>();
  int columnNum = 1;
  ExprNodeDesc col1Expr;
  Object scalar1Object = null;
  final boolean decimal64Enable1 = checkDecimal64(tryDecimal64, typeInfo1);
  if (columnScalarMode == ColumnScalarMode.COLUMN_COLUMN
      || columnScalarMode == ColumnScalarMode.COLUMN_SCALAR) {
    generationSpecList.add(GenerationSpec.createSameType(typeInfo1));
    explicitDataTypePhysicalVariationList.add(
        decimal64Enable1 ? DataTypePhysicalVariation.DECIMAL_64 : DataTypePhysicalVariation.NONE);
    String columnName = "col" + (columnNum++);
    col1Expr = new ExprNodeColumnDesc(typeInfo1, columnName, "table", false);
    columns.add(columnName);
  } else {
    scalar1Object =
        VectorRandomRowSource.randomPrimitiveObject(random, (PrimitiveTypeInfo) typeInfo1);
    // Adjust the decimal type to the scalar's type...
    if (typeInfo1 instanceof DecimalTypeInfo) {
      typeInfo1 = getDecimalScalarTypeInfo(scalar1Object);
    }
    col1Expr = new ExprNodeConstantDesc(typeInfo1, scalar1Object);
  }
  ExprNodeDesc col2Expr;
  Object scalar2Object = null;
  final boolean decimal64Enable2 = checkDecimal64(tryDecimal64, typeInfo2);
  if (columnScalarMode == ColumnScalarMode.COLUMN_COLUMN
      || columnScalarMode == ColumnScalarMode.SCALAR_COLUMN) {
    generationSpecList.add(GenerationSpec.createSameType(typeInfo2));
    explicitDataTypePhysicalVariationList.add(
        decimal64Enable2 ? DataTypePhysicalVariation.DECIMAL_64 : DataTypePhysicalVariation.NONE);
    String columnName = "col" + (columnNum++);
    col2Expr = new ExprNodeColumnDesc(typeInfo2, columnName, "table", false);
    columns.add(columnName);
  } else {
    scalar2Object =
        VectorRandomRowSource.randomPrimitiveObject(random, (PrimitiveTypeInfo) typeInfo2);
    // Adjust the decimal type to the scalar's type...
    if (typeInfo2 instanceof DecimalTypeInfo) {
      typeInfo2 = getDecimalScalarTypeInfo(scalar2Object);
    }
    col2Expr = new ExprNodeConstantDesc(typeInfo2, scalar2Object);
  }
  List<ObjectInspector> objectInspectorList = new ArrayList<ObjectInspector>();
  objectInspectorList.add(VectorRandomRowSource.getObjectInspector(typeInfo1));
  objectInspectorList.add(VectorRandomRowSource.getObjectInspector(typeInfo2));
  List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
  children.add(col1Expr);
  children.add(col2Expr);
  // ----------------------------------------------------------------------------------------------
  String[] columnNames = columns.toArray(new String[0]);
  VectorRandomRowSource rowSource = new VectorRandomRowSource();
  rowSource.initGenerationSpecSchema(random, generationSpecList,
      /* maxComplexDepth */ 0, /* allowNull */ true, /* isUnicodeOk */ true,
      explicitDataTypePhysicalVariationList);
  Object[][] randomRows = rowSource.randomRows(100000);
  VectorRandomBatchSource batchSource =
      VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);
  GenericUDF genericUdf;
  switch (arithmetic) {
    case ADD:
      genericUdf = new GenericUDFOPPlus();
      break;
    case SUBTRACT:
      genericUdf = new GenericUDFOPMinus();
      break;
    case MULTIPLY:
      genericUdf = new GenericUDFOPMultiply();
      break;
    case DIVIDE:
      genericUdf = new GenericUDFOPDivide();
      break;
    case MODULUS:
      genericUdf = new GenericUDFOPMod();
      break;
    default:
      throw new RuntimeException("Unexpected arithmetic " + arithmetic);
  }
  ObjectInspector[] objectInspectors =
      objectInspectorList.toArray(new ObjectInspector[objectInspectorList.size()]);
  ObjectInspector outputObjectInspector = null;
  try {
    outputObjectInspector = genericUdf.initialize(objectInspectors);
  } catch (Exception e) {
    Assert.fail(e.toString());
  }
  TypeInfo outputTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(outputObjectInspector);
  ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc(outputTypeInfo, genericUdf, children);
  final int rowCount = randomRows.length;
  Object[][] resultObjectsArray = new Object[ArithmeticTestMode.count][];
  for (int i = 0; i < ArithmeticTestMode.count; i++) {
    Object[] resultObjects = new Object[rowCount];
    resultObjectsArray[i] = resultObjects;
    ArithmeticTestMode arithmeticTestMode = ArithmeticTestMode.values()[i];
    switch (arithmeticTestMode) {
      case ROW_MODE:
        doRowArithmeticTest(typeInfo1, typeInfo2, columns, children, exprDesc, arithmetic,
            randomRows, columnScalarMode, rowSource.rowStructObjectInspector(), outputTypeInfo,
            resultObjects);
        break;
      case ADAPTOR:
      case VECTOR_EXPRESSION:
        doVectorArithmeticTest(typeInfo1, typeInfo2, columns, columnNames, rowSource.typeInfos(),
            rowSource.dataTypePhysicalVariations(), children, exprDesc, arithmetic,
            arithmeticTestMode, columnScalarMode, batchSource,
            exprDesc.getWritableObjectInspector(), outputTypeInfo, resultObjects);
        break;
      default:
        throw new RuntimeException("Unexpected arithmetic test mode " + arithmeticTestMode);
    }
  }
  for (int i = 0; i < rowCount; i++) {
    // Row-mode is the expected value.
    Object expectedResult = resultObjectsArray[0][i];
    for (int v = 1; v < ArithmeticTestMode.count; v++) {
      Object vectorResult = resultObjectsArray[v][i];
      if (expectedResult == null || vectorResult == null) {
        if (expectedResult != null || vectorResult != null) {
          Assert.fail("Row " + i + " typeName1 " + typeName1 + " typeName2 " + typeName2
              + " outputTypeName " + outputTypeInfo.getTypeName() + " " + arithmetic
              + " " + ArithmeticTestMode.values()[v] + " " + columnScalarMode
              + " result is NULL " + (vectorResult == null)
              + " does not match row-mode expected result is NULL " + (expectedResult == null)
              + (columnScalarMode == ColumnScalarMode.SCALAR_COLUMN
                  ? " scalar1 " + scalar1Object.toString() : "")
              + " row values " + Arrays.toString(randomRows[i])
              + (columnScalarMode == ColumnScalarMode.COLUMN_SCALAR
                  ? " scalar2 " + scalar2Object.toString() : ""));
        }
      } else {
        if (!expectedResult.equals(vectorResult)) {
          Assert.fail("Row " + i + " typeName1 " + typeName1 + " typeName2 " + typeName2
              + " outputTypeName " + outputTypeInfo.getTypeName() + " " + arithmetic
              + " " + ArithmeticTestMode.values()[v] + " " + columnScalarMode
              + " result " + vectorResult.toString()
              + " (" + vectorResult.getClass().getSimpleName() + ")"
              + " does not match row-mode expected result " + expectedResult.toString()
              + " (" + expectedResult.getClass().getSimpleName() + ")"
              + (columnScalarMode == ColumnScalarMode.SCALAR_COLUMN
                  ? " scalar1 " + scalar1Object.toString() : "")
              + " row values " + Arrays.toString(randomRows[i])
              + (columnScalarMode == ColumnScalarMode.COLUMN_SCALAR
                  ? " scalar2 " + scalar2Object.toString() : ""));
        }
      }
    }
  }
}
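All of the snippets in this section share one setup pattern: describe a schema with GenerationSpec entries, let VectorRandomRowSource generate random rows, and wrap them in VectorRandomBatchSource batches with varied sizes and null patterns. A minimal, self-contained sketch of that pattern follows; the import paths and the GenerationSpec inner class are assumed from the Hive source tree, so treat it as illustrative rather than a drop-in test.

// A minimal sketch of the row-generation harness these tests share.
// Package paths are assumed from the Hive source tree; only the calls
// that appear in the snippets above are used.
import java.util.ArrayList;
import java.util.List;
import java.util.Random;

import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
import org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource;
import org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource;
import org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class RowSourceSketch {
  public static void main(String[] args) throws Exception {
    Random random = new Random(12345);

    // Describe the schema: a single INT column, no decimal-64 variation.
    List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
    generationSpecList.add(GenerationSpec.createSameType(TypeInfoFactory.intTypeInfo));
    List<DataTypePhysicalVariation> variations = new ArrayList<DataTypePhysicalVariation>();
    variations.add(DataTypePhysicalVariation.NONE);

    // Generate random rows and wrap them in "interesting" batches for
    // vector-expression testing, as the Hive tests do.
    VectorRandomRowSource rowSource = new VectorRandomRowSource();
    rowSource.initGenerationSpecSchema(random, generationSpecList,
        /* maxComplexDepth */ 0, /* allowNull */ true, /* isUnicodeOk */ true,
        variations);
    Object[][] randomRows = rowSource.randomRows(100000);
    VectorRandomBatchSource batchSource =
        VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);
    System.out.println("Generated " + randomRows.length + " rows");
  }
}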
Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource in project hive by apache.
From the class TestVectorIndex, method doVectorCastTest:
private boolean doVectorCastTest(TypeInfo typeInfo, List<String> columns, String[] columnNames,
    TypeInfo[] typeInfos, DataTypePhysicalVariation[] dataTypePhysicalVariations,
    List<ExprNodeDesc> children, GenericUDF udf, ExprNodeGenericFuncDesc exprDesc,
    IndexTestMode indexTestMode, VectorRandomBatchSource batchSource,
    ObjectInspector objectInspector, TypeInfo outputTypeInfo, Object[] resultObjects)
    throws Exception {
  HiveConf hiveConf = new HiveConf();
  if (indexTestMode == IndexTestMode.ADAPTOR) {
    hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_TEST_VECTOR_ADAPTOR_OVERRIDE, true);
  }
  VectorizationContext vectorizationContext = new VectorizationContext("name", columns,
      Arrays.asList(typeInfos), Arrays.asList(dataTypePhysicalVariations), hiveConf);
  VectorExpression vectorExpression = vectorizationContext.getVectorExpression(
      exprDesc, VectorExpressionDescriptor.Mode.PROJECTION);
  vectorExpression.transientInit(hiveConf);
  if (indexTestMode == IndexTestMode.VECTOR_EXPRESSION
      && vectorExpression instanceof VectorUDFAdaptor) {
    System.out.println("*NO NATIVE VECTOR EXPRESSION* typeInfo " + typeInfo.toString()
        + " indexTestMode " + indexTestMode
        + " vectorExpression " + vectorExpression.toString());
  }
  System.out.println("*VECTOR EXPRESSION* " + vectorExpression.getClass().getSimpleName());
  /*
  System.out.println(
      "*DEBUG* typeInfo " + typeInfo.toString() +
      " indexTestMode " + indexTestMode +
      " vectorExpression " + vectorExpression.toString());
  */
  VectorRandomRowSource rowSource = batchSource.getRowSource();
  VectorizedRowBatchCtx batchContext = new VectorizedRowBatchCtx(columnNames,
      rowSource.typeInfos(), rowSource.dataTypePhysicalVariations(),
      /* dataColumnNums */ null, /* partitionColumnCount */ 0, /* virtualColumnCount */ 0,
      /* neededVirtualColumns */ null, vectorizationContext.getScratchColumnTypeNames(),
      vectorizationContext.getScratchDataTypePhysicalVariations());
  VectorizedRowBatch batch = batchContext.createVectorizedRowBatch();
  VectorExtractRow resultVectorExtractRow = new VectorExtractRow();
  resultVectorExtractRow.init(new TypeInfo[] { outputTypeInfo },
      new int[] { vectorExpression.getOutputColumnNum() });
  Object[] scratchRow = new Object[1];
  batchSource.resetBatchIteration();
  int rowIndex = 0;
  while (true) {
    if (!batchSource.fillNextBatch(batch)) {
      break;
    }
    vectorExpression.evaluate(batch);
    extractResultObjects(batch, rowIndex, resultVectorExtractRow, scratchRow, objectInspector,
        resultObjects);
    rowIndex += batch.size;
  }
  return true;
}
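The while-loop above is the evaluation skeleton every vectorization test in this section uses: refill the batch until the source is exhausted, evaluate the vector expression, then extract results by row offset. A hedged sketch of that skeleton follows, assuming the harness objects are already constructed as in the snippets; the private extractResultObjects helper is not public API, so it is only indicated by a comment.

// Batch-evaluation skeleton shared by these tests. Package paths are
// assumed from the Hive source tree.
import org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;

public class EvaluateLoopSketch {
  static void evaluateAll(VectorRandomBatchSource batchSource, VectorizedRowBatch batch,
      VectorExpression vectorExpression, Object[] resultObjects) throws Exception {
    batchSource.resetBatchIteration();
    int rowIndex = 0;
    while (batchSource.fillNextBatch(batch)) {
      // Runs the compiled vector expression over the whole batch at once.
      vectorExpression.evaluate(batch);
      // Here the tests call their private extractResultObjects(...), which
      // materializes each output value into resultObjects[rowIndex + i].
      rowIndex += batch.size;
    }
  }
}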
Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource in project hive by apache.
From the class TestVectorCastStatement, method doVectorCastTest:
private boolean doVectorCastTest(TypeInfo typeInfo, TypeInfo targetTypeInfo, List<String> columns,
    String[] columnNames, TypeInfo[] typeInfos,
    DataTypePhysicalVariation[] dataTypePhysicalVariations, List<ExprNodeDesc> children,
    CastStmtTestMode castStmtTestMode, VectorRandomBatchSource batchSource,
    Object[] resultObjects) throws Exception {
  GenericUDF udf;
  try {
    udf = VectorizationContext.getGenericUDFForCast(targetTypeInfo);
  } catch (HiveException e) {
    return false;
  }
  ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc(targetTypeInfo, udf, children);
  HiveConf hiveConf = new HiveConf();
  if (castStmtTestMode == CastStmtTestMode.ADAPTOR) {
    hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_TEST_VECTOR_ADAPTOR_OVERRIDE, true);
  }
  VectorizationContext vectorizationContext = new VectorizationContext("name", columns,
      Arrays.asList(typeInfos), Arrays.asList(dataTypePhysicalVariations), hiveConf);
  VectorExpression vectorExpression = vectorizationContext.getVectorExpression(exprDesc);
  vectorExpression.transientInit(hiveConf);
  if (castStmtTestMode == CastStmtTestMode.VECTOR_EXPRESSION
      && vectorExpression instanceof VectorUDFAdaptor) {
    System.out.println("*NO NATIVE VECTOR EXPRESSION* typeInfo " + typeInfo.toString()
        + " castStmtTestMode " + castStmtTestMode
        + " vectorExpression " + vectorExpression.toString());
  }
  // System.out.println("*VECTOR EXPRESSION* " + vectorExpression.getClass().getSimpleName());
  /*
  System.out.println(
      "*DEBUG* typeInfo " + typeInfo.toString() +
      " targetTypeInfo " + targetTypeInfo +
      " castStmtTestMode " + castStmtTestMode +
      " vectorExpression " + vectorExpression.toString());
  */
  VectorRandomRowSource rowSource = batchSource.getRowSource();
  VectorizedRowBatchCtx batchContext = new VectorizedRowBatchCtx(columnNames,
      rowSource.typeInfos(), rowSource.dataTypePhysicalVariations(),
      /* dataColumnNums */ null, /* partitionColumnCount */ 0, /* virtualColumnCount */ 0,
      /* neededVirtualColumns */ null, vectorizationContext.getScratchColumnTypeNames(),
      vectorizationContext.getScratchDataTypePhysicalVariations());
  VectorizedRowBatch batch = batchContext.createVectorizedRowBatch();
  VectorExtractRow resultVectorExtractRow = new VectorExtractRow();
  resultVectorExtractRow.init(new TypeInfo[] { targetTypeInfo },
      new int[] { vectorExpression.getOutputColumnNum() });
  Object[] scratchRow = new Object[1];
  batchSource.resetBatchIteration();
  int rowIndex = 0;
  while (true) {
    if (!batchSource.fillNextBatch(batch)) {
      break;
    }
    vectorExpression.evaluate(batch);
    extractResultObjects(batch, rowIndex, resultVectorExtractRow, scratchRow, resultObjects);
    rowIndex += batch.size;
  }
  return true;
}
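Two details of this snippet carry over to the other cast tests: VectorizationContext.getGenericUDFForCast resolves the row-mode UDF for a target type (and throws when no cast exists), and the HIVE_TEST_VECTOR_ADAPTOR_OVERRIDE flag forces expressions through VectorUDFAdaptor so the row-mode UDF runs inside the vectorized pipeline. A small hedged sketch; the chosen target type and the printed output are illustrative only.

// Sketch of cast-UDF resolution and the adaptor-override toggle. The flag
// name and getGenericUDFForCast call are taken verbatim from the snippet;
// the decimal target type here is an illustrative choice.
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class CastUdfLookupSketch {
  public static void main(String[] args) {
    GenericUDF udf;
    try {
      // Resolves the cast UDF for the target type; throws HiveException
      // when no cast to that type exists.
      udf = VectorizationContext.getGenericUDFForCast(TypeInfoFactory.decimalTypeInfo);
    } catch (HiveException e) {
      System.err.println("No cast UDF for target type: " + e);
      return;
    }
    // Forcing every expression through VectorUDFAdaptor exercises the
    // row-mode UDF inside the vectorized pipeline.
    HiveConf hiveConf = new HiveConf();
    hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_TEST_VECTOR_ADAPTOR_OVERRIDE, true);
    System.out.println("Resolved cast UDF: " + udf.getClass().getSimpleName());
  }
}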
Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource in project hive by apache.
From the class TestVectorStringConcat, method doStringConcatTestsWithDiffColumnScalar:
private void doStringConcatTestsWithDiffColumnScalar(Random random, String stringTypeName1,
    String stringTypeName2, ColumnScalarMode columnScalarMode) throws Exception {
  TypeInfo stringTypeInfo1 = TypeInfoUtils.getTypeInfoFromTypeString(stringTypeName1);
  PrimitiveCategory stringPrimitiveCategory1 =
      ((PrimitiveTypeInfo) stringTypeInfo1).getPrimitiveCategory();
  TypeInfo stringTypeInfo2 = TypeInfoUtils.getTypeInfoFromTypeString(stringTypeName2);
  PrimitiveCategory stringPrimitiveCategory2 =
      ((PrimitiveTypeInfo) stringTypeInfo2).getPrimitiveCategory();
  String functionName = "concat";
  List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
  List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList =
      new ArrayList<DataTypePhysicalVariation>();
  List<String> columns = new ArrayList<String>();
  int columnNum = 1;
  ExprNodeDesc col1Expr;
  if (columnScalarMode == ColumnScalarMode.COLUMN_COLUMN
      || columnScalarMode == ColumnScalarMode.COLUMN_SCALAR) {
    generationSpecList.add(GenerationSpec.createSameType(stringTypeInfo1));
    explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
    String columnName = "col" + (columnNum++);
    col1Expr = new ExprNodeColumnDesc(stringTypeInfo1, columnName, "table", false);
    columns.add(columnName);
  } else {
    Object scalar1Object =
        VectorRandomRowSource.randomPrimitiveObject(random, (PrimitiveTypeInfo) stringTypeInfo1);
    col1Expr = new ExprNodeConstantDesc(stringTypeInfo1, scalar1Object);
  }
  ExprNodeDesc col2Expr;
  if (columnScalarMode == ColumnScalarMode.COLUMN_COLUMN
      || columnScalarMode == ColumnScalarMode.SCALAR_COLUMN) {
    generationSpecList.add(GenerationSpec.createSameType(stringTypeInfo2));
    explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
    String columnName = "col" + (columnNum++);
    col2Expr = new ExprNodeColumnDesc(stringTypeInfo2, columnName, "table", false);
    columns.add(columnName);
  } else {
    Object scalar2Object =
        VectorRandomRowSource.randomPrimitiveObject(random, (PrimitiveTypeInfo) stringTypeInfo2);
    col2Expr = new ExprNodeConstantDesc(stringTypeInfo2, scalar2Object);
  }
  List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
  children.add(col1Expr);
  children.add(col2Expr);
  // ----------------------------------------------------------------------------------------------
  String[] columnNames = columns.toArray(new String[0]);
  VectorRandomRowSource rowSource = new VectorRandomRowSource();
  rowSource.initGenerationSpecSchema(random, generationSpecList,
      /* maxComplexDepth */ 0, /* allowNull */ true, /* isUnicodeOk */ true,
      explicitDataTypePhysicalVariationList);
  Object[][] randomRows = rowSource.randomRows(100000);
  VectorRandomBatchSource batchSource =
      VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);
  String[] outputScratchTypeNames = new String[] { "string" };
  VectorizedRowBatchCtx batchContext = new VectorizedRowBatchCtx(columnNames,
      rowSource.typeInfos(), rowSource.dataTypePhysicalVariations(),
      /* dataColumnNums */ null, /* partitionColumnCount */ 0, /* virtualColumnCount */ 0,
      /* neededVirtualColumns */ null, outputScratchTypeNames, null);
  GenericUDF genericUdf;
  FunctionInfo funcInfo = null;
  try {
    funcInfo = FunctionRegistry.getFunctionInfo(functionName);
  } catch (SemanticException e) {
    Assert.fail("Failed to load " + functionName + " " + e);
  }
  genericUdf = funcInfo.getGenericUDF();
  final int rowCount = randomRows.length;
  Object[][] resultObjectsArray = new Object[StringConcatTestMode.count][];
  for (int i = 0; i < StringConcatTestMode.count; i++) {
    Object[] resultObjects = new Object[rowCount];
    resultObjectsArray[i] = resultObjects;
    StringConcatTestMode stringConcatTestMode = StringConcatTestMode.values()[i];
    switch (stringConcatTestMode) {
      case ROW_MODE:
        doRowStringConcatTest(stringTypeInfo1, stringTypeInfo2, columns, children, randomRows,
            columnScalarMode, rowSource.rowStructObjectInspector(), genericUdf, resultObjects);
        break;
      case ADAPTOR:
      case VECTOR_EXPRESSION:
        doVectorStringConcatTest(stringTypeInfo1, stringTypeInfo2, columns, rowSource.typeInfos(),
            children, stringConcatTestMode, columnScalarMode, batchSource, batchContext,
            rowSource.rowStructObjectInspector(), genericUdf, resultObjects);
        break;
      default:
        throw new RuntimeException("Unexpected string concat test mode " + stringConcatTestMode);
    }
  }
  for (int i = 0; i < rowCount; i++) {
    // Row-mode is the expected value.
    Object expectedResult = resultObjectsArray[0][i];
    for (int v = 1; v < StringConcatTestMode.count; v++) {
      Object vectorResult = resultObjectsArray[v][i];
      if (expectedResult == null || vectorResult == null) {
        if (expectedResult != null || vectorResult != null) {
          Assert.fail("Row " + i + " " + StringConcatTestMode.values()[v] + " " + columnScalarMode
              + " result is NULL " + (vectorResult == null)
              + " does not match row-mode expected result is NULL " + (expectedResult == null)
              + " row values " + Arrays.toString(randomRows[i]));
        }
      } else {
        if (!expectedResult.equals(vectorResult)) {
          Assert.fail("Row " + i + " " + StringConcatTestMode.values()[v] + " " + columnScalarMode
              + " result \"" + vectorResult.toString() + "\""
              + " (" + vectorResult.getClass().getSimpleName() + ")"
              + " does not match row-mode expected result \"" + expectedResult.toString() + "\""
              + " (" + expectedResult.getClass().getSimpleName() + ")"
              + " row values " + Arrays.toString(randomRows[i]));
        }
      }
    }
  }
}
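Unlike the arithmetic test, which instantiates UDF classes directly, this test resolves the UDF by name through FunctionRegistry. A minimal sketch of that lookup, with the test's Assert.fail replaced by ordinary error handling; package paths are assumed from the Hive source tree.

// Looking up a GenericUDF by name, as the concat test does.
import org.apache.hadoop.hive.ql.exec.FunctionInfo;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;

public class UdfLookupSketch {
  public static void main(String[] args) {
    try {
      // getFunctionInfo consults Hive's built-in function registry.
      FunctionInfo funcInfo = FunctionRegistry.getFunctionInfo("concat");
      GenericUDF genericUdf = funcInfo.getGenericUDF();
      System.out.println("concat resolves to " + genericUdf.getClass().getSimpleName());
    } catch (SemanticException e) {
      System.err.println("Failed to load concat: " + e);
    }
  }
}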
Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource in project hive by apache.
From the class TestVectorStringUnary, method doTests:
private void doTests(Random random, String typeName, String functionName) throws Exception {
  TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(typeName);
  TypeInfo targetTypeInfo;
  if (functionName.equals("char_length") || functionName.equals("length")
      || functionName.equals("octet_length")) {
    targetTypeInfo = TypeInfoFactory.intTypeInfo;
  } else {
    targetTypeInfo = typeInfo;
  }
  List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
  List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList =
      new ArrayList<DataTypePhysicalVariation>();
  List<String> columns = new ArrayList<String>();
  int columnNum = 1;
  ExprNodeDesc col1Expr;
  StringGenerationOption stringGenerationOption = new StringGenerationOption(true, true);
  generationSpecList.add(GenerationSpec.createStringFamily(typeInfo, stringGenerationOption));
  explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
  String columnName = "col" + (columnNum++);
  col1Expr = new ExprNodeColumnDesc(typeInfo, columnName, "table", false);
  columns.add(columnName);
  VectorRandomRowSource rowSource = new VectorRandomRowSource();
  rowSource.initGenerationSpecSchema(random, generationSpecList,
      /* maxComplexDepth */ 0, /* allowNull */ true, /* isUnicodeOk */ true,
      explicitDataTypePhysicalVariationList);
  List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
  children.add(col1Expr);
  // ----------------------------------------------------------------------------------------------
  String[] columnNames = columns.toArray(new String[0]);
  String[] outputScratchTypeNames = new String[] { targetTypeInfo.getTypeName() };
  DataTypePhysicalVariation[] outputDataTypePhysicalVariations =
      new DataTypePhysicalVariation[] { DataTypePhysicalVariation.NONE };
  VectorizedRowBatchCtx batchContext = new VectorizedRowBatchCtx(columnNames,
      rowSource.typeInfos(), rowSource.dataTypePhysicalVariations(),
      /* dataColumnNums */ null, /* partitionColumnCount */ 0, /* virtualColumnCount */ 0,
      /* neededVirtualColumns */ null, outputScratchTypeNames, outputDataTypePhysicalVariations);
  Object[][] randomRows = rowSource.randomRows(100000);
  VectorRandomBatchSource batchSource =
      VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);
  GenericUDF genericUdf;
  FunctionInfo funcInfo = null;
  try {
    funcInfo = FunctionRegistry.getFunctionInfo(functionName);
  } catch (SemanticException e) {
    Assert.fail("Failed to load " + functionName + " " + e);
  }
  genericUdf = funcInfo.getGenericUDF();
  final int rowCount = randomRows.length;
  Object[][] resultObjectsArray = new Object[StringUnaryTestMode.count][];
  for (int i = 0; i < StringUnaryTestMode.count; i++) {
    Object[] resultObjects = new Object[rowCount];
    resultObjectsArray[i] = resultObjects;
    StringUnaryTestMode stringUnaryTestMode = StringUnaryTestMode.values()[i];
    switch (stringUnaryTestMode) {
      case ROW_MODE:
        doRowIfTest(typeInfo, targetTypeInfo, columns, children, randomRows,
            rowSource.rowStructObjectInspector(), genericUdf, resultObjects);
        break;
      case ADAPTOR:
      case VECTOR_EXPRESSION:
        doVectorIfTest(typeInfo, targetTypeInfo, columns, rowSource.typeInfos(),
            rowSource.dataTypePhysicalVariations(), children, stringUnaryTestMode, batchSource,
            batchContext, genericUdf, resultObjects);
        break;
      default:
        throw new RuntimeException("Unexpected STRING Unary test mode " + stringUnaryTestMode);
    }
  }
  for (int i = 0; i < rowCount; i++) {
    // Row-mode is the expected value.
    Object expectedResult = resultObjectsArray[0][i];
    for (int v = 1; v < StringUnaryTestMode.count; v++) {
      Object vectorResult = resultObjectsArray[v][i];
      if (expectedResult == null || vectorResult == null) {
        if (expectedResult != null || vectorResult != null) {
          Assert.fail("Row " + i + " " + StringUnaryTestMode.values()[v] + " typeName " + typeName
              + " result is NULL " + (vectorResult == null)
              + " does not match row-mode expected result is NULL " + (expectedResult == null)
              + " functionName " + functionName
              + " genericUdf " + genericUdf.getClass().getSimpleName());
        }
      } else {
        if (!expectedResult.equals(vectorResult)) {
          Assert.fail("Row " + i + " " + StringUnaryTestMode.values()[v] + " typeName " + typeName
              + " result \"" + vectorResult.toString() + "\""
              + " (" + vectorResult.getClass().getSimpleName() + ")"
              + " does not match row-mode expected result \"" + expectedResult.toString() + "\""
              + " (" + expectedResult.getClass().getSimpleName() + ")"
              + " functionName " + functionName
              + " genericUdf " + genericUdf.getClass().getSimpleName());
        }
      }
    }
  }
}
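Every verification loop in this section applies the same null-aware comparison: a mismatch is either exactly one side null, or both sides non-null and unequal. The pattern compresses to a few lines of plain Java, with no Hive dependencies:

// The null-aware comparison used by all of the verification loops above.
public class ResultCompareSketch {
  static boolean matches(Object expected, Object actual) {
    if (expected == null || actual == null) {
      return expected == actual;  // true only when both are null
    }
    return expected.equals(actual);
  }

  public static void main(String[] args) {
    System.out.println(matches(null, null));  // true
    System.out.println(matches("a", null));   // false
    System.out.println(matches("a", "a"));    // true
  }
}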