Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource in project hive by apache.
The class TestVectorMapJoinFastRowHashMap, method testBigIntRowsClippedExact.
@Test
public void testBigIntRowsClippedExact() throws Exception {
  random = new Random(2122);
  // Use a large capacity that doesn't require expansion, yet.
  VectorMapJoinFastLongHashMapContainer map =
      new VectorMapJoinFastLongHashMapContainer(false, false, HashTableKeyType.LONG,
          LARGE_CAPACITY, LOAD_FACTOR, LARGE_WB_SIZE, -1, tableDesc, 4);
  VerifyFastRowHashMap verifyTable = new VerifyFastRowHashMap();
  VectorRandomRowSource valueSource = new VectorRandomRowSource();
  valueSource.init(random, VectorRandomRowSource.SupportedTypes.ALL, 4,
      /* allowNulls */ false, /* isUnicodeOk */ false);
  int rowCount = 1000;
  Object[][] rows = valueSource.randomRows(rowCount);
  addAndVerifyRows(valueSource, rows, map, HashTableKeyType.LONG, verifyTable,
      new String[] { "bigint" }, /* doClipping */ true, /* useExactBytes */ true);
}
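The test above relies on VectorRandomRowSource to synthesize deterministic random rows. A minimal standalone sketch of just that row-generation step, assuming the Hive test utilities are on the classpath (the class name RandomRowsSketch is illustrative; the init and randomRows calls mirror the test above):

import java.util.Random;
import org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource;

public class RandomRowsSketch {
  public static void main(String[] args) {
    // Fixed seed so any failure is reproducible, as in the test above.
    Random random = new Random(2122);
    VectorRandomRowSource source = new VectorRandomRowSource();
    // All supported types, complex nesting up to depth 4, no NULLs, ASCII only.
    source.init(random, VectorRandomRowSource.SupportedTypes.ALL, 4,
        /* allowNulls */ false, /* isUnicodeOk */ false);
    Object[][] rows = source.randomRows(1000);
    System.out.println("generated " + rows.length + " rows");
  }
}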
Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource in project hive by apache.
The class TestVectorFilterCompare, method doTestsWithDiffColumnScalar.
private void doTestsWithDiffColumnScalar(Random random, TypeInfo typeInfo1, TypeInfo typeInfo2,
    ColumnScalarMode columnScalarMode, Comparison comparison, boolean tryDecimal64)
    throws Exception {
  String typeName1 = typeInfo1.getTypeName();
  PrimitiveCategory primitiveCategory1 = ((PrimitiveTypeInfo) typeInfo1).getPrimitiveCategory();
  String typeName2 = typeInfo2.getTypeName();
  PrimitiveCategory primitiveCategory2 = ((PrimitiveTypeInfo) typeInfo2).getPrimitiveCategory();
  List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
  List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList =
      new ArrayList<DataTypePhysicalVariation>();
  List<String> columns = new ArrayList<String>();
  int columnNum = 1;
  ExprNodeDesc col1Expr;
  Object scalar1Object = null;
  final boolean decimal64Enable1 = checkDecimal64(tryDecimal64, typeInfo1);
  if (columnScalarMode == ColumnScalarMode.COLUMN_COLUMN ||
      columnScalarMode == ColumnScalarMode.COLUMN_SCALAR) {
    generationSpecList.add(GenerationSpec.createSameType(typeInfo1));
    explicitDataTypePhysicalVariationList.add(
        decimal64Enable1 ? DataTypePhysicalVariation.DECIMAL_64 : DataTypePhysicalVariation.NONE);
    String columnName = "col" + (columnNum++);
    col1Expr = new ExprNodeColumnDesc(typeInfo1, columnName, "table", false);
    columns.add(columnName);
  } else {
    scalar1Object = VectorRandomRowSource.randomPrimitiveObject(random, (PrimitiveTypeInfo) typeInfo1);
    // Adjust the decimal type to the scalar's type...
    if (typeInfo1 instanceof DecimalTypeInfo) {
      typeInfo1 = getDecimalScalarTypeInfo(scalar1Object);
    }
    col1Expr = new ExprNodeConstantDesc(typeInfo1, scalar1Object);
  }
  ExprNodeDesc col2Expr;
  Object scalar2Object = null;
  final boolean decimal64Enable2 = checkDecimal64(tryDecimal64, typeInfo2);
  if (columnScalarMode == ColumnScalarMode.COLUMN_COLUMN ||
      columnScalarMode == ColumnScalarMode.SCALAR_COLUMN) {
    generationSpecList.add(GenerationSpec.createSameType(typeInfo2));
    explicitDataTypePhysicalVariationList.add(
        decimal64Enable2 ? DataTypePhysicalVariation.DECIMAL_64 : DataTypePhysicalVariation.NONE);
    String columnName = "col" + (columnNum++);
    col2Expr = new ExprNodeColumnDesc(typeInfo2, columnName, "table", false);
    columns.add(columnName);
  } else {
    scalar2Object = VectorRandomRowSource.randomPrimitiveObject(random, (PrimitiveTypeInfo) typeInfo2);
    // Adjust the decimal type to the scalar's type...
    if (typeInfo2 instanceof DecimalTypeInfo) {
      typeInfo2 = getDecimalScalarTypeInfo(scalar2Object);
    }
    col2Expr = new ExprNodeConstantDesc(typeInfo2, scalar2Object);
  }
  List<ObjectInspector> objectInspectorList = new ArrayList<ObjectInspector>();
  objectInspectorList.add(VectorRandomRowSource.getObjectInspector(typeInfo1));
  objectInspectorList.add(VectorRandomRowSource.getObjectInspector(typeInfo2));
  List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
  children.add(col1Expr);
  children.add(col2Expr);
  // ----------------------------------------------------------------------------------------------
  String[] columnNames = columns.toArray(new String[0]);
  VectorRandomRowSource rowSource = new VectorRandomRowSource();
  rowSource.initGenerationSpecSchema(random, generationSpecList,
      /* maxComplexDepth */ 0, /* allowNull */ true, /* isUnicodeOk */ true,
      explicitDataTypePhysicalVariationList);
  Object[][] randomRows = rowSource.randomRows(100000);
  VectorRandomBatchSource batchSource =
      VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);
  GenericUDF genericUdf;
  switch (comparison) {
  case EQUALS:
    genericUdf = new GenericUDFOPEqual();
    break;
  case LESS_THAN:
    genericUdf = new GenericUDFOPLessThan();
    break;
  case LESS_THAN_EQUAL:
    genericUdf = new GenericUDFOPEqualOrLessThan();
    break;
  case GREATER_THAN:
    genericUdf = new GenericUDFOPGreaterThan();
    break;
  case GREATER_THAN_EQUAL:
    genericUdf = new GenericUDFOPEqualOrGreaterThan();
    break;
  case NOT_EQUALS:
    genericUdf = new GenericUDFOPNotEqual();
    break;
  default:
throw new RuntimeException("Unexpected arithmetic " + comparison);
  }
  ObjectInspector[] objectInspectors =
      objectInspectorList.toArray(new ObjectInspector[objectInspectorList.size()]);
  ObjectInspector outputObjectInspector = null;
  try {
    outputObjectInspector = genericUdf.initialize(objectInspectors);
  } catch (Exception e) {
    Assert.fail(e.toString());
  }
  TypeInfo outputTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(outputObjectInspector);
  ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc(outputTypeInfo, genericUdf, children);
  final int rowCount = randomRows.length;
  Object[][] resultObjectsArray = new Object[FilterCompareTestMode.count][];
  for (int i = 0; i < FilterCompareTestMode.count; i++) {
    Object[] resultObjects = new Object[rowCount];
    resultObjectsArray[i] = resultObjects;
    FilterCompareTestMode filterCompareTestMode = FilterCompareTestMode.values()[i];
    switch (filterCompareTestMode) {
    case ROW_MODE:
      doRowFilterCompareTest(typeInfo1, typeInfo2, columns, children, exprDesc, comparison,
          randomRows, columnScalarMode, rowSource.rowStructObjectInspector(), outputTypeInfo,
          resultObjects);
      break;
    case ADAPTOR:
    case FILTER_VECTOR_EXPRESSION:
    case COMPARE_VECTOR_EXPRESSION:
      doVectorFilterCompareTest(typeInfo1, typeInfo2, columns, columnNames,
          rowSource.typeInfos(), rowSource.dataTypePhysicalVariations(), children, exprDesc,
          comparison, filterCompareTestMode, columnScalarMode, batchSource,
          exprDesc.getWritableObjectInspector(), outputTypeInfo, resultObjects);
      break;
    default:
throw new RuntimeException("Unexpected IF statement test mode " + filterCompareTestMode);
    }
  }
  for (int i = 0; i < rowCount; i++) {
    // Row-mode is the expected value.
    Object expectedResult = resultObjectsArray[0][i];
    for (int v = 1; v < FilterCompareTestMode.count; v++) {
      FilterCompareTestMode filterCompareTestMode = FilterCompareTestMode.values()[v];
      Object vectorResult = resultObjectsArray[v][i];
      if (filterCompareTestMode == FilterCompareTestMode.FILTER_VECTOR_EXPRESSION &&
          expectedResult == null && vectorResult != null) {
        // This is OK: a filter expression may return non-NULL (false) where row mode
        // returns NULL; only a non-NULL true result is a real mismatch.
        boolean vectorBoolean = ((BooleanWritable) vectorResult).get();
        if (vectorBoolean) {
          Assert.fail(
              "Row " + i + " typeName1 " + typeName1 + " typeName2 " + typeName2 +
              " outputTypeName " + outputTypeInfo.getTypeName() +
              " " + comparison + " " + filterCompareTestMode + " " + columnScalarMode +
              " result is NOT NULL and true" +
              " does not match row-mode expected result is NULL which means false here" +
              (columnScalarMode == ColumnScalarMode.SCALAR_COLUMN ?
                  " scalar1 " + scalar1Object.toString() : "") +
              " row values " + Arrays.toString(randomRows[i]) +
              (columnScalarMode == ColumnScalarMode.COLUMN_SCALAR ?
                  " scalar2 " + scalar2Object.toString() : ""));
        }
      } else if (expectedResult == null || vectorResult == null) {
        if (expectedResult != null || vectorResult != null) {
          Assert.fail(
              "Row " + i + " typeName1 " + typeName1 + " typeName2 " + typeName2 +
              " outputTypeName " + outputTypeInfo.getTypeName() +
              " " + comparison + " " + filterCompareTestMode + " " + columnScalarMode +
              " result is NULL " + (vectorResult == null) +
              " does not match row-mode expected result is NULL " + (expectedResult == null) +
              (columnScalarMode == ColumnScalarMode.SCALAR_COLUMN ?
                  " scalar1 " + scalar1Object.toString() : "") +
              " row values " + Arrays.toString(randomRows[i]) +
              (columnScalarMode == ColumnScalarMode.COLUMN_SCALAR ?
                  " scalar2 " + scalar2Object.toString() : ""));
        }
      } else {
        if (!expectedResult.equals(vectorResult)) {
          Assert.fail(
              "Row " + i + " typeName1 " + typeName1 + " typeName2 " + typeName2 +
              " outputTypeName " + outputTypeInfo.getTypeName() +
              " " + comparison + " " + filterCompareTestMode + " " + columnScalarMode +
              " result " + vectorResult.toString() +
              " (" + vectorResult.getClass().getSimpleName() + ")" +
              " does not match row-mode expected result " + expectedResult.toString() +
              " (" + expectedResult.getClass().getSimpleName() + ")" +
              (columnScalarMode == ColumnScalarMode.SCALAR_COLUMN ?
                  " scalar1 " + scalar1Object.toString() : "") +
              " row values " + Arrays.toString(randomRows[i]) +
              (columnScalarMode == ColumnScalarMode.COLUMN_SCALAR ?
                  " scalar2 " + scalar2Object.toString() : ""));
        }
      }
    }
  }
}
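All of these tests share one oracle scheme: index 0 of resultObjectsArray holds the row-mode results, and every vectorized mode must agree with them, with NULLs compared symmetrically. Stripped of the test-specific diagnostics (and omitting the FILTER_VECTOR_EXPRESSION relaxation above, where a row-mode NULL may legitimately surface as a vector-mode false), the check reduces to a sketch like this; the class and method names are illustrative:

import java.util.Objects;

final class ResultOracle {
  // resultsByMode[0] is the row-mode oracle; every other mode must match it.
  static void verify(Object[][] resultsByMode, int rowCount) {
    for (int row = 0; row < rowCount; row++) {
      Object expected = resultsByMode[0][row];
      for (int mode = 1; mode < resultsByMode.length; mode++) {
        Object actual = resultsByMode[mode][row];
        // Objects.equals treats two NULLs as equal, matching the NULL handling above.
        if (!Objects.equals(expected, actual)) {
          throw new AssertionError("row " + row + " mode " + mode +
              ": expected " + expected + " but got " + actual);
        }
      }
    }
  }
}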
Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource in project hive by apache.
The class TestVectorAggregation, method doMerge.
private void doMerge(GenericUDAFEvaluator.Mode mergeUdafEvaluatorMode, Random random,
    String aggregationName, TypeInfo typeInfo, GenerationSpec keyGenerationSpec,
    List<String> columns, String[] columnNames, int dataAggrMaxKeyCount, int reductionFactor,
    TypeInfo partial1OutputTypeInfo, Object[] partial1ResultsArray) throws Exception {
  List<GenerationSpec> mergeAggrGenerationSpecList = new ArrayList<GenerationSpec>();
  List<DataTypePhysicalVariation> mergeDataTypePhysicalVariationList =
      new ArrayList<DataTypePhysicalVariation>();
  mergeAggrGenerationSpecList.add(keyGenerationSpec);
  mergeDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
  // Use OMIT generation for the value column; it is filled in below from the PARTIAL1 results.
  GenerationSpec mergeGenerationSpec =
      GenerationSpec.createOmitGeneration(partial1OutputTypeInfo);
  mergeAggrGenerationSpecList.add(mergeGenerationSpec);
  mergeDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
  ExprNodeColumnDesc mergeCol1Expr =
      new ExprNodeColumnDesc(partial1OutputTypeInfo, "col1", "table", false);
  List<ExprNodeDesc> mergeParameters = new ArrayList<ExprNodeDesc>();
  mergeParameters.add(mergeCol1Expr);
  final int mergeParameterCount = mergeParameters.size();
  ObjectInspector[] mergeParameterObjectInspectors = new ObjectInspector[mergeParameterCount];
  for (int i = 0; i < mergeParameterCount; i++) {
    TypeInfo paramTypeInfo = mergeParameters.get(i).getTypeInfo();
    mergeParameterObjectInspectors[i] =
        TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(paramTypeInfo);
  }
  VectorRandomRowSource mergeRowSource = new VectorRandomRowSource();
  mergeRowSource.initGenerationSpecSchema(random, mergeAggrGenerationSpecList,
      /* maxComplexDepth */ 0, /* allowNull */ false, /* isUnicodeOk */ true,
      mergeDataTypePhysicalVariationList);
  Object[][] mergeRandomRows = mergeRowSource.randomRows(TEST_ROW_COUNT);
  // Reduce the key range to cause there to be work for each PARTIAL2 key.
  final int mergeMaxKeyCount = dataAggrMaxKeyCount / reductionFactor;
  Object[] partial1Results = (Object[]) partial1ResultsArray[0];
  short partial1Key = 0;
  for (int i = 0; i < mergeRandomRows.length; i++) {
    // Find a non-NULL entry...
    while (true) {
      if (partial1Key >= dataAggrMaxKeyCount) {
        partial1Key = 0;
      }
      if (partial1Results[partial1Key] != null) {
        break;
      }
      partial1Key++;
    }
    final short mergeKey = (short) (partial1Key % mergeMaxKeyCount);
    mergeRandomRows[i][0] = new ShortWritable(mergeKey);
    mergeRandomRows[i][1] = partial1Results[partial1Key];
    partial1Key++;
  }
  VectorRandomBatchSource mergeBatchSource =
      VectorRandomBatchSource.createInterestingBatches(random, mergeRowSource, mergeRandomRows, null);
  // We need to pass the original TypeInfo in for initializing the evaluator.
  GenericUDAFEvaluator mergeEvaluator = getEvaluator(aggregationName, typeInfo);
  /*
  System.out.println(
      "*DEBUG* GenericUDAFEvaluator for " + aggregationName + ", " + typeInfo.getTypeName() + ": " +
      mergeEvaluator.getClass().getSimpleName());
  */
  // The only way to get the return object inspector (and its return type) is to
  // initialize it...
  ObjectInspector mergeReturnOI =
      mergeEvaluator.init(mergeUdafEvaluatorMode, mergeParameterObjectInspectors);
  TypeInfo mergeOutputTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(mergeReturnOI);
  Object[] mergeResultsArray = new Object[AggregationTestMode.count];
  executeAggregationTests(aggregationName, partial1OutputTypeInfo, mergeEvaluator,
      mergeOutputTypeInfo, mergeUdafEvaluatorMode, mergeMaxKeyCount, columns, columnNames,
      mergeParameters, mergeRandomRows, mergeRowSource, mergeBatchSource,
      /* tryDecimal64 */ false, mergeResultsArray);
  verifyAggregationResults(partial1OutputTypeInfo, mergeOutputTypeInfo, mergeMaxKeyCount,
      mergeUdafEvaluatorMode, mergeResultsArray);
}
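The key folding above is plain modular arithmetic: PARTIAL1 keys are mapped into a range reductionFactor times smaller, so each merge key receives several partial results to combine. A tiny illustration with made-up counts (values and class name are illustrative only):

public class KeyFoldSketch {
  public static void main(String[] args) {
    int dataAggrMaxKeyCount = 8;
    int reductionFactor = 4;
    int mergeMaxKeyCount = dataAggrMaxKeyCount / reductionFactor;  // = 2
    for (short partial1Key = 0; partial1Key < dataAggrMaxKeyCount; partial1Key++) {
      short mergeKey = (short) (partial1Key % mergeMaxKeyCount);
      // Keys 0,2,4,6 fold onto merge key 0; keys 1,3,5,7 onto merge key 1.
      System.out.println("partial1Key " + partial1Key + " -> mergeKey " + mergeKey);
    }
  }
}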
Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource in project hive by apache.
The class TestVectorCoalesceElt, method doVectorCastTest.
private boolean doVectorCastTest(TypeInfo typeInfo, int iteration, List<String> columns,
    String[] columnNames, TypeInfo[] typeInfos,
    DataTypePhysicalVariation[] dataTypePhysicalVariations, List<ExprNodeDesc> children,
    GenericUDF udf, ExprNodeGenericFuncDesc exprDesc, CoalesceEltTestMode coalesceEltTestMode,
    VectorRandomBatchSource batchSource, ObjectInspector objectInspector,
    TypeInfo outputTypeInfo, Object[] resultObjects) throws Exception {
  HiveConf hiveConf = new HiveConf();
  if (coalesceEltTestMode == CoalesceEltTestMode.ADAPTOR) {
    hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_TEST_VECTOR_ADAPTOR_OVERRIDE, true);
  }
  VectorizationContext vectorizationContext = new VectorizationContext("name", columns,
      Arrays.asList(typeInfos), Arrays.asList(dataTypePhysicalVariations), hiveConf);
  VectorExpression vectorExpression =
      vectorizationContext.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.PROJECTION);
  vectorExpression.transientInit(hiveConf);
  if (coalesceEltTestMode == CoalesceEltTestMode.VECTOR_EXPRESSION &&
      vectorExpression instanceof VectorUDFAdaptor) {
    System.out.println("*NO NATIVE VECTOR EXPRESSION* typeInfo " + typeInfo.toString() +
        " coalesceEltTestMode " + coalesceEltTestMode +
        " vectorExpression " + vectorExpression.toString());
  }
  // System.out.println("*VECTOR EXPRESSION* " + vectorExpression.getClass().getSimpleName());
  /*
  System.out.println(
      "*DEBUG* typeInfo " + typeInfo.toString() +
      " coalesceEltTestMode " + coalesceEltTestMode +
      " vectorExpression " + vectorExpression.toString());
  */
  VectorRandomRowSource rowSource = batchSource.getRowSource();
  VectorizedRowBatchCtx batchContext = new VectorizedRowBatchCtx(columnNames,
      rowSource.typeInfos(), rowSource.dataTypePhysicalVariations(),
      /* dataColumnNums */ null, /* partitionColumnCount */ 0, /* virtualColumnCount */ 0,
      /* neededVirtualColumns */ null, vectorizationContext.getScratchColumnTypeNames(),
      vectorizationContext.getScratchDataTypePhysicalVariations());
  VectorizedRowBatch batch = batchContext.createVectorizedRowBatch();
  VectorExtractRow resultVectorExtractRow = new VectorExtractRow();
  resultVectorExtractRow.init(new TypeInfo[] { outputTypeInfo },
      new int[] { vectorExpression.getOutputColumnNum() });
  Object[] scratchRow = new Object[1];
  batchSource.resetBatchIteration();
  int rowIndex = 0;
  while (true) {
    if (!batchSource.fillNextBatch(batch)) {
      break;
    }
    vectorExpression.evaluate(batch);
    extractResultObjects(batch, rowIndex, resultVectorExtractRow, scratchRow,
        objectInspector, resultObjects);
    rowIndex += batch.size;
  }
  return true;
}
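The loop at the end follows a fixed replay protocol: reset the iteration, fill one VectorizedRowBatch at a time, evaluate the compiled expression against it, and extract per-row results at a running row offset. Assuming the same locals as in the method above, the loop can also be written more compactly; a sketch, not a drop-in change:

batchSource.resetBatchIteration();
int rowIndex = 0;
while (batchSource.fillNextBatch(batch)) {
  vectorExpression.evaluate(batch);                  // vectorized evaluation in place
  extractResultObjects(batch, rowIndex, resultVectorExtractRow, scratchRow,
      objectInspector, resultObjects);               // copy results out row by row
  rowIndex += batch.size;                            // advance by the rows in this batch
}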
Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource in project hive by apache.
The class TestVectorCoalesceElt, method doCoalesceOnRandomDataType.
private boolean doCoalesceOnRandomDataType(Random random, int iteration, boolean isCoalesce,
    boolean isEltIndexConst, int columnCount, int[] constantColumns, int[] nullConstantColumns,
    boolean allowNulls) throws Exception {
  String typeName;
  if (isCoalesce) {
    typeName = VectorRandomRowSource.getRandomTypeName(random, SupportedTypes.PRIMITIVES,
        /* allowedTypeNameSet */ null);
    typeName = VectorRandomRowSource.getDecoratedTypeName(random, typeName,
        SupportedTypes.PRIMITIVES, /* allowedTypeNameSet */ null, /* depth */ 0,
        /* maxDepth */ 2);
  } else {
    // ELT only chooses between STRINGs.
typeName = "string";
}
TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(typeName);
// ----------------------------------------------------------------------------------------------
final TypeInfo intTypeInfo;
ObjectInspector intObjectInspector;
if (isCoalesce) {
intTypeInfo = null;
intObjectInspector = null;
} else {
intTypeInfo = TypeInfoFactory.intTypeInfo;
intObjectInspector = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(intTypeInfo);
}
ObjectInspector objectInspector = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(typeInfo);
// ----------------------------------------------------------------------------------------------
List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList = new ArrayList<DataTypePhysicalVariation>();
List<String> columns = new ArrayList<String>();
List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
int columnNum = 1;
if (!isCoalesce) {
List<Object> intValueList = new ArrayList<Object>();
for (int i = -1; i < columnCount + 2; i++) {
intValueList.add(new IntWritable(i));
}
final int intValueListCount = intValueList.size();
ExprNodeDesc intColExpr;
if (!isEltIndexConst) {
generationSpecList.add(GenerationSpec.createValueList(intTypeInfo, intValueList));
explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
String columnName = "col" + columnNum++;
columns.add(columnName);
intColExpr = new ExprNodeColumnDesc(intTypeInfo, columnName, "table", false);
} else {
final Object scalarObject;
if (random.nextInt(10) != 0) {
scalarObject = intValueList.get(random.nextInt(intValueListCount));
} else {
scalarObject = null;
}
      // The ELT index constant is an int, so use intTypeInfo here.
      intColExpr = new ExprNodeConstantDesc(intTypeInfo, scalarObject);
    }
    children.add(intColExpr);
  }
  for (int c = 0; c < columnCount; c++) {
    ExprNodeDesc colExpr;
    if (!contains(constantColumns, c)) {
      generationSpecList.add(GenerationSpec.createSameType(typeInfo));
      explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
      String columnName = "col" + columnNum++;
      columns.add(columnName);
      colExpr = new ExprNodeColumnDesc(typeInfo, columnName, "table", false);
    } else {
      final Object scalarObject;
      if (!contains(nullConstantColumns, c)) {
        scalarObject = VectorRandomRowSource.randomPrimitiveObject(random, (PrimitiveTypeInfo) typeInfo);
      } else {
        scalarObject = null;
      }
      colExpr = new ExprNodeConstantDesc(typeInfo, scalarObject);
    }
    children.add(colExpr);
  }
  VectorRandomRowSource rowSource = new VectorRandomRowSource();
  rowSource.initGenerationSpecSchema(random, generationSpecList,
      /* maxComplexDepth */ 0, /* allowNull */ allowNulls, /* isUnicodeOk */ true,
      explicitDataTypePhysicalVariationList);
  String[] columnNames = columns.toArray(new String[0]);
  Object[][] randomRows = rowSource.randomRows(100000);
  VectorRandomBatchSource batchSource =
      VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);
  final GenericUDF udf = (isCoalesce ? new GenericUDFCoalesce() : new GenericUDFElt());
  final int start = isCoalesce ? 0 : 1;
  final int end = start + columnCount;
  ObjectInspector[] argumentOIs = new ObjectInspector[end];
  if (!isCoalesce) {
    argumentOIs[0] = intObjectInspector;
  }
  for (int i = start; i < end; i++) {
    argumentOIs[i] = objectInspector;
  }
  final ObjectInspector outputObjectInspector = udf.initialize(argumentOIs);
  TypeInfo outputTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(outputObjectInspector);
  ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc(outputTypeInfo, udf, children);
  final int rowCount = randomRows.length;
  Object[][] resultObjectsArray = new Object[CoalesceEltTestMode.count][];
  for (int i = 0; i < CoalesceEltTestMode.count; i++) {
    Object[] resultObjects = new Object[rowCount];
    resultObjectsArray[i] = resultObjects;
    CoalesceEltTestMode coalesceEltTestMode = CoalesceEltTestMode.values()[i];
    switch (coalesceEltTestMode) {
    case ROW_MODE:
      if (!doRowCastTest(typeInfo, columns, children, udf, exprDesc, randomRows,
          rowSource.rowStructObjectInspector(), exprDesc.getWritableObjectInspector(),
          resultObjects)) {
        return false;
      }
      break;
    case ADAPTOR:
    case VECTOR_EXPRESSION:
      if (!doVectorCastTest(typeInfo, iteration, columns, columnNames, rowSource.typeInfos(),
          rowSource.dataTypePhysicalVariations(), children, udf, exprDesc, coalesceEltTestMode,
          batchSource, exprDesc.getWritableObjectInspector(), outputTypeInfo, resultObjects)) {
        return false;
      }
      break;
    default:
throw new RuntimeException("Unexpected IF statement test mode " + coalesceEltTestMode);
    }
  }
  for (int i = 0; i < rowCount; i++) {
    // Row-mode is the expected value.
    Object expectedResult = resultObjectsArray[0][i];
    for (int v = 1; v < CoalesceEltTestMode.count; v++) {
      Object vectorResult = resultObjectsArray[v][i];
      CoalesceEltTestMode coalesceEltTestMode = CoalesceEltTestMode.values()[v];
      if (expectedResult == null || vectorResult == null) {
        if (expectedResult != null || vectorResult != null) {
          Assert.fail(
              "Row " + i + " sourceTypeName " + typeName + " " + coalesceEltTestMode +
              " iteration " + iteration +
              " result is NULL " +
              (vectorResult == null ? "YES" : "NO result '" + vectorResult.toString() + "'") +
              " does not match row-mode expected result is NULL " +
              (expectedResult == null ? "YES" : "NO result '" + expectedResult.toString() + "'") +
              " row values " + Arrays.toString(randomRows[i]) +
              " exprDesc " + exprDesc.toString());
        }
      } else {
        if (!expectedResult.equals(vectorResult)) {
          Assert.fail(
              "Row " + i + " sourceTypeName " + typeName + " " + coalesceEltTestMode +
              " iteration " + iteration +
              " result '" + vectorResult.toString() + "'" +
              " (" + vectorResult.getClass().getSimpleName() + ")" +
              " does not match row-mode expected result '" + expectedResult.toString() + "'" +
              " (" + expectedResult.getClass().getSimpleName() + ")" +
              " row values " + Arrays.toString(randomRows[i]) +
              " exprDesc " + exprDesc.toString());
        }
      }
    }
  }
  return true;
}
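The index list in doCoalesceOnRandomDataType deliberately spans -1 through columnCount + 1 because Hive's ELT(n, str1, str2, ...) is 1-based and returns NULL whenever n is out of range, so both valid and invalid indexes need coverage. A standalone sketch of that behavior, assuming the Hive UDF classes are on the classpath (class name and values are illustrative):

import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFElt;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;

public class EltBoundsSketch {
  public static void main(String[] args) throws Exception {
    GenericUDFElt elt = new GenericUDFElt();
    ObjectInspector[] argumentOIs = {
        PrimitiveObjectInspectorFactory.writableIntObjectInspector,
        PrimitiveObjectInspectorFactory.writableStringObjectInspector,
        PrimitiveObjectInspectorFactory.writableStringObjectInspector };
    elt.initialize(argumentOIs);
    for (int n : new int[] { -1, 0, 1, 2, 3 }) {
      Object result = elt.evaluate(new GenericUDF.DeferredObject[] {
          new GenericUDF.DeferredJavaObject(new IntWritable(n)),
          new GenericUDF.DeferredJavaObject(new Text("a")),
          new GenericUDF.DeferredJavaObject(new Text("b")) });
      // Expected: NULL for -1, 0, and 3; "a" for 1; "b" for 2.
      System.out.println("ELT(" + n + ", 'a', 'b') = " + result);
    }
  }
}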