Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo in project cdap by caskdata.
From the class StandardObjectInspectorsTest, method testStandardUnionObjectInspector:
@Test
public void testStandardUnionObjectInspector() throws Throwable {
  try {
    ArrayList<ObjectInspector> objectInspectors = new ArrayList<>();
    // add primitive types
    objectInspectors.add(PrimitiveObjectInspectorFactory.javaIntObjectInspector);
    objectInspectors.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
    objectInspectors.add(PrimitiveObjectInspectorFactory.javaBooleanObjectInspector);
    // add a list
    objectInspectors.add(ObjectInspectorFactory.getStandardListObjectInspector(
        PrimitiveObjectInspectorFactory.javaIntObjectInspector));
    // add a map
    objectInspectors.add(ObjectInspectorFactory.getStandardMapObjectInspector(
        PrimitiveObjectInspectorFactory.javaIntObjectInspector,
        PrimitiveObjectInspectorFactory.javaStringObjectInspector));
    // add a struct
    List<String> fieldNames = new ArrayList<>();
    fieldNames.add("myDouble");
    fieldNames.add("myLong");
    ArrayList<ObjectInspector> fieldObjectInspectors = new ArrayList<>();
    fieldObjectInspectors.add(PrimitiveObjectInspectorFactory.javaDoubleObjectInspector);
    fieldObjectInspectors.add(PrimitiveObjectInspectorFactory.javaLongObjectInspector);
    objectInspectors.add(
        ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldObjectInspectors));
    StandardUnionObjectInspector uoi1 =
        ObjectInspectorFactory.getStandardUnionObjectInspector(objectInspectors);
    StandardUnionObjectInspector uoi2 =
        ObjectInspectorFactory.getStandardUnionObjectInspector(
            (ArrayList<ObjectInspector>) objectInspectors.clone());
    Assert.assertEquals(uoi1, uoi2);
    Assert.assertEquals(ObjectInspectorUtils.getObjectInspectorName(uoi1),
        ObjectInspectorUtils.getObjectInspectorName(uoi2));
    Assert.assertTrue(ObjectInspectorUtils.compareTypes(uoi1, uoi2));
    // compareSupported returns false because the union can contain a map
    Assert.assertFalse(ObjectInspectorUtils.compareSupported(uoi1));
    // construct a union object inspector without the map field
    ArrayList<ObjectInspector> ois = (ArrayList<ObjectInspector>) objectInspectors.clone();
    ois.set(4, PrimitiveObjectInspectorFactory.javaIntObjectInspector);
    Assert.assertTrue(ObjectInspectorUtils.compareSupported(
        ObjectInspectorFactory.getStandardUnionObjectInspector(ois)));
    // metadata
    Assert.assertEquals(Category.UNION, uoi1.getCategory());
    List<? extends ObjectInspector> uois = uoi1.getObjectInspectors();
    Assert.assertEquals(6, uois.size());
    for (int i = 0; i < 6; i++) {
      Assert.assertEquals(objectInspectors.get(i), uois.get(i));
    }
    StringBuilder unionTypeName = new StringBuilder();
    unionTypeName.append("uniontype<");
    for (int i = 0; i < uois.size(); i++) {
      if (i > 0) {
        unionTypeName.append(",");
      }
      unionTypeName.append(uois.get(i).getTypeName());
    }
    unionTypeName.append(">");
    Assert.assertEquals(unionTypeName.toString(), uoi1.getTypeName());
    // TypeInfo
    TypeInfo typeInfo1 = TypeInfoUtils.getTypeInfoFromObjectInspector(uoi1);
    Assert.assertEquals(Category.UNION, typeInfo1.getCategory());
    Assert.assertEquals(UnionTypeInfo.class.getName(), typeInfo1.getClass().getName());
    Assert.assertEquals(typeInfo1.getTypeName(), uoi1.getTypeName());
    Assert.assertEquals(typeInfo1, TypeInfoUtils.getTypeInfoFromTypeString(uoi1.getTypeName()));
    TypeInfo typeInfo2 = TypeInfoUtils.getTypeInfoFromObjectInspector(uoi2);
    Assert.assertEquals(typeInfo1, typeInfo2);
    Assert.assertEquals(TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(typeInfo1),
        TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(typeInfo2));
    Assert.assertEquals(TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(typeInfo1),
        TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(typeInfo2));
    // null
    Assert.assertNull(uoi1.getField(null));
    Assert.assertEquals(-1, uoi1.getTag(null));
    // Union
    UnionObject union = new StandardUnionObjectInspector.StandardUnion((byte) 0, 1);
    Assert.assertEquals(0, uoi1.getTag(union));
    Assert.assertEquals(1, uoi1.getField(union));
    Assert.assertEquals("{0:1}", SerDeUtils.getJSONString(union, uoi1));
    Assert.assertEquals(0, ObjectInspectorUtils.compare(union, uoi1,
        new StandardUnionObjectInspector.StandardUnion((byte) 0, 1), uoi2));
    Assert.assertTrue(ObjectInspectorUtils.copyToStandardObject(union, uoi1).equals(1));
    union = new StandardUnionObjectInspector.StandardUnion((byte) 1, "two");
    Assert.assertEquals(1, uoi1.getTag(union));
    Assert.assertEquals("two", uoi1.getField(union));
    Assert.assertEquals("{1:\"two\"}", SerDeUtils.getJSONString(union, uoi1));
    Assert.assertEquals(0, ObjectInspectorUtils.compare(union, uoi1,
        new StandardUnionObjectInspector.StandardUnion((byte) 1, "two"), uoi2));
    Assert.assertTrue(ObjectInspectorUtils.copyToStandardObject(union, uoi1).equals("two"));
    union = new StandardUnionObjectInspector.StandardUnion((byte) 2, true);
    Assert.assertEquals(2, uoi1.getTag(union));
    Assert.assertEquals(true, uoi1.getField(union));
    Assert.assertEquals("{2:true}", SerDeUtils.getJSONString(union, uoi1));
    Assert.assertEquals(0, ObjectInspectorUtils.compare(union, uoi1,
        new StandardUnionObjectInspector.StandardUnion((byte) 2, true), uoi2));
    Assert.assertTrue(ObjectInspectorUtils.copyToStandardObject(union, uoi1).equals(true));
    ArrayList<Integer> iList = new ArrayList<>();
    iList.add(4);
    iList.add(5);
    union = new StandardUnionObjectInspector.StandardUnion((byte) 3, iList);
    Assert.assertEquals(3, uoi1.getTag(union));
    Assert.assertEquals(iList, uoi1.getField(union));
    Assert.assertEquals("{3:[4,5]}", SerDeUtils.getJSONString(union, uoi1));
    Assert.assertEquals(0, ObjectInspectorUtils.compare(union, uoi1,
        new StandardUnionObjectInspector.StandardUnion((byte) 3, iList.clone()), uoi2));
    Assert.assertTrue(ObjectInspectorUtils.copyToStandardObject(union, uoi1).equals(iList));
    HashMap<Integer, String> map = new HashMap<>();
    map.put(6, "six");
    map.put(7, "seven");
    map.put(8, "eight");
    union = new StandardUnionObjectInspector.StandardUnion((byte) 4, map);
    Assert.assertEquals(4, uoi1.getTag(union));
    Assert.assertEquals(map, uoi1.getField(union));
    Assert.assertEquals("{4:{6:\"six\",7:\"seven\",8:\"eight\"}}", SerDeUtils.getJSONString(union, uoi1));
    // comparing a union that currently holds a map is expected to throw
    Throwable th = null;
    try {
      ObjectInspectorUtils.compare(union, uoi1,
          new StandardUnionObjectInspector.StandardUnion((byte) 4, map.clone()), uoi2, null);
    } catch (Throwable t) {
      th = t;
    }
    Assert.assertNotNull(th);
    Assert.assertEquals("Compare on map type not supported!", th.getMessage());
    Assert.assertTrue(ObjectInspectorUtils.copyToStandardObject(union, uoi1).equals(map));
    ArrayList<Object> struct = new ArrayList<>(2);
    struct.add(9.0);
    struct.add(10L);
    union = new StandardUnionObjectInspector.StandardUnion((byte) 5, struct);
    Assert.assertEquals(5, uoi1.getTag(union));
    Assert.assertEquals(struct, uoi1.getField(union));
    Assert.assertEquals("{5:{\"mydouble\":9.0,\"mylong\":10}}", SerDeUtils.getJSONString(union, uoi1));
    Assert.assertEquals(0, ObjectInspectorUtils.compare(union, uoi1,
        new StandardUnionObjectInspector.StandardUnion((byte) 5, struct.clone()), uoi2));
    Assert.assertTrue(ObjectInspectorUtils.copyToStandardObject(union, uoi1).equals(struct));
  } catch (Throwable e) {
    e.printStackTrace();
    throw e;
  }
}
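The core round trip the test exercises is ObjectInspector to TypeInfo and back. The sketch below distills it for a union type; it is illustrative only, assumes hive-serde on the classpath, and the class name UnionRoundTrip is invented for the example.

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class UnionRoundTrip {
  public static void main(String[] args) {
    // Parse a union type from its Hive type string.
    TypeInfo unionType =
        TypeInfoUtils.getTypeInfoFromTypeString("uniontype<int,string,boolean>");
    // Writable flavor: fields are exposed as Hadoop Writables (IntWritable, Text, ...).
    ObjectInspector writableOI =
        TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(unionType);
    // Java flavor: fields are exposed as plain Java objects (Integer, String, ...).
    ObjectInspector javaOI =
        TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(unionType);
    // Both inspectors describe the same Hive type.
    System.out.println(writableOI.getTypeName()); // uniontype<int,string,boolean>
    System.out.println(javaOI.getTypeName());     // uniontype<int,string,boolean>
  }
}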
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo in project hive by apache.
From the class VectorizedBatchUtil, method convertToStandardStructObjectInspector:
/**
 * Create a struct object inspector whose field inspectors are the standard
 * writable equivalents of the given inspector's field types.
 */
public static StandardStructObjectInspector convertToStandardStructObjectInspector(
    StructObjectInspector structObjectInspector) throws HiveException {
  List<? extends StructField> fields = structObjectInspector.getAllStructFieldRefs();
  List<ObjectInspector> oids = new ArrayList<ObjectInspector>();
  ArrayList<String> columnNames = new ArrayList<String>();
  for (StructField field : fields) {
    // Round-trip each field through its type string to obtain the TypeInfo,
    // then map the TypeInfo to a standard writable object inspector.
    TypeInfo typeInfo =
        TypeInfoUtils.getTypeInfoFromTypeString(field.getFieldObjectInspector().getTypeName());
    ObjectInspector standardWritableObjectInspector =
        TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(typeInfo);
    oids.add(standardWritableObjectInspector);
    columnNames.add(field.getFieldName());
  }
  return ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, oids);
}
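A hypothetical caller might use this helper as follows. This is a minimal sketch, assuming hive-exec and hive-serde on the classpath; the class name ConvertExample and the field names are invented for illustration.

import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedBatchUtil;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class ConvertExample {
  public static void main(String[] args) throws HiveException {
    // Build a struct OI whose fields use the Java object model...
    List<String> names = Arrays.asList("id", "name");
    List<ObjectInspector> javaFieldOIs = Arrays.asList(
        PrimitiveObjectInspectorFactory.javaIntObjectInspector,
        PrimitiveObjectInspectorFactory.javaStringObjectInspector);
    StandardStructObjectInspector javaStructOI =
        ObjectInspectorFactory.getStandardStructObjectInspector(names, javaFieldOIs);
    // ...and derive the equivalent struct OI backed by Hadoop Writables.
    StandardStructObjectInspector writableStructOI =
        VectorizedBatchUtil.convertToStandardStructObjectInspector(javaStructOI);
    System.out.println(writableStructOI.getTypeName()); // struct<id:int,name:string>
  }
}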
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo in project hive by apache.
From the class AggregationBase, method doRowTest:
protected static boolean doRowTest(TypeInfo typeInfo, GenericUDAFEvaluator evaluator,
    TypeInfo outputTypeInfo, GenericUDAFEvaluator.Mode udafEvaluatorMode, int maxKeyCount,
    List<String> columns, List<ExprNodeDesc> children, Object[][] randomRows,
    ObjectInspector rowInspector, Object[] results) throws Exception {
  // System.out.println("*ROW AGGREGATION EXPRESSION* " + evaluator.getClass().getSimpleName());
  /*
  System.out.println(
      "*DEBUG* typeInfo " + typeInfo.toString() +
      " aggregationTestMode ROW_MODE" +
      " outputTypeInfo " + outputTypeInfo.toString());
  */
  // Last entry is for a NULL key.
  AggregationBuffer[] aggregationBuffers = new AggregationBuffer[maxKeyCount + 1];
  ObjectInspector objectInspector =
      TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(outputTypeInfo);
  final boolean isCountStar;
  if (evaluator instanceof GenericUDAFCountEvaluator) {
    GenericUDAFCountEvaluator countEvaluator = (GenericUDAFCountEvaluator) evaluator;
    isCountStar = countEvaluator.getCountAllColumns();
  } else {
    isCountStar = false;
  }
  // COUNT(*) takes no parameters; every other aggregation here takes one.
  final Object[] parameterArray = isCountStar ? new Object[0] : new Object[1];
  final int rowCount = randomRows.length;
  for (int i = 0; i < rowCount; i++) {
    Object[] row = randomRows[i];
    ShortWritable shortWritable = (ShortWritable) row[0];
    final int key;
    if (shortWritable == null) {
      key = maxKeyCount;
    } else {
      key = shortWritable.get();
    }
    AggregationBuffer aggregationBuffer = aggregationBuffers[key];
    if (aggregationBuffer == null) {
      aggregationBuffer = evaluator.getNewAggregationBuffer();
      aggregationBuffers[key] = aggregationBuffer;
    }
    if (!isCountStar) {
      parameterArray[0] = row[1];
    }
    evaluator.aggregate(aggregationBuffer, parameterArray);
  }
  final boolean isPrimitive = (outputTypeInfo instanceof PrimitiveTypeInfo);
  final boolean isPartial =
      (udafEvaluatorMode == GenericUDAFEvaluator.Mode.PARTIAL1 ||
       udafEvaluatorMode == GenericUDAFEvaluator.Mode.PARTIAL2);
  for (short key = 0; key < maxKeyCount + 1; key++) {
    AggregationBuffer aggregationBuffer = aggregationBuffers[key];
    if (aggregationBuffer != null) {
      final Object result;
      if (isPartial) {
        result = evaluator.terminatePartial(aggregationBuffer);
      } else {
        result = evaluator.terminate(aggregationBuffer);
      }
      // Copy the result out of the (potentially reused) aggregation buffer.
      Object copyResult;
      if (result == null) {
        copyResult = null;
      } else if (isPrimitive) {
        copyResult = VectorRandomRowSource.getWritablePrimitiveObject(
            (PrimitiveTypeInfo) outputTypeInfo, objectInspector, result);
      } else {
        copyResult = ObjectInspectorUtils.copyToStandardObject(
            result, objectInspector, ObjectInspectorCopyOption.WRITABLE);
      }
      results[key] = copyResult;
    }
  }
  return true;
}
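The non-primitive branch above copies each terminated result into the standard writable object model so it survives buffer reuse. Below is a minimal standalone sketch of just that copy step, outside the test harness; the bigint type and the value 42 are invented for illustration.

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.io.LongWritable;

public class CopyResultExample {
  public static void main(String[] args) {
    // The output TypeInfo of an aggregation such as COUNT is bigint.
    TypeInfo outputTypeInfo = TypeInfoUtils.getTypeInfoFromTypeString("bigint");
    ObjectInspector outputOI =
        TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(outputTypeInfo);
    // Pretend this came out of evaluator.terminate(aggregationBuffer).
    Object result = new LongWritable(42L);
    // Copy it into an independent standard writable object, as the test does.
    Object copy = ObjectInspectorUtils.copyToStandardObject(
        result, outputOI, ObjectInspectorCopyOption.WRITABLE);
    System.out.println(copy); // 42
  }
}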
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo in project hive by apache.
From the class TestVectorAggregation, method doMerge:
private void doMerge(GenericUDAFEvaluator.Mode mergeUdafEvaluatorMode, Random random,
    String aggregationName, TypeInfo typeInfo, GenerationSpec keyGenerationSpec,
    List<String> columns, String[] columnNames, int dataAggrMaxKeyCount, int reductionFactor,
    TypeInfo partial1OutputTypeInfo, Object[] partial1ResultsArray) throws Exception {
  List<GenerationSpec> mergeAggrGenerationSpecList = new ArrayList<GenerationSpec>();
  List<DataTypePhysicalVariation> mergeDataTypePhysicalVariationList =
      new ArrayList<DataTypePhysicalVariation>();
  mergeAggrGenerationSpecList.add(keyGenerationSpec);
  mergeDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
  // Use OMIT for both. We will fill in the data from the PARTIAL1 results.
  GenerationSpec mergeGenerationSpec =
      GenerationSpec.createOmitGeneration(partial1OutputTypeInfo);
  mergeAggrGenerationSpecList.add(mergeGenerationSpec);
  mergeDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
  ExprNodeColumnDesc mergeCol1Expr =
      new ExprNodeColumnDesc(partial1OutputTypeInfo, "col1", "table", false);
  List<ExprNodeDesc> mergeParameters = new ArrayList<ExprNodeDesc>();
  mergeParameters.add(mergeCol1Expr);
  final int mergeParameterCount = mergeParameters.size();
  ObjectInspector[] mergeParameterObjectInspectors = new ObjectInspector[mergeParameterCount];
  for (int i = 0; i < mergeParameterCount; i++) {
    TypeInfo paramTypeInfo = mergeParameters.get(i).getTypeInfo();
    mergeParameterObjectInspectors[i] =
        TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(paramTypeInfo);
  }
  VectorRandomRowSource mergeRowSource = new VectorRandomRowSource();
  mergeRowSource.initGenerationSpecSchema(random, mergeAggrGenerationSpecList,
      /* maxComplexDepth */ 0, /* allowNull */ false, /* isUnicodeOk */ true,
      mergeDataTypePhysicalVariationList);
  Object[][] mergeRandomRows = mergeRowSource.randomRows(TEST_ROW_COUNT);
  // Reduce the key range to cause there to be work for each PARTIAL2 key.
  final int mergeMaxKeyCount = dataAggrMaxKeyCount / reductionFactor;
  Object[] partial1Results = (Object[]) partial1ResultsArray[0];
  short partial1Key = 0;
  for (int i = 0; i < mergeRandomRows.length; i++) {
    // Find a non-NULL entry...
    while (true) {
      if (partial1Key >= dataAggrMaxKeyCount) {
        partial1Key = 0;
      }
      if (partial1Results[partial1Key] != null) {
        break;
      }
      partial1Key++;
    }
    final short mergeKey = (short) (partial1Key % mergeMaxKeyCount);
    mergeRandomRows[i][0] = new ShortWritable(mergeKey);
    mergeRandomRows[i][1] = partial1Results[partial1Key];
    partial1Key++;
  }
  VectorRandomBatchSource mergeBatchSource =
      VectorRandomBatchSource.createInterestingBatches(random, mergeRowSource, mergeRandomRows, null);
  // We need to pass the original TypeInfo in for initializing the evaluator.
  GenericUDAFEvaluator mergeEvaluator = getEvaluator(aggregationName, typeInfo);
  /*
  System.out.println(
      "*DEBUG* GenericUDAFEvaluator for " + aggregationName + ", " + typeInfo.getTypeName() + ": " +
      mergeEvaluator.getClass().getSimpleName());
  */
  // The only way to get the return object inspector (and its return type) is to
  // initialize it...
  ObjectInspector mergeReturnOI =
      mergeEvaluator.init(mergeUdafEvaluatorMode, mergeParameterObjectInspectors);
  TypeInfo mergeOutputTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(mergeReturnOI);
  Object[] mergeResultsArray = new Object[AggregationTestMode.count];
  executeAggregationTests(aggregationName, partial1OutputTypeInfo, mergeEvaluator,
      mergeOutputTypeInfo, mergeUdafEvaluatorMode, mergeMaxKeyCount, columns, columnNames,
      mergeParameters, mergeRandomRows, mergeRowSource, mergeBatchSource,
      /* tryDecimal64 */ false, mergeResultsArray);
  verifyAggregationResults(partial1OutputTypeInfo, mergeOutputTypeInfo, mergeMaxKeyCount,
      mergeUdafEvaluatorMode, mergeResultsArray);
}
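The loop that builds mergeParameterObjectInspectors is the standard recipe for preparing GenericUDAFEvaluator.init: map each parameter expression's TypeInfo to a standard writable object inspector. A condensed sketch of just that step, with an invented bigint column standing in for the PARTIAL1 output:

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class ParameterOiExample {
  public static void main(String[] args) {
    // One parameter column, typed bigint (e.g. a PARTIAL1 count output).
    List<ExprNodeDesc> parameters = new ArrayList<>();
    parameters.add(new ExprNodeColumnDesc(TypeInfoFactory.longTypeInfo, "col1", "table", false));
    // Map each parameter's TypeInfo to a standard writable object inspector;
    // the resulting array is what GenericUDAFEvaluator.init(mode, ois) expects.
    ObjectInspector[] parameterOIs = new ObjectInspector[parameters.size()];
    for (int i = 0; i < parameters.size(); i++) {
      parameterOIs[i] = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(
          parameters.get(i).getTypeInfo());
    }
    System.out.println(parameterOIs[0].getTypeName()); // bigint
  }
}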
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo in project hive by apache.
From the class TestVectorCoalesceElt, method doCoalesceOnRandomDataType:
private boolean doCoalesceOnRandomDataType(Random random, int iteration, boolean isCoalesce,
    boolean isEltIndexConst, int columnCount, int[] constantColumns, int[] nullConstantColumns,
    boolean allowNulls) throws Exception {
  String typeName;
  if (isCoalesce) {
    typeName = VectorRandomRowSource.getRandomTypeName(random, SupportedTypes.PRIMITIVES,
        /* allowedTypeNameSet */ null);
    typeName = VectorRandomRowSource.getDecoratedTypeName(random, typeName,
        SupportedTypes.PRIMITIVES, /* allowedTypeNameSet */ null, /* depth */ 0, /* maxDepth */ 2);
  } else {
    // ELT only chooses between STRINGs.
    typeName = "string";
  }
  TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(typeName);
  // ----------------------------------------------------------------------------------------------
  final TypeInfo intTypeInfo;
  ObjectInspector intObjectInspector;
  if (isCoalesce) {
    intTypeInfo = null;
    intObjectInspector = null;
  } else {
    // ELT's first argument is the 1-based index of the string to return.
    intTypeInfo = TypeInfoFactory.intTypeInfo;
    intObjectInspector =
        TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(intTypeInfo);
  }
  ObjectInspector objectInspector =
      TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(typeInfo);
  // ----------------------------------------------------------------------------------------------
  List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
  List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList =
      new ArrayList<DataTypePhysicalVariation>();
  List<String> columns = new ArrayList<String>();
  List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
  int columnNum = 1;
  if (!isCoalesce) {
    List<Object> intValueList = new ArrayList<Object>();
    for (int i = -1; i < columnCount + 2; i++) {
      intValueList.add(new IntWritable(i));
    }
    final int intValueListCount = intValueList.size();
    ExprNodeDesc intColExpr;
    if (!isEltIndexConst) {
      generationSpecList.add(GenerationSpec.createValueList(intTypeInfo, intValueList));
      explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
      String columnName = "col" + columnNum++;
      columns.add(columnName);
      intColExpr = new ExprNodeColumnDesc(intTypeInfo, columnName, "table", false);
    } else {
      final Object scalarObject;
      if (random.nextInt(10) != 0) {
        scalarObject = intValueList.get(random.nextInt(intValueListCount));
      } else {
        scalarObject = null;
      }
      intColExpr = new ExprNodeConstantDesc(intTypeInfo, scalarObject);
    }
    children.add(intColExpr);
  }
  for (int c = 0; c < columnCount; c++) {
    ExprNodeDesc colExpr;
    if (!contains(constantColumns, c)) {
      generationSpecList.add(GenerationSpec.createSameType(typeInfo));
      explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
      String columnName = "col" + columnNum++;
      columns.add(columnName);
      colExpr = new ExprNodeColumnDesc(typeInfo, columnName, "table", false);
    } else {
      final Object scalarObject;
      if (!contains(nullConstantColumns, c)) {
        scalarObject =
            VectorRandomRowSource.randomPrimitiveObject(random, (PrimitiveTypeInfo) typeInfo);
      } else {
        scalarObject = null;
      }
      colExpr = new ExprNodeConstantDesc(typeInfo, scalarObject);
    }
    children.add(colExpr);
  }
  VectorRandomRowSource rowSource = new VectorRandomRowSource();
  rowSource.initGenerationSpecSchema(random, generationSpecList,
      /* maxComplexDepth */ 0, /* allowNull */ allowNulls, /* isUnicodeOk */ true,
      explicitDataTypePhysicalVariationList);
  String[] columnNames = columns.toArray(new String[0]);
  Object[][] randomRows = rowSource.randomRows(100000);
  VectorRandomBatchSource batchSource =
      VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);
  final GenericUDF udf = (isCoalesce ? new GenericUDFCoalesce() : new GenericUDFElt());
  final int start = isCoalesce ? 0 : 1;
  final int end = start + columnCount;
  ObjectInspector[] argumentOIs = new ObjectInspector[end];
  if (!isCoalesce) {
    argumentOIs[0] = intObjectInspector;
  }
  for (int i = start; i < end; i++) {
    argumentOIs[i] = objectInspector;
  }
  final ObjectInspector outputObjectInspector = udf.initialize(argumentOIs);
  TypeInfo outputTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(outputObjectInspector);
  ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc(typeInfo, udf, children);
  final int rowCount = randomRows.length;
  Object[][] resultObjectsArray = new Object[CoalesceEltTestMode.count][];
  for (int i = 0; i < CoalesceEltTestMode.count; i++) {
    Object[] resultObjects = new Object[rowCount];
    resultObjectsArray[i] = resultObjects;
    CoalesceEltTestMode coalesceEltTestMode = CoalesceEltTestMode.values()[i];
    switch (coalesceEltTestMode) {
      case ROW_MODE:
        if (!doRowCastTest(typeInfo, columns, children, udf, exprDesc, randomRows,
            rowSource.rowStructObjectInspector(), exprDesc.getWritableObjectInspector(),
            resultObjects)) {
          return false;
        }
        break;
      case ADAPTOR:
      case VECTOR_EXPRESSION:
        if (!doVectorCastTest(typeInfo, iteration, columns, columnNames, rowSource.typeInfos(),
            rowSource.dataTypePhysicalVariations(), children, udf, exprDesc,
            coalesceEltTestMode, batchSource, exprDesc.getWritableObjectInspector(),
            outputTypeInfo, resultObjects)) {
          return false;
        }
        break;
      default:
        throw new RuntimeException("Unexpected COALESCE/ELT test mode " + coalesceEltTestMode);
    }
  }
  for (int i = 0; i < rowCount; i++) {
    // Row-mode results are the expected values.
    Object expectedResult = resultObjectsArray[0][i];
    for (int v = 1; v < CoalesceEltTestMode.count; v++) {
      Object vectorResult = resultObjectsArray[v][i];
      CoalesceEltTestMode coalesceEltTestMode = CoalesceEltTestMode.values()[v];
      if (expectedResult == null || vectorResult == null) {
        if (expectedResult != null || vectorResult != null) {
          Assert.fail("Row " + i + " sourceTypeName " + typeName + " " + coalesceEltTestMode +
              " iteration " + iteration +
              " result is NULL " +
              (vectorResult == null ? "YES" : "NO result " + vectorResult.toString()) +
              " does not match row-mode expected result is NULL " +
              (expectedResult == null ? "YES" : "NO result '" + expectedResult.toString() + "'") +
              " row values " + Arrays.toString(randomRows[i]) +
              " exprDesc " + exprDesc.toString());
        }
      } else {
        if (!expectedResult.equals(vectorResult)) {
          Assert.fail("Row " + i + " sourceTypeName " + typeName + " " + coalesceEltTestMode +
              " iteration " + iteration +
              " result '" + vectorResult.toString() + "'" +
              " (" + vectorResult.getClass().getSimpleName() + ")" +
              " does not match row-mode expected result '" + expectedResult.toString() + "'" +
              " (" + expectedResult.getClass().getSimpleName() + ")" +
              " row values " + Arrays.toString(randomRows[i]) +
              " exprDesc " + exprDesc.toString());
        }
      }
    }
  }
  return true;
}
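Stripped of the random-row machinery, the object-inspector plumbing the test relies on looks like this: derive a writable OI from a TypeInfo, initialize the UDF with it, and evaluate. A minimal sketch, assuming hive-exec on the classpath; the class name CoalesceExample and the argument values are invented.

import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFCoalesce;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.io.Text;

public class CoalesceExample {
  public static void main(String[] args) throws HiveException {
    // Writable OI for the string arguments, derived from the TypeInfo as in the test.
    ObjectInspector stringOI = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(
        TypeInfoFactory.stringTypeInfo);
    GenericUDF udf = new GenericUDFCoalesce();
    // initialize() returns the output object inspector for the argument types.
    ObjectInspector outputOI = udf.initialize(new ObjectInspector[] { stringOI, stringOI });
    // COALESCE returns the first non-null argument.
    Object result = udf.evaluate(new DeferredObject[] {
        new DeferredJavaObject(null),
        new DeferredJavaObject(new Text("fallback")) });
    System.out.println(outputOI.getTypeName() + ": " + result); // string: fallback
  }
}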