Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource in project hive by apache.
In class TestVectorDateAddSub, method doDateAddSubTestsWithDiffColumnScalar:
private void doDateAddSubTestsWithDiffColumnScalar(Random random, String dateTimeStringTypeName, String integerTypeName, ColumnScalarMode columnScalarMode, boolean isAdd) throws Exception {
TypeInfo dateTimeStringTypeInfo = TypeInfoUtils.getTypeInfoFromTypeString(dateTimeStringTypeName);
PrimitiveCategory dateTimeStringPrimitiveCategory = ((PrimitiveTypeInfo) dateTimeStringTypeInfo).getPrimitiveCategory();
boolean isStringFamily = (dateTimeStringPrimitiveCategory == PrimitiveCategory.STRING || dateTimeStringPrimitiveCategory == PrimitiveCategory.CHAR || dateTimeStringPrimitiveCategory == PrimitiveCategory.VARCHAR);
TypeInfo integerTypeInfo = TypeInfoUtils.getTypeInfoFromTypeString(integerTypeName);
PrimitiveCategory integerPrimitiveCategory = ((PrimitiveTypeInfo) integerTypeInfo).getPrimitiveCategory();
List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList = new ArrayList<DataTypePhysicalVariation>();
List<String> columns = new ArrayList<String>();
int columnNum = 1;
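// First operand: either a generated date/timestamp/string column or a random constant scalar, depending on the ColumnScalarMode.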
ExprNodeDesc col1Expr;
if (columnScalarMode == ColumnScalarMode.COLUMN_COLUMN || columnScalarMode == ColumnScalarMode.COLUMN_SCALAR) {
if (!isStringFamily) {
generationSpecList.add(GenerationSpec.createSameType(dateTimeStringTypeInfo));
} else {
generationSpecList.add(GenerationSpec.createStringFamilyOtherTypeValue(dateTimeStringTypeInfo, TypeInfoFactory.dateTypeInfo));
}
explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
String columnName = "col" + (columnNum++);
col1Expr = new ExprNodeColumnDesc(dateTimeStringTypeInfo, columnName, "table", false);
columns.add(columnName);
} else {
Object scalar1Object;
if (!isStringFamily) {
scalar1Object = VectorRandomRowSource.randomPrimitiveObject(random, (PrimitiveTypeInfo) dateTimeStringTypeInfo);
} else {
scalar1Object = VectorRandomRowSource.randomStringFamilyOtherTypeValue(random, dateTimeStringTypeInfo, TypeInfoFactory.dateTypeInfo, false);
}
col1Expr = new ExprNodeConstantDesc(dateTimeStringTypeInfo, scalar1Object);
}
ExprNodeDesc col2Expr;
if (columnScalarMode == ColumnScalarMode.COLUMN_COLUMN || columnScalarMode == ColumnScalarMode.SCALAR_COLUMN) {
generationSpecList.add(GenerationSpec.createSameType(integerTypeInfo));
explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
String columnName = "col" + (columnNum++);
col2Expr = new ExprNodeColumnDesc(integerTypeInfo, columnName, "table", false);
columns.add(columnName);
} else {
Object scalar2Object = VectorRandomRowSource.randomPrimitiveObject(random, (PrimitiveTypeInfo) integerTypeInfo);
// Limit the scalar to the small 0 ... N-1 range these date add/sub tests expect.
scalar2Object = smallerRange(random, integerPrimitiveCategory, /* wantWritable */ false);
col2Expr = new ExprNodeConstantDesc(integerTypeInfo, scalar2Object);
}
List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
children.add(col1Expr);
children.add(col2Expr);
// ----------------------------------------------------------------------------------------------
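// Build the random row source and materialize rows; createInterestingBatches will replay them in varied batch shapes.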
String[] columnNames = columns.toArray(new String[0]);
VectorRandomRowSource rowSource = new VectorRandomRowSource();
rowSource.initGenerationSpecSchema(random, generationSpecList, /* maxComplexDepth */ 0, /* allowNull */ true, /* isUnicodeOk */ true, explicitDataTypePhysicalVariationList);
Object[][] randomRows = rowSource.randomRows(100000);
if (columnScalarMode == ColumnScalarMode.COLUMN_COLUMN || columnScalarMode == ColumnScalarMode.SCALAR_COLUMN) {
// Fixup numbers to limit the range to 0 ... N-1.
for (int i = 0; i < randomRows.length; i++) {
Object[] row = randomRows[i];
if (row[columnNum - 2] != null) {
row[columnNum - 2] = smallerRange(random, integerPrimitiveCategory, /* wantWritable */ true);
}
}
}
VectorRandomBatchSource batchSource = VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);
String[] outputScratchTypeNames = new String[] { "date" };
VectorizedRowBatchCtx batchContext = new VectorizedRowBatchCtx(columnNames, rowSource.typeInfos(), rowSource.dataTypePhysicalVariations(), /* dataColumnNums */ null, /* partitionColumnCount */ 0, /* virtualColumnCount */ 0, /* neededVirtualColumns */ null, outputScratchTypeNames, null);
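// Execute each test mode (row mode, vectorized adaptor, native vector expression) over the same rows and collect per-row results.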
final int rowCount = randomRows.length;
Object[][] resultObjectsArray = new Object[DateAddSubTestMode.count][];
for (int i = 0; i < DateAddSubTestMode.count; i++) {
Object[] resultObjects = new Object[rowCount];
resultObjectsArray[i] = resultObjects;
GenericUDF udf = (isAdd ? new GenericUDFDateAdd() : new GenericUDFDateSub());
ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc(TypeInfoFactory.dateTypeInfo, udf, children);
DateAddSubTestMode dateAddSubTestMode = DateAddSubTestMode.values()[i];
switch(dateAddSubTestMode) {
case ROW_MODE:
doRowDateAddSubTest(dateTimeStringTypeInfo, integerTypeInfo, columns, children, isAdd, exprDesc, randomRows, columnScalarMode, rowSource.rowStructObjectInspector(), resultObjects);
break;
case ADAPTOR:
case VECTOR_EXPRESSION:
doVectorDateAddSubTest(dateTimeStringTypeInfo, integerTypeInfo, columns, rowSource.typeInfos(), children, isAdd, exprDesc, dateAddSubTestMode, columnScalarMode, batchSource, batchContext, resultObjects);
break;
default:
throw new RuntimeException("Unexpected IF statement test mode " + dateAddSubTestMode);
}
}
for (int i = 0; i < rowCount; i++) {
// Row-mode is the expected value.
Object expectedResult = resultObjectsArray[0][i];
for (int v = 1; v < DateAddSubTestMode.count; v++) {
Object vectorResult = resultObjectsArray[v][i];
if (expectedResult == null || vectorResult == null) {
if (expectedResult != null || vectorResult != null) {
Assert.fail("Row " + i + " " + DateAddSubTestMode.values()[v] + " isAdd " + isAdd + " " + columnScalarMode + " result is NULL " + (vectorResult == null) + " does not match row-mode expected result is NULL " + (expectedResult == null) + " row values " + Arrays.toString(randomRows[i]));
}
} else {
if (!expectedResult.equals(vectorResult)) {
Assert.fail("Row " + i + " " + DateAddSubTestMode.values()[v] + " isAdd " + isAdd + " " + columnScalarMode + " result " + vectorResult.toString() + " (" + vectorResult.getClass().getSimpleName() + ")" + " does not match row-mode expected result " + expectedResult.toString() + " (" + expectedResult.getClass().getSimpleName() + ")" + " row values " + Arrays.toString(randomRows[i]));
}
}
}
}
}
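Each of these tests drives the same batch-replay idiom. Here is a minimal sketch of that idiom, assuming a rowSource, randomRows, and batchContext prepared as above; this is illustrative, not code from the Hive repository:

// Sketch: replaying the random rows as vectorized batches.
VectorRandomBatchSource batchSource =
    VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);
VectorizedRowBatch batch = batchContext.createVectorizedRowBatch();
batchSource.resetBatchIteration();
while (batchSource.fillNextBatch(batch)) {
  // "Interesting" batches present the same rows under different physical layouts
  // (selection vector in use, nulls, repeating values), exercising more vectorized code paths.
  // ... evaluate a VectorExpression against 'batch' and collect results here ...
}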
Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource in project hive by apache.
In class TestVectorStructField, method doOneStructFieldTest:
private void doOneStructFieldTest(Random random, StructTypeInfo structTypeInfo, String structTypeName, int fieldIndex) throws Exception {
List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList = new ArrayList<DataTypePhysicalVariation>();
List<String> columns = new ArrayList<String>();
int columnNum = 1;
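// Single input column: the struct itself; the field-access expression is built below.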
generationSpecList.add(GenerationSpec.createSameType(structTypeInfo));
explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
ExprNodeDesc col1Expr;
String columnName = "col" + (columnNum++);
col1Expr = new ExprNodeColumnDesc(structTypeInfo, columnName, "table", false);
columns.add(columnName);
ObjectInspector structObjectInspector = VectorRandomRowSource.getObjectInspector(structTypeInfo);
List<ObjectInspector> objectInspectorList = new ArrayList<ObjectInspector>();
objectInspectorList.add(structObjectInspector);
List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
children.add(col1Expr);
// ----------------------------------------------------------------------------------------------
String[] columnNames = columns.toArray(new String[0]);
VectorRandomRowSource rowSource = new VectorRandomRowSource();
rowSource.initGenerationSpecSchema(random, generationSpecList, /* maxComplexDepth */ 0, /* allowNull */ true, /* isUnicodeOk */ true, explicitDataTypePhysicalVariationList);
Object[][] randomRows = rowSource.randomRows(100000);
VectorRandomBatchSource batchSource = VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);
List<String> fieldNameList = structTypeInfo.getAllStructFieldNames();
List<TypeInfo> fieldTypeInfoList = structTypeInfo.getAllStructFieldTypeInfos();
String randomFieldName = fieldNameList.get(fieldIndex);
TypeInfo outputTypeInfo = fieldTypeInfoList.get(fieldIndex);
ExprNodeFieldDesc exprNodeFieldDesc = new ExprNodeFieldDesc(outputTypeInfo, col1Expr, randomFieldName, /* isList */ false);
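// ExprNodeFieldDesc models the struct field access col1.fieldName; its output type is the field's type.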
final int rowCount = randomRows.length;
Object[][] resultObjectsArray = new Object[StructFieldTestMode.count][];
for (int i = 0; i < StructFieldTestMode.count; i++) {
Object[] resultObjects = new Object[rowCount];
resultObjectsArray[i] = resultObjects;
StructFieldTestMode structFieldTestMode = StructFieldTestMode.values()[i];
switch(structFieldTestMode) {
case ROW_MODE:
doRowStructFieldTest(structTypeInfo, columns, children, exprNodeFieldDesc, randomRows, rowSource.rowStructObjectInspector(), outputTypeInfo, resultObjects);
break;
case VECTOR_EXPRESSION:
doVectorStructFieldTest(structTypeInfo, columns, columnNames, rowSource.typeInfos(), rowSource.dataTypePhysicalVariations(), children, exprNodeFieldDesc, structFieldTestMode, batchSource, exprNodeFieldDesc.getWritableObjectInspector(), outputTypeInfo, resultObjects);
break;
default:
throw new RuntimeException("Unexpected Negative operator test mode " + negativeTestMode);
}
}
for (int i = 0; i < rowCount; i++) {
// Row-mode is the expected value.
Object expectedResult = resultObjectsArray[0][i];
for (int v = 1; v < StructFieldTestMode.count; v++) {
Object vectorResult = resultObjectsArray[v][i];
if (expectedResult == null || vectorResult == null) {
if (expectedResult != null || vectorResult != null) {
Assert.fail("Row " + i + " structTypeName " + structTypeName + " outputTypeName " + outputTypeInfo.getTypeName() + " " + StructFieldTestMode.values()[v] + " result is NULL " + (vectorResult == null) + " does not match row-mode expected result is NULL " + (expectedResult == null) + " row values " + Arrays.toString(randomRows[i]));
}
} else {
if (!expectedResult.equals(vectorResult)) {
Assert.fail("Row " + i + " structTypeName " + structTypeName + " outputTypeName " + outputTypeInfo.getTypeName() + " " + StructFieldTestMode.values()[v] + " result " + vectorResult.toString() + " (" + vectorResult.getClass().getSimpleName() + ")" + " does not match row-mode expected result " + expectedResult.toString() + " (" + expectedResult.getClass().getSimpleName() + ")" + " row values " + Arrays.toString(randomRows[i]));
}
}
}
}
}
Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource in project hive by apache.
In class TestMapJoinOperator, method createExpectedTestRowMultiSet:
/*
* Simulate the join by driving the test big table data by our test small table HashMap and
* create the expected output as a multi-set of TestRow (i.e. TestRow and occurrence count).
*/
private RowTestObjectsMultiSet createExpectedTestRowMultiSet(MapJoinTestDescription testDesc, MapJoinTestData testData) throws HiveException {
RowTestObjectsMultiSet expectedTestRowMultiSet = new RowTestObjectsMultiSet();
VectorExtractRow vectorExtractRow = new VectorExtractRow();
vectorExtractRow.init(testDesc.bigTableTypeInfos);
final int bigTableColumnCount = testDesc.bigTableTypeInfos.length;
Object[] bigTableRowObjects = new Object[bigTableColumnCount];
final int bigTableKeyColumnCount = testDesc.bigTableKeyTypeInfos.length;
Object[] bigTableKeyObjects = new Object[bigTableKeyColumnCount];
VectorRandomBatchSource bigTableBatchSource = testData.getBigTableBatchSource();
VectorizedRowBatch batch = testData.getBigTableBatch();
bigTableBatchSource.resetBatchIteration();
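// Walk every big-table batch, extracting each row back into Java objects to simulate the join in row mode.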
while (bigTableBatchSource.fillNextBatch(batch)) {
final int size = testData.bigTableBatch.size;
for (int r = 0; r < size; r++) {
vectorExtractRow.extractRow(testData.bigTableBatch, r, bigTableRowObjects);
// Form key object array
// NULLs may be present in {FULL|LEFT|RIGHT} OUTER joins.
boolean hasAnyNulls = false;
for (int k = 0; k < bigTableKeyColumnCount; k++) {
int keyColumnNum = testDesc.bigTableKeyColumnNums[k];
Object keyObject = bigTableRowObjects[keyColumnNum];
if (keyObject == null) {
hasAnyNulls = true;
}
bigTableKeyObjects[k] = ((PrimitiveObjectInspector) testDesc.bigTableObjectInspectors[keyColumnNum]).copyObject(keyObject);
}
RowTestObjects testKey = new RowTestObjects(bigTableKeyObjects);
if (testData.smallTableKeyHashMap.containsKey(testKey) && !hasAnyNulls) {
int smallTableKeyIndex = testData.smallTableKeyHashMap.get(testKey);
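// Key matched: emit joined output rows according to the join variation.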
switch(testDesc.vectorMapJoinVariation) {
case INNER:
case OUTER:
case FULL_OUTER:
{
// One row per value.
ArrayList<RowTestObjects> valueList = testData.smallTableValues.get(smallTableKeyIndex);
final int valueCount = valueList.size();
for (int v = 0; v < valueCount; v++) {
Object[] outputObjects = new Object[testDesc.outputColumnNames.length];
addBigTableRetained(testDesc, bigTableRowObjects, outputObjects);
int outputColumnNum = testDesc.bigTableRetainColumnNums.length;
final int smallTableRetainKeyColumnNumsLength = testDesc.smallTableRetainKeyColumnNums.length;
for (int o = 0; o < smallTableRetainKeyColumnNumsLength; o++) {
outputObjects[outputColumnNum++] = bigTableKeyObjects[testDesc.smallTableRetainKeyColumnNums[o]];
}
Object[] valueRow = valueList.get(v).getRow();
final int smallTableRetainValueColumnNumsLength = testDesc.smallTableRetainValueColumnNums.length;
for (int o = 0; o < smallTableRetainValueColumnNumsLength; o++) {
outputObjects[outputColumnNum++] = valueRow[testDesc.smallTableRetainValueColumnNums[o]];
}
addToOutput(testDesc, expectedTestRowMultiSet, outputObjects, RowTestObjectsMultiSet.RowFlag.REGULAR);
}
}
break;
case INNER_BIG_ONLY:
case LEFT_SEMI:
case LEFT_ANTI:
{
Object[] outputObjects = new Object[testDesc.outputColumnNames.length];
addBigTableRetained(testDesc, bigTableRowObjects, outputObjects);
int outputColumnNum = testDesc.bigTableRetainColumnNums.length;
final int smallTableRetainKeyColumnNumsLength = testDesc.smallTableRetainKeyColumnNums.length;
for (int o = 0; o < smallTableRetainKeyColumnNumsLength; o++) {
outputObjects[outputColumnNum++] = bigTableKeyObjects[testDesc.smallTableRetainKeyColumnNums[o]];
}
addToOutput(testDesc, expectedTestRowMultiSet, outputObjects, RowTestObjectsMultiSet.RowFlag.REGULAR);
}
break;
default:
throw new RuntimeException("Unknown operator variation " + testDesc.vectorMapJoinVariation);
}
} else {
if (testDesc.vectorMapJoinVariation == VectorMapJoinVariation.OUTER || testDesc.vectorMapJoinVariation == VectorMapJoinVariation.FULL_OUTER) {
// We need to add a non-match row with nulls for small table values.
Object[] outputObjects = new Object[testDesc.outputColumnNames.length];
addBigTableRetained(testDesc, bigTableRowObjects, outputObjects);
int outputColumnNum = testDesc.bigTableRetainColumnNums.length;
final int smallTableRetainKeyColumnNumsLength = testDesc.smallTableRetainKeyColumnNums.length;
for (int o = 0; o < smallTableRetainKeyColumnNumsLength; o++) {
outputObjects[outputColumnNum++] = null;
}
final int smallTableRetainValueColumnNumsLength = testDesc.smallTableRetainValueColumnNums.length;
for (int o = 0; o < smallTableRetainValueColumnNumsLength; o++) {
outputObjects[outputColumnNum++] = null;
}
addToOutput(testDesc, expectedTestRowMultiSet, outputObjects, RowTestObjectsMultiSet.RowFlag.LEFT_OUTER);
}
}
}
}
if (testDesc.vectorMapJoinVariation == VectorMapJoinVariation.FULL_OUTER) {
System.out.println("*BENCHMARK* ----------------------------------------------------------------------");
System.out.println("*BENCHMARK* FULL OUTER non-match key count " + testData.fullOuterAdditionalSmallTableKeys.size());
// Fill in non-match Small Table key results.
for (RowTestObjects smallTableKey : testData.fullOuterAdditionalSmallTableKeys) {
// System.out.println(
// "*BENCHMARK* fullOuterAdditionalSmallTableKey " + smallTableKey.toString());
int smallTableKeyIndex = testData.smallTableKeyHashMap.get(smallTableKey);
// One row per value.
ArrayList<RowTestObjects> valueList = testData.smallTableValues.get(smallTableKeyIndex);
final int valueCount = valueList.size();
for (int v = 0; v < valueCount; v++) {
Object[] outputObjects = new Object[testDesc.outputColumnNames.length];
// Non-match Small Table keys produce NULL Big Table columns.
final int bigTableRetainColumnNumsLength = testDesc.bigTableRetainColumnNums.length;
for (int o = 0; o < bigTableRetainColumnNumsLength; o++) {
outputObjects[o] = null;
}
int outputColumnNum = testDesc.bigTableRetainColumnNums.length;
// The output result may include 0, 1, or more small key columns...
Object[] smallKeyObjects = smallTableKey.getRow();
final int smallTableRetainKeyColumnNumsLength = testDesc.smallTableRetainKeyColumnNums.length;
for (int o = 0; o < smallTableRetainKeyColumnNumsLength; o++) {
outputObjects[outputColumnNum++] = smallKeyObjects[testDesc.smallTableRetainKeyColumnNums[o]];
}
Object[] valueRow = valueList.get(v).getRow();
final int smallTableRetainValueColumnNumsLength = testDesc.smallTableRetainValueColumnNums.length;
for (int o = 0; o < smallTableRetainValueColumnNumsLength; o++) {
outputObjects[outputColumnNum++] = valueRow[testDesc.smallTableRetainValueColumnNums[o]];
}
addToOutput(testDesc, expectedTestRowMultiSet, outputObjects, RowTestObjectsMultiSet.RowFlag.FULL_OUTER);
}
}
}
return expectedTestRowMultiSet;
}
Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource in project hive by apache.
In class TestMapJoinOperator, method generateBigAndSmallTableRowLogLines:
private void generateBigAndSmallTableRowLogLines(MapJoinTestDescription testDesc, MapJoinTestData testData) throws HiveException {
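// NOTE: the PrintStream logging below is commented out in the committed test; the loops still exercise batch iteration and row extraction.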
// Generate Big Table rows log lines...
VectorExtractRow vectorExtractRow = new VectorExtractRow();
vectorExtractRow.init(testDesc.bigTableTypeInfos);
final int bigTableColumnCount = testDesc.bigTableTypeInfos.length;
Object[] bigTableRowObjects = new Object[bigTableColumnCount];
/*
PrintStream big_ps;
try {
big_ps = new PrintStream("/Users/mmccline/VecFullOuterRefresh/out_big");
} catch (Exception e) {
throw new HiveException(e);
}
*/
VectorRandomBatchSource bigTableBatchSource = testData.getBigTableBatchSource();
VectorizedRowBatch batch = testData.getBigTableBatch();
bigTableBatchSource.resetBatchIteration();
while (bigTableBatchSource.fillNextBatch(batch)) {
final int size = testData.bigTableBatch.size;
for (int r = 0; r < size; r++) {
vectorExtractRow.extractRow(testData.bigTableBatch, r, bigTableRowObjects);
// big_ps.println(rowToCsvString(bigTableRowObjects));
}
}
// big_ps.close();
/*
PrintStream small_ps;
try {
small_ps = new PrintStream("/Users/mmccline/VecFullOuterRefresh/out_small");
} catch (Exception e) {
throw new HiveException(e);
}
*/
// Generate Small Table rows log lines...
final int keyKeyColumnNumsLength = testDesc.bigTableKeyColumnNums.length;
final int smallTableRetainValueLength = testDesc.smallTableRetainValueColumnNums.length;
final int smallTableLength = keyKeyColumnNumsLength + smallTableRetainValueLength;
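// A small-table log row is the key columns followed by the retained value columns.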
for (Entry<RowTestObjects, Integer> entry : testData.smallTableKeyHashMap.entrySet()) {
if (smallTableRetainValueLength == 0) {
Object[] smallTableRowObjects = entry.getKey().getRow();
// small_ps.println(rowToCsvString(smallTableRowObjects));
} else {
Integer valueIndex = entry.getValue();
ArrayList<RowTestObjects> valueList = testData.smallTableValues.get(valueIndex);
final int valueCount = valueList.size();
for (int v = 0; v < valueCount; v++) {
Object[] smallTableRowObjects = new Object[smallTableLength];
System.arraycopy(entry.getKey().getRow(), 0, smallTableRowObjects, 0, keyKeyColumnNumsLength);
int outputColumnNum = keyKeyColumnNumsLength;
Object[] valueRow = valueList.get(v).getRow();
for (int o = 0; o < smallTableRetainValueLength; o++) {
smallTableRowObjects[outputColumnNum++] = valueRow[testDesc.smallTableRetainValueColumnNums[o]];
}
// small_ps.println(rowToCsvString(smallTableRowObjects));
}
}
}
// small_ps.close();
}
Use of org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource in project hive by apache.
In class TestVectorDateDiff, method doDateDiffTestsWithDiffColumnScalar:
private void doDateDiffTestsWithDiffColumnScalar(Random random, String dateTimeStringTypeName1, String dateTimeStringTypeName2, ColumnScalarMode columnScalarMode) throws Exception {
TypeInfo dateTimeStringTypeInfo1 = TypeInfoUtils.getTypeInfoFromTypeString(dateTimeStringTypeName1);
PrimitiveCategory dateTimeStringPrimitiveCategory1 = ((PrimitiveTypeInfo) dateTimeStringTypeInfo1).getPrimitiveCategory();
boolean isStringFamily1 = (dateTimeStringPrimitiveCategory1 == PrimitiveCategory.STRING || dateTimeStringPrimitiveCategory1 == PrimitiveCategory.CHAR || dateTimeStringPrimitiveCategory1 == PrimitiveCategory.VARCHAR);
TypeInfo dateTimeStringTypeInfo2 = TypeInfoUtils.getTypeInfoFromTypeString(dateTimeStringTypeName2);
PrimitiveCategory dateTimeStringPrimitiveCategory2 = ((PrimitiveTypeInfo) dateTimeStringTypeInfo2).getPrimitiveCategory();
boolean isStringFamily2 = (dateTimeStringPrimitiveCategory2 == PrimitiveCategory.STRING || dateTimeStringPrimitiveCategory2 == PrimitiveCategory.CHAR || dateTimeStringPrimitiveCategory2 == PrimitiveCategory.VARCHAR);
List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList = new ArrayList<DataTypePhysicalVariation>();
List<String> columns = new ArrayList<String>();
int columnNum = 1;
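// Each operand may independently be a column or a constant scalar, per the ColumnScalarMode.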
ExprNodeDesc col1Expr;
if (columnScalarMode == ColumnScalarMode.COLUMN_COLUMN || columnScalarMode == ColumnScalarMode.COLUMN_SCALAR) {
if (!isStringFamily1) {
generationSpecList.add(GenerationSpec.createSameType(dateTimeStringTypeInfo1));
} else {
generationSpecList.add(GenerationSpec.createStringFamilyOtherTypeValue(dateTimeStringTypeInfo1, TypeInfoFactory.dateTypeInfo));
}
explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
String columnName = "col" + (columnNum++);
col1Expr = new ExprNodeColumnDesc(dateTimeStringTypeInfo1, columnName, "table", false);
columns.add(columnName);
} else {
Object scalar1Object;
if (!isStringFamily1) {
scalar1Object = VectorRandomRowSource.randomPrimitiveObject(random, (PrimitiveTypeInfo) dateTimeStringTypeInfo1);
} else {
scalar1Object = VectorRandomRowSource.randomStringFamilyOtherTypeValue(random, dateTimeStringTypeInfo1, TypeInfoFactory.dateTypeInfo, false);
}
col1Expr = new ExprNodeConstantDesc(dateTimeStringTypeInfo1, scalar1Object);
}
ExprNodeDesc col2Expr;
if (columnScalarMode == ColumnScalarMode.COLUMN_COLUMN || columnScalarMode == ColumnScalarMode.SCALAR_COLUMN) {
if (!isStringFamily2) {
generationSpecList.add(GenerationSpec.createSameType(dateTimeStringTypeInfo2));
} else {
generationSpecList.add(GenerationSpec.createStringFamilyOtherTypeValue(dateTimeStringTypeInfo2, TypeInfoFactory.dateTypeInfo));
}
explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);
String columnName = "col" + (columnNum++);
col2Expr = new ExprNodeColumnDesc(dateTimeStringTypeInfo2, columnName, "table", false);
columns.add(columnName);
} else {
Object scalar2Object;
if (!isStringFamily2) {
scalar2Object = VectorRandomRowSource.randomPrimitiveObject(random, (PrimitiveTypeInfo) dateTimeStringTypeInfo2);
} else {
scalar2Object = VectorRandomRowSource.randomStringFamilyOtherTypeValue(random, dateTimeStringTypeInfo2, TypeInfoFactory.dateTypeInfo, false);
}
col2Expr = new ExprNodeConstantDesc(dateTimeStringTypeInfo2, scalar2Object);
}
List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
children.add(col1Expr);
children.add(col2Expr);
// ----------------------------------------------------------------------------------------------
String[] columnNames = columns.toArray(new String[0]);
VectorRandomRowSource rowSource = new VectorRandomRowSource();
rowSource.initGenerationSpecSchema(random, generationSpecList, /* maxComplexDepth */ 0, /* allowNull */ true, /* isUnicodeOk */ true, explicitDataTypePhysicalVariationList);
Object[][] randomRows = rowSource.randomRows(100000);
VectorRandomBatchSource batchSource = VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);
String[] outputScratchTypeNames = new String[] { "date" };
VectorizedRowBatchCtx batchContext = new VectorizedRowBatchCtx(columnNames, rowSource.typeInfos(), rowSource.dataTypePhysicalVariations(), /* dataColumnNums */ null, /* partitionColumnCount */ 0, /* virtualColumnCount */ 0, /* neededVirtualColumns */ null, outputScratchTypeNames, null);
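// Run row mode, the vectorized adaptor, and the native vector expression over the same rows.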
final int rowCount = randomRows.length;
Object[][] resultObjectsArray = new Object[DateDiffTestMode.count][];
for (int i = 0; i < DateDiffTestMode.count; i++) {
Object[] resultObjects = new Object[rowCount];
resultObjectsArray[i] = resultObjects;
GenericUDF udf = new GenericUDFDateDiff();
ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc(TypeInfoFactory.intTypeInfo, udf, children);
DateDiffTestMode dateDiffTestMode = DateDiffTestMode.values()[i];
switch(dateDiffTestMode) {
case ROW_MODE:
doRowDateAddSubTest(dateTimeStringTypeInfo1, dateTimeStringTypeInfo2, columns, children, exprDesc, randomRows, columnScalarMode, rowSource.rowStructObjectInspector(), resultObjects);
break;
case ADAPTOR:
case VECTOR_EXPRESSION:
doVectorDateAddSubTest(dateTimeStringTypeInfo1, dateTimeStringTypeInfo2, columns, rowSource.typeInfos(), children, exprDesc, dateDiffTestMode, columnScalarMode, batchSource, batchContext, resultObjects);
break;
default:
throw new RuntimeException("Unexpected IF statement test mode " + dateDiffTestMode);
}
}
for (int i = 0; i < rowCount; i++) {
// Row-mode is the expected value.
Object expectedResult = resultObjectsArray[0][i];
for (int v = 1; v < DateDiffTestMode.count; v++) {
Object vectorResult = resultObjectsArray[v][i];
if (expectedResult == null || vectorResult == null) {
if (expectedResult != null || vectorResult != null) {
Assert.fail("Row " + i + " " + DateDiffTestMode.values()[v] + " " + columnScalarMode + " result is NULL " + (vectorResult == null) + " does not match row-mode expected result is NULL " + (expectedResult == null) + " row values " + Arrays.toString(randomRows[i]));
}
} else {
if (!expectedResult.equals(vectorResult)) {
Assert.fail("Row " + i + " " + DateDiffTestMode.values()[v] + " " + columnScalarMode + " result " + vectorResult.toString() + " (" + vectorResult.getClass().getSimpleName() + ")" + " does not match row-mode expected result " + expectedResult.toString() + " (" + expectedResult.getClass().getSimpleName() + ")" + " row values " + Arrays.toString(randomRows[i]));
}
}
}
}
}
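The verification loop is identical across these tests: the row-mode results (index 0) act as the oracle, and every vectorized mode must agree with them, including on NULLs. Below is a condensed sketch of that comparison, with the hypothetical name modeCount standing in for each test's mode count; it is not code from the Hive repository:

// Sketch: comparing each vectorized mode against the row-mode oracle.
for (int i = 0; i < rowCount; i++) {
  Object expected = resultObjectsArray[0][i];   // row mode is the reference
  for (int v = 1; v < modeCount; v++) {
    Object actual = resultObjectsArray[v][i];
    boolean bothNull = (expected == null && actual == null);
    if (!bothNull && (expected == null || !expected.equals(actual))) {
      Assert.fail("row " + i + " mode " + v + ": expected " + expected + ", got " + actual);
    }
  }
}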