Use of org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow in project hive by apache.
From the class MapJoinTestData, method driveBigTableData:
public static void driveBigTableData(MapJoinTestDescription testDesc, MapJoinTestData testData,
    MapJoinOperator operator) throws HiveException {
  VectorExtractRow vectorExtractRow = new VectorExtractRow();
  vectorExtractRow.init(testDesc.bigTableTypeInfos);
  Object[][] bigTableRandomRows = testData.bigTableRandomRows;
  final int rowCount = bigTableRandomRows.length;
  // Feed each random big-table row into the MapJoin operator.
  for (int i = 0; i < rowCount; i++) {
    Object[] row = bigTableRandomRows[i];
    operator.process(row, 0);
  }
  // Close the operator tree.
  operator.close(false);
}
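The VectorExtractRow here is initialized for the big-table schema, but the extraction itself happens elsewhere in the test harness. For reference, a minimal sketch of the usual extract pattern, assuming a VectorizedRowBatch named batch that has already been filled (this code is not part of the method above):

  // Sketch only: pull each row of a filled batch back out as an Object[]
  // using the same extractor. The target array is reused across iterations.
  Object[] extracted = new Object[testDesc.bigTableTypeInfos.length];
  for (int batchIndex = 0; batchIndex < batch.size; batchIndex++) {
    vectorExtractRow.extractRow(batch, batchIndex, extracted);
    // extracted[c] now holds the value (typically a writable) for column c.
  }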
Use of org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow in project hive by apache.
From the class TestVectorNegative, method doVectorArithmeticTest:
private void doVectorArithmeticTest(TypeInfo typeInfo, List<String> columns, String[] columnNames,
    TypeInfo[] typeInfos, DataTypePhysicalVariation[] dataTypePhysicalVariations,
    List<ExprNodeDesc> children, ExprNodeGenericFuncDesc exprDesc, NegativeTestMode negativeTestMode,
    VectorRandomBatchSource batchSource, ObjectInspector objectInspector, TypeInfo outputTypeInfo,
    Object[] resultObjects) throws Exception {
  HiveConf hiveConf = new HiveConf();
  if (negativeTestMode == NegativeTestMode.ADAPTOR) {
    // Force the vectorizer to fall back to the VectorUDFAdaptor for this test mode.
    hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_TEST_VECTOR_ADAPTOR_OVERRIDE, true);
  }
  VectorizationContext vectorizationContext =
      new VectorizationContext("name", columns, Arrays.asList(typeInfos),
          Arrays.asList(dataTypePhysicalVariations), hiveConf);
  VectorExpression vectorExpression = vectorizationContext.getVectorExpression(exprDesc);
  vectorExpression.transientInit(hiveConf);
  if (negativeTestMode == NegativeTestMode.VECTOR_EXPRESSION && vectorExpression instanceof VectorUDFAdaptor) {
    System.out.println("*NO NATIVE VECTOR EXPRESSION* typeInfo " + typeInfo.toString() +
        " negativeTestMode " + negativeTestMode +
        " vectorExpression " + vectorExpression.toString());
  }
  String[] outputScratchTypeNames = vectorizationContext.getScratchColumnTypeNames();
  VectorizedRowBatchCtx batchContext =
      new VectorizedRowBatchCtx(
          columnNames,
          typeInfos,
          dataTypePhysicalVariations,
          /* dataColumnNums */ null,
          /* partitionColumnCount */ 0,
          /* virtualColumnCount */ 0,
          /* neededVirtualColumns */ null,
          outputScratchTypeNames,
          null);
  VectorizedRowBatch batch = batchContext.createVectorizedRowBatch();
  VectorExtractRow resultVectorExtractRow = new VectorExtractRow();
  resultVectorExtractRow.init(
      new TypeInfo[] { outputTypeInfo }, new int[] { vectorExpression.getOutputColumnNum() });
  Object[] scrqtchRow = new Object[1];
  // System.out.println("*VECTOR EXPRESSION* " + vectorExpression.getClass().getSimpleName());
  /*
  System.out.println(
      "*DEBUG* typeInfo " + typeInfo.toString() +
      " negativeTestMode " + negativeTestMode +
      " vectorExpression " + vectorExpression.toString());
  */
  batchSource.resetBatchIteration();
  int rowIndex = 0;
  while (true) {
    if (!batchSource.fillNextBatch(batch)) {
      break;
    }
    // Evaluate the vectorized expression on the batch and collect its output column row by row.
    vectorExpression.evaluate(batch);
    extractResultObjects(batch, rowIndex, resultVectorExtractRow, scrqtchRow, objectInspector, resultObjects);
    rowIndex += batch.size;
  }
}
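The extractResultObjects helper called above is not shown in this snippet. A sketch of what such a helper typically does in these vectorization tests (the exact body may differ): it uses resultVectorExtractRow to pull the single output column out of each batch row, copies it to a standard object via the ObjectInspector, and stores it in resultObjects.

  // Sketch, assuming the helper mirrors the usual pattern in Hive's vectorization tests.
  private static void extractResultObjects(VectorizedRowBatch batch, int rowIndex,
      VectorExtractRow resultVectorExtractRow, Object[] scrqtchRow,
      ObjectInspector objectInspector, Object[] resultObjects) {
    boolean selectedInUse = batch.selectedInUse;
    int[] selected = batch.selected;
    for (int logicalIndex = 0; logicalIndex < batch.size; logicalIndex++) {
      final int batchIndex = (selectedInUse ? selected[logicalIndex] : logicalIndex);
      // Extract the single output column into the scratch row, then deep-copy it
      // so the result survives batch reuse.
      resultVectorExtractRow.extractRow(batch, batchIndex, scrqtchRow);
      resultObjects[rowIndex++] = ObjectInspectorUtils.copyToStandardObject(
          scrqtchRow[0], objectInspector,
          ObjectInspectorUtils.ObjectInspectorCopyOption.WRITABLE);
    }
  }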
Use of org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow in project hive by apache.
From the class VectorBatchGenerateUtil, method generateRowObjectArray:
public static Object[][] generateRowObjectArray(TypeInfo[] typeInfos,
    VectorBatchGenerateStream batchStream, VectorizedRowBatch batch,
    ObjectInspector[] objectInspectors) throws HiveException {
  VectorExtractRow vectorExtractRow = new VectorExtractRow();
  vectorExtractRow.init(typeInfos);
  final int rowCount = batchStream.getRowCount();
  final int columnCount = typeInfos.length;
  Object[][] rowObjectArray = new Object[rowCount][];
  Object[] row = new Object[columnCount];
  int index = 0;
  batchStream.reset();
  while (batchStream.isNext()) {
    batch.reset();
    batchStream.fillNext(batch);
    // Extract each row from the batch and deep-copy its column values.
    final int size = batch.size;
    for (int r = 0; r < size; r++) {
      vectorExtractRow.extractRow(batch, r, row);
      Object[] resultObjectArray = new Object[columnCount];
      for (int c = 0; c < columnCount; c++) {
        // Copy so the result does not alias the reusable extraction row.
        resultObjectArray[c] = ((PrimitiveObjectInspector) objectInspectors[c]).copyObject(row[c]);
      }
      rowObjectArray[index++] = resultObjectArray;
    }
  }
  return rowObjectArray;
}
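A brief usage sketch (the setup of the stream, batch, and inspectors is assumed, not shown here): because each value is copied through its PrimitiveObjectInspector, the returned array is fully materialized and remains valid after the batch has been reset and reused, so it can be compared row by row against expected results.

  // Hypothetical verification loop over the materialized rows.
  Object[][] rows = VectorBatchGenerateUtil.generateRowObjectArray(
      typeInfos, batchStream, batch, objectInspectors);
  for (int r = 0; r < rows.length; r++) {
    for (int c = 0; c < rows[r].length; c++) {
      // Compare rows[r][c] against the expected value for row r, column c.
    }
  }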