Use of org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow in project hive by apache.
From class TestMapJoinOperator, method generateBigAndSmallTableRowLogLines:
private void generateBigAndSmallTableRowLogLines(MapJoinTestDescription testDesc,
    MapJoinTestData testData) throws HiveException {

  // Generate Big Table rows log lines...
  VectorExtractRow vectorExtractRow = new VectorExtractRow();
  vectorExtractRow.init(testDesc.bigTableTypeInfos);

  final int bigTableColumnCount = testDesc.bigTableTypeInfos.length;
  Object[] bigTableRowObjects = new Object[bigTableColumnCount];

  /*
  PrintStream big_ps;
  try {
    big_ps = new PrintStream("/Users/mmccline/VecFullOuterRefresh/out_big");
  } catch (Exception e) {
    throw new HiveException(e);
  }
  */

  VectorRandomBatchSource bigTableBatchSource = testData.getBigTableBatchSource();
  VectorizedRowBatch batch = testData.getBigTableBatch();
  bigTableBatchSource.resetBatchIteration();
  while (bigTableBatchSource.fillNextBatch(batch)) {
    final int size = testData.bigTableBatch.size;
    for (int r = 0; r < size; r++) {
      vectorExtractRow.extractRow(testData.bigTableBatch, r, bigTableRowObjects);
      // big_ps.println(rowToCsvString(bigTableRowObjects));
    }
  }
  // big_ps.close();

  /*
  PrintStream small_ps;
  try {
    small_ps = new PrintStream("/Users/mmccline/VecFullOuterRefresh/out_small");
  } catch (Exception e) {
    throw new HiveException(e);
  }
  */

  // Generate Small Table rows log lines...
  final int keyKeyColumnNumsLength = testDesc.bigTableKeyColumnNums.length;
  final int smallTableRetainValueLength = testDesc.smallTableRetainValueColumnNums.length;
  final int smallTableLength = keyKeyColumnNumsLength + smallTableRetainValueLength;
  for (Entry<RowTestObjects, Integer> entry : testData.smallTableKeyHashMap.entrySet()) {
    if (smallTableRetainValueLength == 0) {
      Object[] smallTableRowObjects = entry.getKey().getRow();
      // small_ps.println(rowToCsvString(smallTableRowObjects));
    } else {
      Integer valueIndex = entry.getValue();
      ArrayList<RowTestObjects> valueList = testData.smallTableValues.get(valueIndex);
      final int valueCount = valueList.size();
      for (int v = 0; v < valueCount; v++) {
        Object[] smallTableRowObjects = new Object[smallTableLength];
        System.arraycopy(entry.getKey().getRow(), 0, smallTableRowObjects, 0,
            keyKeyColumnNumsLength);
        int outputColumnNum = keyKeyColumnNumsLength;
        Object[] valueRow = valueList.get(v).getRow();
        for (int o = 0; o < smallTableRetainValueLength; o++) {
          smallTableRowObjects[outputColumnNum++] =
              valueRow[testDesc.smallTableRetainValueColumnNums[o]];
        }
        // small_ps.println(rowToCsvString(smallTableRowObjects));
      }
    }
  }
  // small_ps.close();
}
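The loop above shows the basic VectorExtractRow contract: init with the row's TypeInfos, then call extractRow once per row index to materialize a VectorizedRowBatch row into an Object[]. A minimal, self-contained sketch of that contract follows; the single-bigint schema and the hand-built batch are illustrative assumptions, not taken from the test above (there the batch comes from a VectorRandomBatchSource).

import java.util.Arrays;

import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class VectorExtractRowSketch {
  public static void main(String[] args) throws Exception {
    // Hand-built batch with one bigint column and two rows (illustrative only).
    VectorizedRowBatch batch = new VectorizedRowBatch(1);
    LongColumnVector col = new LongColumnVector();
    col.vector[0] = 7L;
    col.vector[1] = 42L;
    batch.cols[0] = col;
    batch.size = 2;

    // init(TypeInfo[]) extracts every column, in batch order.
    TypeInfo[] typeInfos = new TypeInfo[] { TypeInfoFactory.longTypeInfo };
    VectorExtractRow vectorExtractRow = new VectorExtractRow();
    vectorExtractRow.init(typeInfos);

    // The Object[] is reused across rows, exactly as in the test above.
    Object[] rowObjects = new Object[typeInfos.length];
    for (int r = 0; r < batch.size; r++) {
      vectorExtractRow.extractRow(batch, r, rowObjects);
      System.out.println(Arrays.toString(rowObjects));
    }
  }
}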
Use of org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow in project hive by apache.
From class VectorReduceSinkObjectHashOperator, method initializeOp:
@Override
protected void initializeOp(Configuration hconf) throws HiveException {
  super.initializeOp(hconf);
  VectorExpression.doTransientInit(reduceSinkBucketExpressions, hconf);
  VectorExpression.doTransientInit(reduceSinkPartitionExpressions, hconf);

  if (!isEmptyKey) {
    // For this variation, we serialize the key without caring whether it is a
    // single Long, a single String, a multi-column key, etc.
    keyOutput = new Output();
    keyBinarySortableSerializeWrite.set(keyOutput);
    keyVectorSerializeRow =
        new VectorSerializeRow<BinarySortableSerializeWrite>(keyBinarySortableSerializeWrite);
    keyVectorSerializeRow.init(reduceSinkKeyTypeInfos, reduceSinkKeyColumnMap);
  }

  if (isEmptyBuckets) {
    numBuckets = 0;
  } else {
    numBuckets = conf.getNumBuckets();
    bucketObjectInspectors = getObjectInspectorArray(reduceSinkBucketTypeInfos);
    bucketVectorExtractRow = new VectorExtractRow();
    bucketVectorExtractRow.init(reduceSinkBucketTypeInfos, reduceSinkBucketColumnMap);
    bucketFieldValues = new Object[reduceSinkBucketTypeInfos.length];
  }

  if (isEmptyPartitions) {
    nonPartitionRandom = new Random(12345);
  } else {
    partitionObjectInspectors = getObjectInspectorArray(reduceSinkPartitionTypeInfos);
    partitionVectorExtractRow = new VectorExtractRow();
    partitionVectorExtractRow.init(reduceSinkPartitionTypeInfos, reduceSinkPartitionColumnMap);
    partitionFieldValues = new Object[reduceSinkPartitionTypeInfos.length];
  }

  // Set hashFunc.
  hashFunc = getConf().getBucketingVersion() == 2 && !vectorDesc.getIsAcidChange() ?
      ObjectInspectorUtils::getBucketHashCode :
      ObjectInspectorUtils::getBucketHashCodeOld;

  // Set the function to evaluate _bucket_number if needed.
  if (reduceSinkKeyExpressions != null) {
    for (VectorExpression ve : reduceSinkKeyExpressions) {
      if (ve instanceof BucketNumExpression) {
        bucketExpr = (BucketNumExpression) ve;
        break;
      }
    }
  }
}
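For operators that only need a few columns, the two-argument init used above takes a parallel column map, so each extracted field is read from an explicit batch column index. A hedged sketch of that form; the six-column layout, the indices 2 and 5, and the types are invented for illustration and do not come from the operator:

import java.util.Arrays;

import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class ColumnMapExtractSketch {
  public static void main(String[] args) throws Exception {
    // Hypothetical six-column batch; only columns 2 (string) and 5 (int) matter here.
    VectorizedRowBatch batch = new VectorizedRowBatch(6);
    BytesColumnVector stringCol = new BytesColumnVector();
    stringCol.initBuffer();
    byte[] bytes = "part-a".getBytes("UTF-8");
    stringCol.setVal(0, bytes, 0, bytes.length);
    batch.cols[2] = stringCol;
    LongColumnVector intCol = new LongColumnVector();
    intCol.vector[0] = 3L;
    batch.cols[5] = intCol;
    batch.size = 1;

    // The column map pairs each TypeInfo with the batch column it reads from,
    // mirroring reduceSinkPartitionColumnMap in the operator above.
    TypeInfo[] partitionTypeInfos =
        new TypeInfo[] { TypeInfoFactory.stringTypeInfo, TypeInfoFactory.intTypeInfo };
    int[] partitionColumnMap = new int[] { 2, 5 };

    VectorExtractRow partitionVectorExtractRow = new VectorExtractRow();
    partitionVectorExtractRow.init(partitionTypeInfos, partitionColumnMap);

    // Reused per row to avoid allocation, as the operator does.
    Object[] partitionFieldValues = new Object[partitionTypeInfos.length];
    partitionVectorExtractRow.extractRow(batch, 0, partitionFieldValues);
    System.out.println(Arrays.toString(partitionFieldValues));
  }
}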
Use of org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow in project hive by apache.
From class TestVectorDateDiff, method doVectorDateAddSubTest:
private void doVectorDateAddSubTest(TypeInfo dateTimeStringTypeInfo1,
    TypeInfo dateTimeStringTypeInfo2, List<String> columns, TypeInfo[] typeInfos,
    List<ExprNodeDesc> children, ExprNodeGenericFuncDesc exprDesc,
    DateDiffTestMode dateDiffTestMode, ColumnScalarMode columnScalarMode,
    VectorRandomBatchSource batchSource, VectorizedRowBatchCtx batchContext,
    Object[] resultObjects) throws Exception {

  HiveConf hiveConf = new HiveConf();
  if (dateDiffTestMode == DateDiffTestMode.ADAPTOR) {
    hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_TEST_VECTOR_ADAPTOR_OVERRIDE, true);
  }

  DataTypePhysicalVariation[] dataTypePhysicalVariations = new DataTypePhysicalVariation[2];
  Arrays.fill(dataTypePhysicalVariations, DataTypePhysicalVariation.NONE);

  VectorizationContext vectorizationContext =
      new VectorizationContext("name", columns, Arrays.asList(typeInfos),
          Arrays.asList(dataTypePhysicalVariations), hiveConf);
  VectorExpression vectorExpression = vectorizationContext.getVectorExpression(exprDesc);
  vectorExpression.transientInit(hiveConf);

  if (dateDiffTestMode == DateDiffTestMode.VECTOR_EXPRESSION &&
      vectorExpression instanceof VectorUDFAdaptor) {
    System.out.println(
        "*NO NATIVE VECTOR EXPRESSION*" +
        " dateTimeStringTypeInfo1 " + dateTimeStringTypeInfo1.toString() +
        " dateTimeStringTypeInfo2 " + dateTimeStringTypeInfo2.toString() +
        " dateDiffTestMode " + dateDiffTestMode +
        " columnScalarMode " + columnScalarMode +
        " vectorExpression " + vectorExpression.toString());
  }

  VectorizedRowBatch batch = batchContext.createVectorizedRowBatch();

  VectorExtractRow resultVectorExtractRow = new VectorExtractRow();
  resultVectorExtractRow.init(
      new TypeInfo[] { TypeInfoFactory.intTypeInfo }, new int[] { columns.size() });
  Object[] scrqtchRow = new Object[1];

  // System.out.println("*VECTOR EXPRESSION* " + vectorExpression.getClass().getSimpleName());
  /*
  System.out.println(
      "*DEBUG* dateTimeStringTypeInfo1 " + dateTimeStringTypeInfo1.toString() +
      " dateTimeStringTypeInfo2 " + dateTimeStringTypeInfo2.toString() +
      " dateDiffTestMode " + dateDiffTestMode +
      " columnScalarMode " + columnScalarMode +
      " vectorExpression " + vectorExpression.toString());
  */

  batchSource.resetBatchIteration();
  int rowIndex = 0;
  while (true) {
    if (!batchSource.fillNextBatch(batch)) {
      break;
    }
    vectorExpression.evaluate(batch);
    extractResultObjects(batch, rowIndex, resultVectorExtractRow, scrqtchRow, resultObjects);
    rowIndex += batch.size;
  }
}
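extractResultObjects is a helper defined elsewhere in the test class. Below is a plausible reconstruction of what it does, based only on how the loop above calls it and assuming the datediff result is an int: walk the batch (honoring the selected vector), extract the output column into the reused scrqtchRow, and copy the reused writable before storing it. This body is an assumption, not the actual Hive helper; it would live inside the test class.

import org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

// Hypothetical reconstruction of the test helper used above.
private void extractResultObjects(VectorizedRowBatch batch, int rowIndex,
    VectorExtractRow resultVectorExtractRow, Object[] scrqtchRow,
    Object[] resultObjects) {
  for (int logicalIndex = 0; logicalIndex < batch.size; logicalIndex++) {
    // Map the logical position through the selected vector when it is in use.
    final int batchIndex =
        batch.selectedInUse ? batch.selected[logicalIndex] : logicalIndex;
    resultVectorExtractRow.extractRow(batch, batchIndex, scrqtchRow);
    // scrqtchRow[0] holds a writable that VectorExtractRow reuses across rows,
    // so copy it before storing (assuming an int result for datediff).
    resultObjects[rowIndex + logicalIndex] = ObjectInspectorUtils.copyToStandardObject(
        scrqtchRow[0],
        PrimitiveObjectInspectorFactory.writableIntObjectInspector,
        ObjectInspectorCopyOption.WRITABLE);
  }
}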
Use of org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow in project hive by apache.
From class TestVectorNull, method doVectorCastTest:
private boolean doVectorCastTest(TypeInfo typeInfo, boolean isFilter, List<String> columns,
    String[] columnNames, TypeInfo[] typeInfos,
    DataTypePhysicalVariation[] dataTypePhysicalVariations, List<ExprNodeDesc> children,
    GenericUDF udf, ExprNodeGenericFuncDesc exprDesc, NullTestMode nullTestMode,
    VectorRandomBatchSource batchSource, ObjectInspector objectInspector,
    TypeInfo outputTypeInfo, Object[] resultObjects) throws Exception {

  HiveConf hiveConf = new HiveConf();
  if (nullTestMode == NullTestMode.ADAPTOR) {
    hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_TEST_VECTOR_ADAPTOR_OVERRIDE, true);
  }

  VectorizationContext vectorizationContext =
      new VectorizationContext("name", columns, Arrays.asList(typeInfos),
          Arrays.asList(dataTypePhysicalVariations), hiveConf);
  VectorExpression vectorExpression =
      vectorizationContext.getVectorExpression(exprDesc,
          (isFilter ?
              VectorExpressionDescriptor.Mode.FILTER :
              VectorExpressionDescriptor.Mode.PROJECTION));
  vectorExpression.transientInit(hiveConf);

  if (nullTestMode == NullTestMode.VECTOR_EXPRESSION &&
      vectorExpression instanceof VectorUDFAdaptor) {
    System.out.println(
        "*NO NATIVE VECTOR EXPRESSION*" +
        " typeInfo " + typeInfo.toString() +
        " nullTestMode " + nullTestMode +
        " isFilter " + isFilter +
        " vectorExpression " + vectorExpression.toString());
  }

  // System.out.println("*VECTOR EXPRESSION* " + vectorExpression.getClass().getSimpleName());
  /*
  System.out.println(
      "*DEBUG* typeInfo " + typeInfo.toString() +
      " nullTestMode " + nullTestMode +
      " isFilter " + isFilter +
      " vectorExpression " + vectorExpression.toString());
  */

  VectorRandomRowSource rowSource = batchSource.getRowSource();
  VectorizedRowBatchCtx batchContext =
      new VectorizedRowBatchCtx(
          columnNames,
          rowSource.typeInfos(),
          rowSource.dataTypePhysicalVariations(),
          /* dataColumnNums */ null,
          /* partitionColumnCount */ 0,
          /* virtualColumnCount */ 0,
          /* neededVirtualColumns */ null,
          vectorizationContext.getScratchColumnTypeNames(),
          vectorizationContext.getScratchDataTypePhysicalVariations());
  VectorizedRowBatch batch = batchContext.createVectorizedRowBatch();

  VectorExtractRow resultVectorExtractRow = null;
  Object[] scrqtchRow = null;
  if (!isFilter) {
    resultVectorExtractRow = new VectorExtractRow();
    final int outputColumnNum = vectorExpression.getOutputColumnNum();
    resultVectorExtractRow.init(
        new TypeInfo[] { outputTypeInfo }, new int[] { outputColumnNum });
    scrqtchRow = new Object[1];
  }

  boolean copySelectedInUse = false;
  int[] copySelected = new int[VectorizedRowBatch.DEFAULT_SIZE];

  batchSource.resetBatchIteration();
  int rowIndex = 0;
  while (true) {
    if (!batchSource.fillNextBatch(batch)) {
      break;
    }
    final int originalBatchSize = batch.size;
    if (isFilter) {
      copySelectedInUse = batch.selectedInUse;
      if (batch.selectedInUse) {
        System.arraycopy(batch.selected, 0, copySelected, 0, originalBatchSize);
      }
    }

    // In filter mode, the batch size can be made smaller.
    vectorExpression.evaluate(batch);

    if (!isFilter) {
      extractResultObjects(batch, rowIndex, resultVectorExtractRow, scrqtchRow,
          objectInspector, resultObjects);
    } else {
      final int currentBatchSize = batch.size;
      if (copySelectedInUse && batch.selectedInUse) {
        int selectIndex = 0;
        for (int i = 0; i < originalBatchSize; i++) {
          final int originalBatchIndex = copySelected[i];
          final boolean booleanResult;
          if (selectIndex < currentBatchSize &&
              batch.selected[selectIndex] == originalBatchIndex) {
            booleanResult = true;
            selectIndex++;
          } else {
            booleanResult = false;
          }
          resultObjects[rowIndex + i] = new BooleanWritable(booleanResult);
        }
      } else if (batch.selectedInUse) {
        int selectIndex = 0;
        for (int i = 0; i < originalBatchSize; i++) {
          final boolean booleanResult;
          if (selectIndex < currentBatchSize && batch.selected[selectIndex] == i) {
            booleanResult = true;
            selectIndex++;
          } else {
            booleanResult = false;
          }
          resultObjects[rowIndex + i] = new BooleanWritable(booleanResult);
        }
      } else if (currentBatchSize == 0) {
        // Whole batch got zapped.
        for (int i = 0; i < originalBatchSize; i++) {
          resultObjects[rowIndex + i] = new BooleanWritable(false);
        }
      } else {
        // Every row kept.
        for (int i = 0; i < originalBatchSize; i++) {
          resultObjects[rowIndex + i] = new BooleanWritable(true);
        }
      }
    }
    rowIndex += originalBatchSize;
  }
  return true;
}
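The filter branches above all implement one idea: walk the pre-evaluate row list and the post-evaluate selected vector in lockstep, marking a row true only when it survived the filter. A compact illustration of that merge on plain arrays (the class, names, and sample data are illustrative, not from the test):

import java.util.Arrays;

public class FilterMergeSketch {
  // Both arrays hold ascending row indices; surviving is a subsequence of original.
  static boolean[] keptFlags(int[] originalSelected, int originalSize,
      int[] survivingSelected, int survivingSize) {
    boolean[] kept = new boolean[originalSize];
    int selectIndex = 0;
    for (int i = 0; i < originalSize; i++) {
      if (selectIndex < survivingSize
          && survivingSelected[selectIndex] == originalSelected[i]) {
        kept[i] = true;  // row survived the filter
        selectIndex++;
      }
    }
    return kept;
  }

  public static void main(String[] args) {
    int[] original = { 0, 2, 3, 7 };   // rows selected before evaluate
    int[] surviving = { 2, 7 };        // rows still selected after evaluate
    System.out.println(Arrays.toString(keptFlags(original, 4, surviving, 2)));
    // prints [false, true, false, true]
  }
}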
Use of org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow in project hive by apache.
From class TestVectorStringConcat, method doVectorStringConcatTest:
private void doVectorStringConcatTest(TypeInfo stringTypeInfo1, TypeInfo stringTypeInfo2,
    List<String> columns, TypeInfo[] typeInfos, List<ExprNodeDesc> children,
    StringConcatTestMode stringConcatTestMode, ColumnScalarMode columnScalarMode,
    VectorRandomBatchSource batchSource, VectorizedRowBatchCtx batchContext,
    ObjectInspector rowInspector, GenericUDF genericUdf, Object[] resultObjects)
    throws Exception {

  HiveConf hiveConf = new HiveConf();
  if (stringConcatTestMode == StringConcatTestMode.ADAPTOR) {
    hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_TEST_VECTOR_ADAPTOR_OVERRIDE, true);
  }

  DataTypePhysicalVariation[] dataTypePhysicalVariations = new DataTypePhysicalVariation[2];
  Arrays.fill(dataTypePhysicalVariations, DataTypePhysicalVariation.NONE);

  ExprNodeGenericFuncDesc exprDesc =
      new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, genericUdf, children);

  // ---------------------------------------
  // Just so we can get the output type...

  ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(exprDesc, hiveConf);
  evaluator.initialize(rowInspector);
  ObjectInspector objectInspector = evaluator.getOutputOI();
  TypeInfo outputTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(objectInspector);

  /*
   * Again with correct output type...
   */
  exprDesc = new ExprNodeGenericFuncDesc(outputTypeInfo, genericUdf, children);

  // ---------------------------------------

  VectorizationContext vectorizationContext =
      new VectorizationContext("name", columns, Arrays.asList(typeInfos),
          Arrays.asList(dataTypePhysicalVariations), hiveConf);
  VectorExpression vectorExpression = vectorizationContext.getVectorExpression(exprDesc);
  vectorExpression.transientInit(hiveConf);

  if (stringConcatTestMode == StringConcatTestMode.VECTOR_EXPRESSION &&
      vectorExpression instanceof VectorUDFAdaptor) {
    System.out.println(
        "*NO NATIVE VECTOR EXPRESSION*" +
        " stringTypeInfo1 " + stringTypeInfo1.toString() +
        " stringTypeInfo2 " + stringTypeInfo2.toString() +
        " stringConcatTestMode " + stringConcatTestMode +
        " columnScalarMode " + columnScalarMode +
        " vectorExpression " + vectorExpression.toString());
  }

  VectorizedRowBatch batch = batchContext.createVectorizedRowBatch();

  VectorExtractRow resultVectorExtractRow = new VectorExtractRow();
  resultVectorExtractRow.init(
      new TypeInfo[] { outputTypeInfo }, new int[] { columns.size() });
  Object[] scrqtchRow = new Object[1];

  // System.out.println("*VECTOR EXPRESSION* " + vectorExpression.getClass().getSimpleName());
  /*
  System.out.println(
      "*DEBUG* stringTypeInfo1 " + stringTypeInfo1.toString() +
      " stringTypeInfo2 " + stringTypeInfo2.toString() +
      " stringConcatTestMode " + stringConcatTestMode +
      " columnScalarMode " + columnScalarMode +
      " vectorExpression " + vectorExpression.toString());
  */

  batchSource.resetBatchIteration();
  int rowIndex = 0;
  while (true) {
    if (!batchSource.fillNextBatch(batch)) {
      break;
    }
    vectorExpression.evaluate(batch);
    extractResultObjects(batch, rowIndex, resultVectorExtractRow, scrqtchRow,
        objectInspector, resultObjects);
    rowIndex += batch.size;
  }
}
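The two-pass descriptor construction above (build with a provisional type, initialize a row-mode evaluator just to learn the real output type, then rebuild) can be exercised on its own. A hedged, self-contained sketch using GenericUDFConcat over two string columns; the column names, table alias, and inspectors are invented for illustration:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFConcat;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class OutputTypeDiscoverySketch {
  public static void main(String[] args) throws Exception {
    // concat(col0, col1) over two string columns (invented schema).
    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
    children.add(new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "col0", "t", false));
    children.add(new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "col1", "t", false));

    // First pass: a provisional output type, just to build the descriptor.
    ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc(
        TypeInfoFactory.stringTypeInfo, new GenericUDFConcat(), children);

    ObjectInspector rowInspector = ObjectInspectorFactory.getStandardStructObjectInspector(
        Arrays.asList("col0", "col1"),
        Arrays.<ObjectInspector>asList(
            PrimitiveObjectInspectorFactory.javaStringObjectInspector,
            PrimitiveObjectInspectorFactory.javaStringObjectInspector));

    // Initialize a row-mode evaluator only to learn the real output type
    // (initialize returns the same inspector that getOutputOI() would).
    ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(exprDesc);
    ObjectInspector outputOI = evaluator.initialize(rowInspector);
    TypeInfo outputTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(outputOI);

    // Second pass: rebuild the descriptor with the discovered output type.
    exprDesc = new ExprNodeGenericFuncDesc(outputTypeInfo, new GenericUDFConcat(), children);
    System.out.println("output type: " + outputTypeInfo);
  }
}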