Use of org.apache.hadoop.hive.ql.exec.vector.VectorCopyRow in project hive by apache.
From the class VectorMapJoinCommonOperator, method initializeOp:
@Override
protected void initializeOp(Configuration hconf) throws HiveException {
  super.initializeOp(hconf);

  /*
   * Get configuration parameters.
   */
  overflowRepeatedThreshold = HiveConf.getIntVar(hconf,
      HiveConf.ConfVars.HIVE_VECTORIZATION_MAPJOIN_NATIVE_OVERFLOW_REPEATED_THRESHOLD);
  useOverflowRepeatedThreshold = (overflowRepeatedThreshold >= 0);

  /*
   * Create our vectorized copy row and deserialize row helper objects.
   */
  if (smallTableMapping.getCount() > 0) {
    smallTableVectorDeserializeRow =
        new VectorDeserializeRow<LazyBinaryDeserializeRead>(
            new LazyBinaryDeserializeRead(smallTableMapping.getTypeInfos(),
                /* useExternalBuffer */ true));
    smallTableVectorDeserializeRow.init(smallTableMapping.getOutputColumns());
  }

  if (bigTableRetainedMapping.getCount() > 0) {
    bigTableRetainedVectorCopy = new VectorCopyRow();
    bigTableRetainedVectorCopy.init(bigTableRetainedMapping);
  }

  if (bigTableOuterKeyMapping.getCount() > 0) {
    bigTableVectorCopyOuterKeys = new VectorCopyRow();
    bigTableVectorCopyOuterKeys.init(bigTableOuterKeyMapping);
  }

  /*
   * Setup the overflow batch.
   */
  overflowBatch = setupOverflowBatch();

  needCommonSetup = true;
  needHashTableSetup = true;

  if (isLogDebugEnabled) {
    int[] currentScratchColumns = vOutContext.currentScratchColumns();
    LOG.debug(getLoggingPrefix() + " VectorMapJoinCommonOperator initializeOp currentScratchColumns "
        + Arrays.toString(currentScratchColumns));

    StructObjectInspector structOutputObjectInspector =
        (StructObjectInspector) outputObjInspector;
    List<? extends StructField> fields = structOutputObjectInspector.getAllStructFieldRefs();
    int i = 0;
    for (StructField field : fields) {
      LOG.debug(getLoggingPrefix() + " VectorMapJoinCommonOperator initializeOp " + i + " field "
          + field.getFieldName() + " type " + field.getFieldObjectInspector().getTypeName());
      i++;
    }
  }
}
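
For context, the two VectorCopyRow helpers built above are later used in the operator's result-generation path to copy the mapped big-table columns from the current input batch into another batch row. Below is a minimal sketch of that usage, assuming VectorCopyRow's copyByValue(inBatch, inBatchIndex, outBatch, outBatchIndex) method as found in the Hive source; the helper name spillRowToOverflow and its parameters are illustrative only and are not part of VectorMapJoinCommonOperator.

import org.apache.hadoop.hive.ql.exec.vector.VectorCopyRow;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;

// Illustrative helper (hypothetical, not Hive code): copy the retained
// big-table columns of one input row into the next free slot of the
// overflow batch, using the copy-row helper initialized in initializeOp.
static void spillRowToOverflow(VectorCopyRow bigTableRetainedVectorCopy,
    VectorizedRowBatch batch, int batchIndex, VectorizedRowBatch overflowBatch) {
  // copyByValue walks the column mapping passed to init() and deep-copies
  // each mapped source column value into the target batch row.
  bigTableRetainedVectorCopy.copyByValue(batch, batchIndex, overflowBatch, overflowBatch.size);
  overflowBatch.size++;
  // When overflowBatch fills up, the operator forwards it downstream and
  // resets it before copying further rows (omitted here).
}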