Use of org.apache.hadoop.hive.ql.exec.vector.VectorMapJoinOuterFilteredOperator in project hive by apache.
The class MapJoinTestConfig, method createMapJoin.
/**
 * Creates a map-join operator plus its loaded small-table hash container for testing.
 *
 * <p>Builds either a row-mode {@link MapJoinOperator} or, when {@code isVectorMapJoin}
 * is set, a vectorized variant: {@code VectorMapJoinOuterFilteredOperator} when the
 * join is an outer join that carries big-table filter expressions, and
 * {@code VectorMapJoinOperator} otherwise. The small-table data is loaded into the
 * original {@code HashMapWrapper} or the optimized {@code MapJoinBytesTableContainer},
 * chosen by {@code isOriginalMapJoin}. Hash-table loading inside the operator itself
 * is disabled via HIVE_MAPJOIN_TESTING_NO_HASH_TABLE_LOAD because the container is
 * pre-loaded here.
 *
 * <p>NOTE(review): {@code shareMapJoinTableContainer} is not referenced in this
 * body — presumably reserved for container-sharing tests; confirm against callers.
 * NOTE(review): filters are looked up with the hard-coded position 0 while
 * keys/values use {@code mapJoinDesc.getPosBigTable()} — verify these always agree.
 */
public static CreateMapJoinResult createMapJoin(MapJoinTestDescription testDesc, MapJoinTestData testData, MapJoinDesc mapJoinDesc, boolean isVectorMapJoin, boolean isOriginalMapJoin, MapJoinTableContainer shareMapJoinTableContainer) throws SerDeException, IOException, HiveException {
  final Byte bigTablePos = 0;
  // Build the key/value SerDe context and the backing small-table container.
  MapJoinTableContainerSerDe containerSerDe = MapJoinTestConfig.createMapJoinTableContainerSerDe(mapJoinDesc);
  MapJoinObjectSerDeContext valueContext = containerSerDe.getValueContext();
  MapJoinTableContainer tableContainer;
  if (isOriginalMapJoin) {
    tableContainer = new HashMapWrapper(testDesc.hiveConf, -1);
  } else {
    tableContainer = new MapJoinBytesTableContainer(testDesc.hiveConf, valueContext, testData.smallTableKeyHashMap.size(), 0);
  }
  tableContainer.setSerde(containerSerDe.getKeyContext(), containerSerDe.getValueContext());
  loadTableContainerData(testDesc, testData, tableContainer);
  MapJoinOperator operator;
  if (isVectorMapJoin) {
    VectorizationContext vContext = new VectorizationContext("test", testDesc.bigTableColumnNameList);
    // UNDONE: unclear whether scratch-column allocation for small-table results
    // belongs in the input VectorizationContext; left disabled.
    //   for (int i = 0; i < testDesc.smallTableValueTypeInfos.length; i++) {
    //     vContext.allocateScratchColumn(testDesc.smallTableValueTypeInfos[i]);
    //   }
    // Mirror what the Vectorizer class does when planning a vectorized map-join.
    VectorMapJoinDesc vectorDesc = new VectorMapJoinDesc();
    byte posBigTable = (byte) mapJoinDesc.getPosBigTable();
    vectorDesc.setAllBigTableKeyExpressions(vContext.getVectorExpressions(mapJoinDesc.getKeys().get(posBigTable)));
    Map<Byte, List<ExprNodeDesc>> exprs = mapJoinDesc.getExprs();
    vectorDesc.setAllBigTableValueExpressions(vContext.getVectorExpressions(exprs.get(posBigTable)));
    // An outer join with big-table filters requires the filtered operator variant.
    List<ExprNodeDesc> bigTableFilters = mapJoinDesc.getFilters().get(bigTablePos);
    boolean outerAndFiltered = !mapJoinDesc.isNoOuterJoin() && !bigTableFilters.isEmpty();
    if (outerAndFiltered) {
      operator = new VectorMapJoinOuterFilteredOperator(new CompilationOpContext(), mapJoinDesc, vContext, vectorDesc);
    } else {
      operator = new VectorMapJoinOperator(new CompilationOpContext(), mapJoinDesc, vContext, vectorDesc);
    }
  } else {
    operator = new MapJoinOperator(new CompilationOpContext());
    operator.setConf(mapJoinDesc);
  }
  // The container was loaded above; tell the operator not to load it again.
  HiveConf.setBoolVar(testDesc.hiveConf, HiveConf.ConfVars.HIVE_MAPJOIN_TESTING_NO_HASH_TABLE_LOAD, true);
  return new CreateMapJoinResult(operator, tableContainer, containerSerDe);
}
Use of org.apache.hadoop.hive.ql.exec.vector.VectorMapJoinOuterFilteredOperator in project hive by apache.
The class MapJoinTestConfig, method createMapJoin (collector-operator overload).
/**
 * Creates a map-join operator wired to the given collector operator, with its
 * small-table container pre-loaded and attached for testing.
 *
 * <p>Like the {@code CreateMapJoinResult}-returning overload, this selects a
 * row-mode {@link MapJoinOperator} or a vectorized variant
 * ({@code VectorMapJoinOuterFilteredOperator} for an outer join with big-table
 * filter expressions, {@code VectorMapJoinOperator} otherwise), and chooses the
 * container implementation via {@code isOriginalMapJoin}. It additionally
 * connects the operator to {@code collectorOperator} and hands the loaded
 * container to the operator through {@code setTestMapJoinTableContainer}.
 *
 * <p>NOTE(review): the container is attached at position 1 (hard-coded) —
 * presumably the small-table position; confirm against the test descriptions.
 */
public static MapJoinOperator createMapJoin(MapJoinTestDescription testDesc, Operator<? extends OperatorDesc> collectorOperator, MapJoinTestData testData, MapJoinDesc mapJoinDesc, boolean isVectorMapJoin, boolean isOriginalMapJoin) throws SerDeException, IOException, HiveException {
  final Byte bigTablePos = 0;
  // Build the key/value SerDe context and the backing small-table container.
  MapJoinTableContainerSerDe containerSerDe = MapJoinTestConfig.createMapJoinTableContainerSerDe(mapJoinDesc);
  MapJoinObjectSerDeContext valueContext = containerSerDe.getValueContext();
  MapJoinTableContainer tableContainer;
  if (isOriginalMapJoin) {
    tableContainer = new HashMapWrapper(testDesc.hiveConf, -1);
  } else {
    tableContainer = new MapJoinBytesTableContainer(testDesc.hiveConf, valueContext, testData.smallTableKeyHashMap.size(), 0);
  }
  tableContainer.setSerde(containerSerDe.getKeyContext(), containerSerDe.getValueContext());
  loadTableContainerData(testDesc, testData, tableContainer);
  MapJoinOperator operator;
  if (isVectorMapJoin) {
    VectorizationContext vContext = new VectorizationContext("test", testDesc.bigTableColumnNamesList);
    // Allocate scratch columns that will hold the small-table result values.
    for (int i = 0; i < testDesc.smallTableValueTypeInfos.length; i++) {
      vContext.allocateScratchColumn(testDesc.smallTableValueTypeInfos[i]);
    }
    // Mirror what the Vectorizer class does when planning a vectorized map-join.
    VectorMapJoinDesc vectorDesc = new VectorMapJoinDesc();
    byte posBigTable = (byte) mapJoinDesc.getPosBigTable();
    vectorDesc.setAllBigTableKeyExpressions(vContext.getVectorExpressions(mapJoinDesc.getKeys().get(posBigTable)));
    Map<Byte, List<ExprNodeDesc>> exprs = mapJoinDesc.getExprs();
    vectorDesc.setAllBigTableValueExpressions(vContext.getVectorExpressions(exprs.get(posBigTable)));
    // An outer join with big-table filters requires the filtered operator variant.
    List<ExprNodeDesc> bigTableFilters = mapJoinDesc.getFilters().get(bigTablePos);
    boolean outerAndFiltered = !mapJoinDesc.isNoOuterJoin() && !bigTableFilters.isEmpty();
    if (outerAndFiltered) {
      operator = new VectorMapJoinOuterFilteredOperator(new CompilationOpContext(), mapJoinDesc, vContext, vectorDesc);
    } else {
      operator = new VectorMapJoinOperator(new CompilationOpContext(), mapJoinDesc, vContext, vectorDesc);
    }
  } else {
    operator = new MapJoinOperator(new CompilationOpContext());
    operator.setConf(mapJoinDesc);
  }
  // Wire the join into the test pipeline and hand it the pre-loaded container.
  MapJoinTestConfig.connectOperators(testDesc, operator, collectorOperator);
  operator.setTestMapJoinTableContainer(1, tableContainer, containerSerDe);
  return operator;
}
Aggregations