
Example 1 with VectorMapJoinOperator

Use of org.apache.hadoop.hive.ql.exec.vector.VectorMapJoinOperator in project hive by apache.

From the class MapJoinTestConfig, the method createMapJoin:

public static CreateMapJoinResult createMapJoin(MapJoinTestDescription testDesc, MapJoinTestData testData, MapJoinDesc mapJoinDesc, boolean isVectorMapJoin, boolean isOriginalMapJoin, MapJoinTableContainer shareMapJoinTableContainer) throws SerDeException, IOException, HiveException {
    final Byte bigTablePos = 0;
    // Build the key/value serde pair for the small-table hash map, choose the container
    // implementation (the original HashMapWrapper or the optimized MapJoinBytesTableContainer),
    // and load it with the test's small-table rows.
    MapJoinTableContainerSerDe mapJoinTableContainerSerDe = MapJoinTestConfig.createMapJoinTableContainerSerDe(mapJoinDesc);
    MapJoinObjectSerDeContext valCtx = mapJoinTableContainerSerDe.getValueContext();
    MapJoinTableContainer mapJoinTableContainer = (isOriginalMapJoin ? new HashMapWrapper(testDesc.hiveConf, -1) : new MapJoinBytesTableContainer(testDesc.hiveConf, valCtx, testData.smallTableKeyHashMap.size(), 0));
    mapJoinTableContainer.setSerde(mapJoinTableContainerSerDe.getKeyContext(), mapJoinTableContainerSerDe.getValueContext());
    loadTableContainerData(testDesc, testData, mapJoinTableContainer);
    MapJoinOperator operator;
    if (!isVectorMapJoin) {
        operator = new MapJoinOperator(new CompilationOpContext());
        operator.setConf(mapJoinDesc);
    } else {
        VectorizationContext vContext = new VectorizationContext("test", testDesc.bigTableColumnNameList);
        /*
        // UNDONE: Unclear this belongs in the input VectorizationContext...
        // Create scratch columns to hold small table results.
        for (int i = 0; i < testDesc.smallTableValueTypeInfos.length; i++) {
            vContext.allocateScratchColumn(testDesc.smallTableValueTypeInfos[i]);
        }
        */
        // This is what the Vectorizer class does.
        VectorMapJoinDesc vectorMapJoinDesc = new VectorMapJoinDesc();
        byte posBigTable = (byte) mapJoinDesc.getPosBigTable();
        VectorExpression[] allBigTableKeyExpressions = vContext.getVectorExpressions(mapJoinDesc.getKeys().get(posBigTable));
        vectorMapJoinDesc.setAllBigTableKeyExpressions(allBigTableKeyExpressions);
        Map<Byte, List<ExprNodeDesc>> exprs = mapJoinDesc.getExprs();
        VectorExpression[] allBigTableValueExpressions = vContext.getVectorExpressions(exprs.get(posBigTable));
        vectorMapJoinDesc.setAllBigTableValueExpressions(allBigTableValueExpressions);
        List<ExprNodeDesc> bigTableFilters = mapJoinDesc.getFilters().get(bigTablePos);
        boolean isOuterAndFiltered = (!mapJoinDesc.isNoOuterJoin() && bigTableFilters.size() > 0);
        if (!isOuterAndFiltered) {
            operator = new VectorMapJoinOperator(new CompilationOpContext(), mapJoinDesc, vContext, vectorMapJoinDesc);
        } else {
            operator = new VectorMapJoinOuterFilteredOperator(new CompilationOpContext(), mapJoinDesc, vContext, vectorMapJoinDesc);
        }
    }
    // Skip the operator's own hash table load during testing; the pre-built container is supplied by the test.
    HiveConf.setBoolVar(testDesc.hiveConf, HiveConf.ConfVars.HIVE_MAPJOIN_TESTING_NO_HASH_TABLE_LOAD, true);
    return new CreateMapJoinResult(operator, mapJoinTableContainer, mapJoinTableContainerSerDe);
}
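
For orientation, here is a minimal sketch of how a test might invoke this overload, written as a hypothetical helper placed alongside createMapJoin. It assumes testDesc, testData, and mapJoinDesc have already been built by the surrounding harness (their setup is not part of this snippet), and the result field name mapJoinOperator is an assumption, since only the CreateMapJoinResult constructor is visible above.

public static MapJoinOperator buildVectorMapJoinForTest(MapJoinTestDescription testDesc, MapJoinTestData testData, MapJoinDesc mapJoinDesc) throws SerDeException, IOException, HiveException {
    // Request the vectorized variant backed by the optimized bytes table container,
    // without sharing an existing table container.
    CreateMapJoinResult result = createMapJoin(testDesc, testData, mapJoinDesc, /* isVectorMapJoin */ true, /* isOriginalMapJoin */ false, /* shareMapJoinTableContainer */ null);
    // Field name assumed; the accessors of CreateMapJoinResult are not shown in this search result.
    return result.mapJoinOperator;
}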
Also used: MapJoinOperator (org.apache.hadoop.hive.ql.exec.MapJoinOperator), VectorMapJoinOperator (org.apache.hadoop.hive.ql.exec.vector.VectorMapJoinOperator), VectorMapJoinDesc (org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc), MapJoinBytesTableContainer (org.apache.hadoop.hive.ql.exec.persistence.MapJoinBytesTableContainer), VectorizationContext (org.apache.hadoop.hive.ql.exec.vector.VectorizationContext), MapJoinTableContainerSerDe (org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainerSerDe), HashMapWrapper (org.apache.hadoop.hive.ql.exec.persistence.HashMapWrapper), CompilationOpContext (org.apache.hadoop.hive.ql.CompilationOpContext), VectorMapJoinOuterFilteredOperator (org.apache.hadoop.hive.ql.exec.vector.VectorMapJoinOuterFilteredOperator), MapJoinObjectSerDeContext (org.apache.hadoop.hive.ql.exec.persistence.MapJoinObjectSerDeContext), VectorExpression (org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression), List (java.util.List), ArrayList (java.util.ArrayList), MapJoinTableContainer (org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainer), ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc)

Example 2 with VectorMapJoinOperator

Use of org.apache.hadoop.hive.ql.exec.vector.VectorMapJoinOperator in project hive by apache.

From the class MapJoinTestConfig, another overload of the method createMapJoin:

public static MapJoinOperator createMapJoin(MapJoinTestDescription testDesc, Operator<? extends OperatorDesc> collectorOperator, MapJoinTestData testData, MapJoinDesc mapJoinDesc, boolean isVectorMapJoin, boolean isOriginalMapJoin) throws SerDeException, IOException, HiveException {
    final Byte bigTablePos = 0;
    MapJoinTableContainerSerDe mapJoinTableContainerSerDe = MapJoinTestConfig.createMapJoinTableContainerSerDe(mapJoinDesc);
    MapJoinObjectSerDeContext valCtx = mapJoinTableContainerSerDe.getValueContext();
    MapJoinTableContainer mapJoinTableContainer = (isOriginalMapJoin ? new HashMapWrapper(testDesc.hiveConf, -1) : new MapJoinBytesTableContainer(testDesc.hiveConf, valCtx, testData.smallTableKeyHashMap.size(), 0));
    mapJoinTableContainer.setSerde(mapJoinTableContainerSerDe.getKeyContext(), mapJoinTableContainerSerDe.getValueContext());
    loadTableContainerData(testDesc, testData, mapJoinTableContainer);
    MapJoinOperator operator;
    if (!isVectorMapJoin) {
        operator = new MapJoinOperator(new CompilationOpContext());
        operator.setConf(mapJoinDesc);
    } else {
        VectorizationContext vContext = new VectorizationContext("test", testDesc.bigTableColumnNamesList);
        // Create scratch columns to hold small table results.
        for (int i = 0; i < testDesc.smallTableValueTypeInfos.length; i++) {
            vContext.allocateScratchColumn(testDesc.smallTableValueTypeInfos[i]);
        }
        // This is what the Vectorizer class does.
        VectorMapJoinDesc vectorMapJoinDesc = new VectorMapJoinDesc();
        byte posBigTable = (byte) mapJoinDesc.getPosBigTable();
        VectorExpression[] allBigTableKeyExpressions = vContext.getVectorExpressions(mapJoinDesc.getKeys().get(posBigTable));
        vectorMapJoinDesc.setAllBigTableKeyExpressions(allBigTableKeyExpressions);
        Map<Byte, List<ExprNodeDesc>> exprs = mapJoinDesc.getExprs();
        VectorExpression[] allBigTableValueExpressions = vContext.getVectorExpressions(exprs.get(posBigTable));
        vectorMapJoinDesc.setAllBigTableValueExpressions(allBigTableValueExpressions);
        List<ExprNodeDesc> bigTableFilters = mapJoinDesc.getFilters().get(bigTablePos);
        boolean isOuterAndFiltered = (!mapJoinDesc.isNoOuterJoin() && bigTableFilters.size() > 0);
        if (!isOuterAndFiltered) {
            operator = new VectorMapJoinOperator(new CompilationOpContext(), mapJoinDesc, vContext, vectorMapJoinDesc);
        } else {
            operator = new VectorMapJoinOuterFilteredOperator(new CompilationOpContext(), mapJoinDesc, vContext, vectorMapJoinDesc);
        }
    }
    // Wire the map join into the test pipeline and hand it the pre-loaded small-table
    // container (position 1 is the small table; the big table is position 0 here).
    MapJoinTestConfig.connectOperators(testDesc, operator, collectorOperator);
    operator.setTestMapJoinTableContainer(1, mapJoinTableContainer, mapJoinTableContainerSerDe);
    return operator;
}
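
The two createMapJoin overloads differ mainly in how the result is wired up. The first returns the operator together with its table container and serde in a CreateMapJoinResult and enables HIVE_MAPJOIN_TESTING_NO_HASH_TABLE_LOAD, leaving the caller to attach them; this one connects the operator to the supplied collector operator via connectOperators and hands it the pre-loaded container directly through setTestMapJoinTableContainer. In both cases the choice between VectorMapJoinOperator and VectorMapJoinOuterFilteredOperator hinges on whether the join is an outer join with filters on the big table.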
Also used: MapJoinOperator (org.apache.hadoop.hive.ql.exec.MapJoinOperator), VectorMapJoinOperator (org.apache.hadoop.hive.ql.exec.vector.VectorMapJoinOperator), VectorMapJoinDesc (org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc), MapJoinBytesTableContainer (org.apache.hadoop.hive.ql.exec.persistence.MapJoinBytesTableContainer), VectorizationContext (org.apache.hadoop.hive.ql.exec.vector.VectorizationContext), MapJoinTableContainerSerDe (org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainerSerDe), HashMapWrapper (org.apache.hadoop.hive.ql.exec.persistence.HashMapWrapper), CompilationOpContext (org.apache.hadoop.hive.ql.CompilationOpContext), VectorMapJoinOuterFilteredOperator (org.apache.hadoop.hive.ql.exec.vector.VectorMapJoinOuterFilteredOperator), MapJoinObjectSerDeContext (org.apache.hadoop.hive.ql.exec.persistence.MapJoinObjectSerDeContext), VectorExpression (org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression), List (java.util.List), ArrayList (java.util.ArrayList), MapJoinTableContainer (org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainer), ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc)

Aggregations

ArrayList (java.util.ArrayList): 2 uses
List (java.util.List): 2 uses
CompilationOpContext (org.apache.hadoop.hive.ql.CompilationOpContext): 2 uses
MapJoinOperator (org.apache.hadoop.hive.ql.exec.MapJoinOperator): 2 uses
HashMapWrapper (org.apache.hadoop.hive.ql.exec.persistence.HashMapWrapper): 2 uses
MapJoinBytesTableContainer (org.apache.hadoop.hive.ql.exec.persistence.MapJoinBytesTableContainer): 2 uses
MapJoinObjectSerDeContext (org.apache.hadoop.hive.ql.exec.persistence.MapJoinObjectSerDeContext): 2 uses
MapJoinTableContainer (org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainer): 2 uses
MapJoinTableContainerSerDe (org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainerSerDe): 2 uses
VectorMapJoinOperator (org.apache.hadoop.hive.ql.exec.vector.VectorMapJoinOperator): 2 uses
VectorMapJoinOuterFilteredOperator (org.apache.hadoop.hive.ql.exec.vector.VectorMapJoinOuterFilteredOperator): 2 uses
VectorizationContext (org.apache.hadoop.hive.ql.exec.vector.VectorizationContext): 2 uses
VectorExpression (org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression): 2 uses
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc): 2 uses
VectorMapJoinDesc (org.apache.hadoop.hive.ql.plan.VectorMapJoinDesc): 2 uses