
Example 1 with DummyStoreDesc

Use of org.apache.hadoop.hive.ql.plan.DummyStoreDesc in project hive by apache, from the class AbstractSMBJoinProc, method convertBucketMapJoinToSMBJoin:

// Convert the bucket map-join operator to a sort-merge map join operator
protected SMBMapJoinOperator convertBucketMapJoinToSMBJoin(MapJoinOperator mapJoinOp, SortBucketJoinProcCtx smbJoinContext) {
    String[] srcs = smbJoinContext.getSrcs();
    SMBMapJoinOperator smbJop = new SMBMapJoinOperator(mapJoinOp);
    SMBJoinDesc smbJoinDesc = new SMBJoinDesc(mapJoinOp.getConf());
    smbJop.setConf(smbJoinDesc);
    HashMap<Byte, String> tagToAlias = new HashMap<Byte, String>();
    for (int i = 0; i < srcs.length; i++) {
        tagToAlias.put((byte) i, srcs[i]);
    }
    smbJoinDesc.setTagToAlias(tagToAlias);
    int indexInListMapJoinNoReducer = this.pGraphContext.getListMapJoinOpsNoReducer().indexOf(mapJoinOp);
    if (indexInListMapJoinNoReducer >= 0) {
        this.pGraphContext.getListMapJoinOpsNoReducer().remove(indexInListMapJoinNoReducer);
        this.pGraphContext.getListMapJoinOpsNoReducer().add(indexInListMapJoinNoReducer, smbJop);
    }
    Map<String, DummyStoreOperator> aliasToSink = new HashMap<String, DummyStoreOperator>();
    // For all parents (other than the big table), insert a dummy store operator
    /* Consider a query like:
     *
     * select * from
     *   (subq1 --> has a filter)
     *   join
     *   (subq2 --> has a filter)
     * on some key
     *
     * Let us assume that subq1 is the small table (either specified by the user or inferred
     * automatically). The following operator tree will be created:
     *
     * TableScan (subq1) --> Select --> Filter --> DummyStore
     *                                                         \
     *                                                          \     SMBJoin
     *                                                          /
     *                                                         /
     * TableScan (subq2) --> Select --> Filter
     */
    List<Operator<? extends OperatorDesc>> parentOperators = mapJoinOp.getParentOperators();
    for (int i = 0; i < parentOperators.size(); i++) {
        Operator<? extends OperatorDesc> par = parentOperators.get(i);
        int index = par.getChildOperators().indexOf(mapJoinOp);
        par.getChildOperators().remove(index);
        if (i == smbJoinDesc.getPosBigTable()) {
            par.getChildOperators().add(index, smbJop);
        } else {
            DummyStoreOperator dummyStoreOp = (DummyStoreOperator) OperatorFactory.get(par.getCompilationOpContext(), new DummyStoreDesc());
            par.getChildOperators().add(index, dummyStoreOp);
            List<Operator<? extends OperatorDesc>> childrenOps = new ArrayList<Operator<? extends OperatorDesc>>();
            childrenOps.add(smbJop);
            dummyStoreOp.setChildOperators(childrenOps);
            List<Operator<? extends OperatorDesc>> parentOps = new ArrayList<Operator<? extends OperatorDesc>>();
            parentOps.add(par);
            dummyStoreOp.setParentOperators(parentOps);
            aliasToSink.put(srcs[i], dummyStoreOp);
            smbJop.getParentOperators().remove(i);
            smbJop.getParentOperators().add(i, dummyStoreOp);
        }
    }
    smbJoinDesc.setAliasToSink(aliasToSink);
    List<Operator<? extends OperatorDesc>> childOps = mapJoinOp.getChildOperators();
    for (int i = 0; i < childOps.size(); i++) {
        Operator<? extends OperatorDesc> child = childOps.get(i);
        int index = child.getParentOperators().indexOf(mapJoinOp);
        child.getParentOperators().remove(index);
        child.getParentOperators().add(index, smbJop);
    }
    // Data structures coming from QBJoinTree
    smbJop.getConf().setQBJoinTreeProps(mapJoinOp.getConf());
    // Register the new SMB join in the optimizer context and retire the original map join
    pGraphContext.getSmbMapJoinOps().add(smbJop);
    pGraphContext.getMapJoinOps().remove(mapJoinOp);
    return smbJop;
}
Also used: ReduceSinkOperator (org.apache.hadoop.hive.ql.exec.ReduceSinkOperator), MapJoinOperator (org.apache.hadoop.hive.ql.exec.MapJoinOperator), SMBMapJoinOperator (org.apache.hadoop.hive.ql.exec.SMBMapJoinOperator), JoinOperator (org.apache.hadoop.hive.ql.exec.JoinOperator), TableScanOperator (org.apache.hadoop.hive.ql.exec.TableScanOperator), Operator (org.apache.hadoop.hive.ql.exec.Operator), DummyStoreOperator (org.apache.hadoop.hive.ql.exec.DummyStoreOperator), DummyStoreDesc (org.apache.hadoop.hive.ql.plan.DummyStoreDesc), SMBJoinDesc (org.apache.hadoop.hive.ql.plan.SMBJoinDesc), OperatorDesc (org.apache.hadoop.hive.ql.plan.OperatorDesc), HashMap (java.util.HashMap), ArrayList (java.util.ArrayList)
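
The core wiring of this example, distilled as a minimal sketch. The class and method names (DummyStoreWiring, wireDummyStore) are illustrative only, not part of Hive; the operator APIs used (OperatorFactory.get, getChildOperators, setChildOperators, setParentOperators) are the same ones the example calls. A caller following the example would still register the sink in aliasToSink and replace the SMB join's parent at the corresponding position.

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hive.ql.exec.DummyStoreOperator;
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.OperatorFactory;
import org.apache.hadoop.hive.ql.exec.SMBMapJoinOperator;
import org.apache.hadoop.hive.ql.plan.DummyStoreDesc;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;

// Illustrative helper class; not part of Hive.
public final class DummyStoreWiring {

    // Splice a DummyStoreOperator (backed by a fresh DummyStoreDesc) between a
    // small-table parent and the SMB join, mirroring the loop in the example above.
    static DummyStoreOperator wireDummyStore(Operator<? extends OperatorDesc> par,
            SMBMapJoinOperator smbJop, int childIndex) {
        DummyStoreOperator dummyStoreOp = (DummyStoreOperator) OperatorFactory.get(
                par.getCompilationOpContext(), new DummyStoreDesc());
        // The parent now feeds the dummy store instead of the join.
        par.getChildOperators().set(childIndex, dummyStoreOp);
        // The dummy store's only child is the SMB join ...
        List<Operator<? extends OperatorDesc>> children = new ArrayList<Operator<? extends OperatorDesc>>();
        children.add(smbJop);
        dummyStoreOp.setChildOperators(children);
        // ... and its only parent is the original small-table operator.
        List<Operator<? extends OperatorDesc>> parents = new ArrayList<Operator<? extends OperatorDesc>>();
        parents.add(par);
        dummyStoreOp.setParentOperators(parents);
        return dummyStoreOp;
    }
}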

Example 2 with DummyStoreDesc

Use of org.apache.hadoop.hive.ql.plan.DummyStoreDesc in project hive by apache, from the class ConvertJoinMapJoin, method convertJoinSMBJoin:

// Replaces the join operator with a new CommonMergeJoinOperator and, when adjustParentsChildren
// is set, bypasses the parent reduce sinks by wiring the grandparents directly to the merge join
private void convertJoinSMBJoin(JoinOperator joinOp, OptimizeTezProcContext context, int mapJoinConversionPos, int numBuckets, boolean adjustParentsChildren) throws SemanticException {
    MapJoinDesc mapJoinDesc = null;
    if (adjustParentsChildren) {
        mapJoinDesc = MapJoinProcessor.getMapJoinDesc(context.conf, joinOp, joinOp.getConf().isLeftInputJoin(), joinOp.getConf().getBaseSrc(), joinOp.getConf().getMapAliases(), mapJoinConversionPos, true);
    } else {
        JoinDesc joinDesc = joinOp.getConf();
        // retain the original join desc in the map join.
        mapJoinDesc = new MapJoinDesc(MapJoinProcessor.getKeys(joinOp.getConf().isLeftInputJoin(), joinOp.getConf().getBaseSrc(), joinOp).getRight(), null, joinDesc.getExprs(), null, null, joinDesc.getOutputColumnNames(), mapJoinConversionPos, joinDesc.getConds(), joinDesc.getFilters(), joinDesc.getNoOuterJoin(), null, joinDesc.getMemoryMonitorInfo(), joinDesc.getInMemoryDataSize());
        mapJoinDesc.setNullSafes(joinDesc.getNullSafes());
        mapJoinDesc.setFilterMap(joinDesc.getFilterMap());
        mapJoinDesc.setResidualFilterExprs(joinDesc.getResidualFilterExprs());
        // keep column expression map, explain plan uses this to display
        mapJoinDesc.setColumnExprMap(joinDesc.getColumnExprMap());
        mapJoinDesc.setReversedExprs(joinDesc.getReversedExprs());
        mapJoinDesc.resetOrder();
    }
    CommonMergeJoinOperator mergeJoinOp = (CommonMergeJoinOperator) OperatorFactory.get(joinOp.getCompilationOpContext(), new CommonMergeJoinDesc(numBuckets, mapJoinConversionPos, mapJoinDesc), joinOp.getSchema());
    context.parseContext.getContext().getPlanMapper().link(joinOp, mergeJoinOp);
    int numReduceSinks = joinOp.getOpTraits().getNumReduceSinks();
    OpTraits opTraits = new OpTraits(joinOp.getOpTraits().getBucketColNames(), numBuckets, joinOp.getOpTraits().getSortCols(), numReduceSinks);
    mergeJoinOp.setOpTraits(opTraits);
    mergeJoinOp.getConf().setBucketingVersion(joinOp.getConf().getBucketingVersion());
    preserveOperatorInfos(mergeJoinOp, joinOp, context);
    for (Operator<? extends OperatorDesc> parentOp : joinOp.getParentOperators()) {
        int pos = parentOp.getChildOperators().indexOf(joinOp);
        parentOp.getChildOperators().remove(pos);
        parentOp.getChildOperators().add(pos, mergeJoinOp);
    }
    for (Operator<? extends OperatorDesc> childOp : joinOp.getChildOperators()) {
        int pos = childOp.getParentOperators().indexOf(joinOp);
        childOp.getParentOperators().remove(pos);
        childOp.getParentOperators().add(pos, mergeJoinOp);
    }
    List<Operator<? extends OperatorDesc>> childOperators = mergeJoinOp.getChildOperators();
    List<Operator<? extends OperatorDesc>> parentOperators = mergeJoinOp.getParentOperators();
    childOperators.clear();
    parentOperators.clear();
    childOperators.addAll(joinOp.getChildOperators());
    parentOperators.addAll(joinOp.getParentOperators());
    mergeJoinOp.getConf().setGenJoinKeys(false);
    if (adjustParentsChildren) {
        mergeJoinOp.getConf().setGenJoinKeys(true);
        List<Operator<? extends OperatorDesc>> newParentOpList = new ArrayList<Operator<? extends OperatorDesc>>();
        for (Operator<? extends OperatorDesc> parentOp : mergeJoinOp.getParentOperators()) {
            for (Operator<? extends OperatorDesc> grandParentOp : parentOp.getParentOperators()) {
                grandParentOp.getChildOperators().remove(parentOp);
                grandParentOp.getChildOperators().add(mergeJoinOp);
                newParentOpList.add(grandParentOp);
            }
        }
        mergeJoinOp.getParentOperators().clear();
        mergeJoinOp.getParentOperators().addAll(newParentOpList);
        List<Operator<? extends OperatorDesc>> parentOps = new ArrayList<Operator<? extends OperatorDesc>>(mergeJoinOp.getParentOperators());
        for (Operator<? extends OperatorDesc> parentOp : parentOps) {
            int parentIndex = mergeJoinOp.getParentOperators().indexOf(parentOp);
            if (parentIndex == mapJoinConversionPos) {
                continue;
            }
            // A hash-mode group-by buffers its rows and only flushes them at close; the merge join
            // needs the small-table rows during join processing, not at the time of close, so switch
            // the group-by to FINAL mode.
            if (parentOp instanceof GroupByOperator) {
                GroupByOperator gpbyOp = (GroupByOperator) parentOp;
                if (gpbyOp.getConf().getMode() == GroupByDesc.Mode.HASH) {
                    // No need to change for MERGE_PARTIAL etc.
                    gpbyOp.getConf().setMode(GroupByDesc.Mode.FINAL);
                }
            }
            // insert the dummy store operator here
            DummyStoreOperator dummyStoreOp = new TezDummyStoreOperator(mergeJoinOp.getCompilationOpContext());
            dummyStoreOp.setConf(new DummyStoreDesc());
            dummyStoreOp.setParentOperators(new ArrayList<Operator<? extends OperatorDesc>>());
            dummyStoreOp.setChildOperators(new ArrayList<Operator<? extends OperatorDesc>>());
            dummyStoreOp.getChildOperators().add(mergeJoinOp);
            int index = parentOp.getChildOperators().indexOf(mergeJoinOp);
            parentOp.getChildOperators().remove(index);
            parentOp.getChildOperators().add(index, dummyStoreOp);
            dummyStoreOp.getParentOperators().add(parentOp);
            mergeJoinOp.getParentOperators().remove(parentIndex);
            mergeJoinOp.getParentOperators().add(parentIndex, dummyStoreOp);
        }
    }
    mergeJoinOp.cloneOriginalParentsList(mergeJoinOp.getParentOperators());
}
Also used: CommonMergeJoinOperator (org.apache.hadoop.hive.ql.exec.CommonMergeJoinOperator), ReduceSinkOperator (org.apache.hadoop.hive.ql.exec.ReduceSinkOperator), MapJoinOperator (org.apache.hadoop.hive.ql.exec.MapJoinOperator), GroupByOperator (org.apache.hadoop.hive.ql.exec.GroupByOperator), FileSinkOperator (org.apache.hadoop.hive.ql.exec.FileSinkOperator), SelectOperator (org.apache.hadoop.hive.ql.exec.SelectOperator), MuxOperator (org.apache.hadoop.hive.ql.exec.MuxOperator), CommonJoinOperator (org.apache.hadoop.hive.ql.exec.CommonJoinOperator), TezDummyStoreOperator (org.apache.hadoop.hive.ql.exec.TezDummyStoreOperator), AppMasterEventOperator (org.apache.hadoop.hive.ql.exec.AppMasterEventOperator), JoinOperator (org.apache.hadoop.hive.ql.exec.JoinOperator), TableScanOperator (org.apache.hadoop.hive.ql.exec.TableScanOperator), Operator (org.apache.hadoop.hive.ql.exec.Operator), DummyStoreOperator (org.apache.hadoop.hive.ql.exec.DummyStoreOperator), MapJoinDesc (org.apache.hadoop.hive.ql.plan.MapJoinDesc), DummyStoreDesc (org.apache.hadoop.hive.ql.plan.DummyStoreDesc), OpTraits (org.apache.hadoop.hive.ql.plan.OpTraits), CommonMergeJoinDesc (org.apache.hadoop.hive.ql.plan.CommonMergeJoinDesc), JoinDesc (org.apache.hadoop.hive.ql.plan.JoinDesc), OperatorDesc (org.apache.hadoop.hive.ql.plan.OperatorDesc), ArrayList (java.util.ArrayList)
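
The Tez variant of the same splice, distilled as a minimal sketch. The class and method names (TezDummyStoreWiring, wireTezDummyStore) are illustrative only, not part of Hive. Unlike Example 1, this path constructs TezDummyStoreOperator directly and attaches the DummyStoreDesc via setConf instead of going through OperatorFactory.

import java.util.ArrayList;
import org.apache.hadoop.hive.ql.exec.CommonMergeJoinOperator;
import org.apache.hadoop.hive.ql.exec.DummyStoreOperator;
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.TezDummyStoreOperator;
import org.apache.hadoop.hive.ql.plan.DummyStoreDesc;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;

// Illustrative helper class; not part of Hive.
public final class TezDummyStoreWiring {

    // Splice a TezDummyStoreOperator between a small-table parent and the merge join,
    // mirroring the loop in convertJoinSMBJoin above.
    static DummyStoreOperator wireTezDummyStore(Operator<? extends OperatorDesc> parentOp,
            CommonMergeJoinOperator mergeJoinOp, int parentIndex) {
        // Construct the Tez dummy store and give it a fresh DummyStoreDesc.
        DummyStoreOperator dummyStoreOp =
                new TezDummyStoreOperator(mergeJoinOp.getCompilationOpContext());
        dummyStoreOp.setConf(new DummyStoreDesc());
        dummyStoreOp.setParentOperators(new ArrayList<Operator<? extends OperatorDesc>>());
        dummyStoreOp.setChildOperators(new ArrayList<Operator<? extends OperatorDesc>>());
        dummyStoreOp.getChildOperators().add(mergeJoinOp);
        // Rewire the parent's child pointer and the merge join's parent pointer.
        int childIndex = parentOp.getChildOperators().indexOf(mergeJoinOp);
        parentOp.getChildOperators().set(childIndex, dummyStoreOp);
        dummyStoreOp.getParentOperators().add(parentOp);
        mergeJoinOp.getParentOperators().set(parentIndex, dummyStoreOp);
        return dummyStoreOp;
    }
}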

Aggregations

ArrayList (java.util.ArrayList): 2
DummyStoreOperator (org.apache.hadoop.hive.ql.exec.DummyStoreOperator): 2
JoinOperator (org.apache.hadoop.hive.ql.exec.JoinOperator): 2
MapJoinOperator (org.apache.hadoop.hive.ql.exec.MapJoinOperator): 2
Operator (org.apache.hadoop.hive.ql.exec.Operator): 2
ReduceSinkOperator (org.apache.hadoop.hive.ql.exec.ReduceSinkOperator): 2
TableScanOperator (org.apache.hadoop.hive.ql.exec.TableScanOperator): 2
DummyStoreDesc (org.apache.hadoop.hive.ql.plan.DummyStoreDesc): 2
OperatorDesc (org.apache.hadoop.hive.ql.plan.OperatorDesc): 2
HashMap (java.util.HashMap): 1
AppMasterEventOperator (org.apache.hadoop.hive.ql.exec.AppMasterEventOperator): 1
CommonJoinOperator (org.apache.hadoop.hive.ql.exec.CommonJoinOperator): 1
CommonMergeJoinOperator (org.apache.hadoop.hive.ql.exec.CommonMergeJoinOperator): 1
FileSinkOperator (org.apache.hadoop.hive.ql.exec.FileSinkOperator): 1
GroupByOperator (org.apache.hadoop.hive.ql.exec.GroupByOperator): 1
MuxOperator (org.apache.hadoop.hive.ql.exec.MuxOperator): 1
SMBMapJoinOperator (org.apache.hadoop.hive.ql.exec.SMBMapJoinOperator): 1
SelectOperator (org.apache.hadoop.hive.ql.exec.SelectOperator): 1
TezDummyStoreOperator (org.apache.hadoop.hive.ql.exec.TezDummyStoreOperator): 1
CommonMergeJoinDesc (org.apache.hadoop.hive.ql.plan.CommonMergeJoinDesc): 1