Search in sources:

Example 56 with OperatorDesc

use of org.apache.hadoop.hive.ql.plan.OperatorDesc in project hive by apache.

From class SparkCompiler, method setInputFormat:

@Override
protected void setInputFormat(Task<? extends Serializable> task) {
    // Recursively walk the task tree; for every MapWork found inside a
    // Spark task, configure the input format of each aliased operator.
    if (task instanceof SparkTask) {
        SparkWork sparkWork = ((SparkTask) task).getWork();
        for (BaseWork baseWork : sparkWork.getAllWork()) {
            if (!(baseWork instanceof MapWork)) {
                continue;
            }
            MapWork mapWork = (MapWork) baseWork;
            HashMap<String, Operator<? extends OperatorDesc>> aliasToWork = mapWork.getAliasToWork();
            if (!aliasToWork.isEmpty()) {
                for (Operator<? extends OperatorDesc> operator : aliasToWork.values()) {
                    setInputFormat(mapWork, operator);
                }
            }
        }
    } else if (task instanceof ConditionalTask) {
        // A conditional task wraps alternative sub-tasks; visit each one.
        for (Task<? extends Serializable> subTask : ((ConditionalTask) task).getListTasks()) {
            setInputFormat(subTask);
        }
    }
    // Always descend into child tasks, regardless of the task type above.
    List<Task<? extends Serializable>> children = task.getChildTasks();
    if (children != null) {
        for (Task<? extends Serializable> child : children) {
            setInputFormat(child);
        }
    }
}
Also used : ReduceSinkOperator(org.apache.hadoop.hive.ql.exec.ReduceSinkOperator) MapJoinOperator(org.apache.hadoop.hive.ql.exec.MapJoinOperator) UnionOperator(org.apache.hadoop.hive.ql.exec.UnionOperator) FileSinkOperator(org.apache.hadoop.hive.ql.exec.FileSinkOperator) FilterOperator(org.apache.hadoop.hive.ql.exec.FilterOperator) SMBMapJoinOperator(org.apache.hadoop.hive.ql.exec.SMBMapJoinOperator) JoinOperator(org.apache.hadoop.hive.ql.exec.JoinOperator) TableScanOperator(org.apache.hadoop.hive.ql.exec.TableScanOperator) Operator(org.apache.hadoop.hive.ql.exec.Operator) DummyStoreOperator(org.apache.hadoop.hive.ql.exec.DummyStoreOperator) SparkTask(org.apache.hadoop.hive.ql.exec.spark.SparkTask) ConditionalTask(org.apache.hadoop.hive.ql.exec.ConditionalTask) Task(org.apache.hadoop.hive.ql.exec.Task) Serializable(java.io.Serializable) SparkTask(org.apache.hadoop.hive.ql.exec.spark.SparkTask) SparkWork(org.apache.hadoop.hive.ql.plan.SparkWork) MapWork(org.apache.hadoop.hive.ql.plan.MapWork) ConditionalTask(org.apache.hadoop.hive.ql.exec.ConditionalTask) List(java.util.List) ArrayList(java.util.ArrayList) BaseWork(org.apache.hadoop.hive.ql.plan.BaseWork) OperatorDesc(org.apache.hadoop.hive.ql.plan.OperatorDesc)

Example 57 with OperatorDesc

use of org.apache.hadoop.hive.ql.plan.OperatorDesc in project hive by apache.

From class HiveOpConverterPostProc, method transform:

@Override
public ParseContext transform(ParseContext pctx) throws SemanticException {
    // 0. This transformation only applies when CBO ran successfully with
    // the Hive-operator return path enabled; otherwise return unchanged.
    final boolean cbo = HiveConf.getBoolVar(pctx.getConf(), HiveConf.ConfVars.HIVE_CBO_ENABLED);
    final boolean returnPath = HiveConf.getBoolVar(pctx.getConf(), HiveConf.ConfVars.HIVE_CBO_RETPATH_HIVEOP);
    final boolean succeeded = pctx.getContext().isCboSucceeded();
    if (!cbo || !returnPath || !succeeded) {
        return pctx;
    }
    // 1. Auxiliary state consumed by the annotate processors below.
    this.pctx = pctx;
    this.aliasToOpInfo = new HashMap<String, Operator<? extends OperatorDesc>>();
    // 2. Register the join/table-scan rules and walk the operator tree.
    Map<Rule, NodeProcessor> rules = new LinkedHashMap<Rule, NodeProcessor>();
    rules.put(new RuleRegExp("R1", JoinOperator.getOperatorName() + "%"), new JoinAnnotate());
    rules.put(new RuleRegExp("R2", TableScanOperator.getOperatorName() + "%"), new TableScanAnnotate());
    GraphWalker walker = new ForwardWalker(new DefaultRuleDispatcher(null, rules, null));
    List<Node> roots = new ArrayList<Node>(pctx.getTopOps().values());
    walker.startWalking(roots, null);
    return pctx;
}
Also used : ReduceSinkOperator(org.apache.hadoop.hive.ql.exec.ReduceSinkOperator) JoinOperator(org.apache.hadoop.hive.ql.exec.JoinOperator) TableScanOperator(org.apache.hadoop.hive.ql.exec.TableScanOperator) Operator(org.apache.hadoop.hive.ql.exec.Operator) NodeProcessor(org.apache.hadoop.hive.ql.lib.NodeProcessor) ForwardWalker(org.apache.hadoop.hive.ql.lib.ForwardWalker) DefaultRuleDispatcher(org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher) Node(org.apache.hadoop.hive.ql.lib.Node) RuleRegExp(org.apache.hadoop.hive.ql.lib.RuleRegExp) ArrayList(java.util.ArrayList) Dispatcher(org.apache.hadoop.hive.ql.lib.Dispatcher) DefaultRuleDispatcher(org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher) LinkedHashMap(java.util.LinkedHashMap) Rule(org.apache.hadoop.hive.ql.lib.Rule) OperatorDesc(org.apache.hadoop.hive.ql.plan.OperatorDesc) GraphWalker(org.apache.hadoop.hive.ql.lib.GraphWalker)

Example 58 with OperatorDesc

use of org.apache.hadoop.hive.ql.plan.OperatorDesc in project hive by apache.

From class ColumnPrunerProcFactory, method pruneJoinOperator:

/**
 * Prunes a (common/map) join operator's output down to the columns its
 * consumers actually need.  The join's expressions, output column names,
 * column-to-expression map, row schema and (for map joins) value table
 * descriptors are rewritten in place; the per-tag pruned column lists are
 * recorded in the context for upstream pruning.
 *
 * @param ctx           column-pruner processor context (cast to ColumnPrunerProcCtx)
 * @param op            the join operator being pruned
 * @param conf          the join descriptor, mutated in place
 * @param columnExprMap map from output column name to producing expression
 * @param retainMap     per-tag indexes of retained value expressions; may be null
 * @param mapJoin       true when op is a map join (extra key/value handling)
 * @throws SemanticException if pruning fails
 */
private static void pruneJoinOperator(NodeProcessorCtx ctx, CommonJoinOperator op, JoinDesc conf, Map<String, ExprNodeDesc> columnExprMap, Map<Byte, List<Integer>> retainMap, boolean mapJoin) throws SemanticException {
    ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
    List<Operator<? extends OperatorDesc>> childOperators = op.getChildOperators();
    LOG.info("JOIN " + op.getIdentifier() + " oldExprs: " + conf.getExprs());
    // Compute the needed-column list once (it was previously computed twice,
    // once for the null check and once for the copy; assumes genColLists is
    // idempotent, which the double call already relied on).  A null result
    // means no pruning information is available for this operator.
    List<FieldNode> genCols = cppCtx.genColLists(op);
    if (genCols == null) {
        return;
    }
    List<FieldNode> neededColList = new ArrayList<>(genCols);
    Map<Byte, List<FieldNode>> prunedColLists = new HashMap<>();
    for (byte tag : conf.getTagOrder()) {
        prunedColLists.put(tag, new ArrayList<FieldNode>());
    }
    // add the columns referenced by the join filters, per input tag
    Set<Map.Entry<Byte, List<ExprNodeDesc>>> filters = conf.getFilters().entrySet();
    Iterator<Map.Entry<Byte, List<ExprNodeDesc>>> iter = filters.iterator();
    while (iter.hasNext()) {
        Map.Entry<Byte, List<ExprNodeDesc>> entry = iter.next();
        Byte tag = entry.getKey();
        for (ExprNodeDesc desc : entry.getValue()) {
            List<FieldNode> cols = prunedColLists.get(tag);
            cols = mergeFieldNodesWithDesc(cols, desc);
            prunedColLists.put(tag, cols);
        }
    }
    // add the columns referenced by residual (post-join) filters
    if (conf.getResidualFilterExprs() != null) {
        for (ExprNodeDesc desc : conf.getResidualFilterExprs()) {
            neededColList = mergeFieldNodesWithDesc(neededColList, desc);
        }
    }
    RowSchema joinRS = op.getSchema();
    ArrayList<String> outputCols = new ArrayList<String>();
    ArrayList<ColumnInfo> rs = new ArrayList<ColumnInfo>();
    Map<String, ExprNodeDesc> newColExprMap = new HashMap<String, ExprNodeDesc>();
    // Walk the join's output columns: drop the ones nobody needs, keep the
    // rest and remember which input (tag) each retained column came from.
    for (int i = 0; i < conf.getOutputColumnNames().size(); i++) {
        String internalName = conf.getOutputColumnNames().get(i);
        ExprNodeDesc desc = columnExprMap.get(internalName);
        Byte tag = conf.getReversedExprs().get(internalName);
        if (lookupColumn(neededColList, internalName) == null) {
            int index = conf.getExprs().get(tag).indexOf(desc);
            if (index < 0) {
                continue;
            }
            conf.getExprs().get(tag).remove(desc);
            if (retainMap != null) {
                retainMap.get(tag).remove(index);
            }
        } else {
            List<FieldNode> prunedRSList = prunedColLists.get(tag);
            if (prunedRSList == null) {
                prunedRSList = new ArrayList<>();
                prunedColLists.put(tag, prunedRSList);
            }
            prunedColLists.put(tag, mergeFieldNodesWithDesc(prunedRSList, desc));
            outputCols.add(internalName);
            newColExprMap.put(internalName, desc);
        }
    }
    if (mapJoin) {
        // regenerate the valueTableDesc from each parent's (now pruned) value
        // columns.  (A dead keyOrder StringBuilder — built but never read —
        // was removed here.)
        List<TableDesc> valueTableDescs = new ArrayList<TableDesc>();
        for (int pos = 0; pos < op.getParentOperators().size(); pos++) {
            List<ExprNodeDesc> valueCols = conf.getExprs().get(Byte.valueOf((byte) pos));
            TableDesc valueTableDesc = PlanUtils.getMapJoinValueTableDesc(PlanUtils.getFieldSchemasFromColumnList(valueCols, "mapjoinvalue"));
            valueTableDescs.add(valueTableDesc);
        }
        ((MapJoinDesc) conf).setValueTblDescs(valueTableDescs);
        // map-join key columns must also be retained on their producing side
        Set<Map.Entry<Byte, List<ExprNodeDesc>>> exprs = ((MapJoinDesc) conf).getKeys().entrySet();
        Iterator<Map.Entry<Byte, List<ExprNodeDesc>>> iters = exprs.iterator();
        while (iters.hasNext()) {
            Map.Entry<Byte, List<ExprNodeDesc>> entry = iters.next();
            List<ExprNodeDesc> lists = entry.getValue();
            for (int j = 0; j < lists.size(); j++) {
                ExprNodeDesc desc = lists.get(j);
                Byte tag = entry.getKey();
                List<FieldNode> cols = prunedColLists.get(tag);
                cols = mergeFieldNodesWithDesc(cols, desc);
                prunedColLists.put(tag, cols);
            }
        }
    }
    // Re-prune any reduce-sink children against the needed column set.
    for (Operator<? extends OperatorDesc> child : childOperators) {
        if (child instanceof ReduceSinkOperator) {
            boolean[] flags = getPruneReduceSinkOpRetainFlags(toColumnNames(neededColList), (ReduceSinkOperator) child);
            pruneReduceSinkOperator(flags, (ReduceSinkOperator) child, cppCtx);
        }
    }
    // Rebuild the row schema from the retained output columns, in order.
    for (int i = 0; i < outputCols.size(); i++) {
        String internalName = outputCols.get(i);
        ColumnInfo col = joinRS.getColumnInfo(internalName);
        rs.add(col);
    }
    LOG.info("JOIN " + op.getIdentifier() + " newExprs: " + conf.getExprs());
    op.setColumnExprMap(newColExprMap);
    conf.setOutputColumnNames(outputCols);
    op.getSchema().setSignature(rs);
    cppCtx.getJoinPrunedColLists().put(op, prunedColLists);
}
Also used : LateralViewJoinOperator(org.apache.hadoop.hive.ql.exec.LateralViewJoinOperator) ReduceSinkOperator(org.apache.hadoop.hive.ql.exec.ReduceSinkOperator) PTFOperator(org.apache.hadoop.hive.ql.exec.PTFOperator) GroupByOperator(org.apache.hadoop.hive.ql.exec.GroupByOperator) LateralViewForwardOperator(org.apache.hadoop.hive.ql.exec.LateralViewForwardOperator) UnionOperator(org.apache.hadoop.hive.ql.exec.UnionOperator) UDTFOperator(org.apache.hadoop.hive.ql.exec.UDTFOperator) AbstractMapJoinOperator(org.apache.hadoop.hive.ql.exec.AbstractMapJoinOperator) SelectOperator(org.apache.hadoop.hive.ql.exec.SelectOperator) CommonJoinOperator(org.apache.hadoop.hive.ql.exec.CommonJoinOperator) FilterOperator(org.apache.hadoop.hive.ql.exec.FilterOperator) JoinOperator(org.apache.hadoop.hive.ql.exec.JoinOperator) TableScanOperator(org.apache.hadoop.hive.ql.exec.TableScanOperator) Operator(org.apache.hadoop.hive.ql.exec.Operator) LimitOperator(org.apache.hadoop.hive.ql.exec.LimitOperator) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) ColumnInfo(org.apache.hadoop.hive.ql.exec.ColumnInfo) List(java.util.List) ArrayList(java.util.ArrayList) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) RowSchema(org.apache.hadoop.hive.ql.exec.RowSchema) MapJoinDesc(org.apache.hadoop.hive.ql.plan.MapJoinDesc) ReduceSinkOperator(org.apache.hadoop.hive.ql.exec.ReduceSinkOperator) TableDesc(org.apache.hadoop.hive.ql.plan.TableDesc) OperatorDesc(org.apache.hadoop.hive.ql.plan.OperatorDesc) Map(java.util.Map) HashMap(java.util.HashMap)

Example 59 with OperatorDesc

use of org.apache.hadoop.hive.ql.plan.OperatorDesc in project hive by apache.

From class ConvertJoinMapJoin, method convertJoinSMBJoin:

/**
 * Replaces the join operator with a new CommonMergeJoinOperator (SMB join)
 * and rewires the operator graph around it; when adjustParentsChildren is
 * set, the parent reduce sinks are spliced out and dummy-store operators
 * are inserted on the small-table sides.
 *
 * @param joinOp               the join operator being replaced
 * @param context              Tez optimization context (supplies the conf)
 * @param mapJoinConversionPos position of the big-table input; that side
 *                             keeps its real parent (no dummy store)
 * @param numBuckets           bucket count carried into the merge-join desc
 * @param adjustParentsChildren whether to remove the parent reduce sinks
 *                             and generate join keys
 * @throws SemanticException if the map-join descriptor cannot be built
 */
private void convertJoinSMBJoin(JoinOperator joinOp, OptimizeTezProcContext context, int mapJoinConversionPos, int numBuckets, boolean adjustParentsChildren) throws SemanticException {
    MapJoinDesc mapJoinDesc = null;
    if (adjustParentsChildren) {
        // Full conversion: derive the map-join descriptor from the join.
        mapJoinDesc = MapJoinProcessor.getMapJoinDesc(context.conf, joinOp, joinOp.getConf().isLeftInputJoin(), joinOp.getConf().getBaseSrc(), joinOp.getConf().getMapAliases(), mapJoinConversionPos, true);
    } else {
        JoinDesc joinDesc = joinOp.getConf();
        // retain the original join desc in the map join.
        mapJoinDesc = new MapJoinDesc(MapJoinProcessor.getKeys(joinOp.getConf().isLeftInputJoin(), joinOp.getConf().getBaseSrc(), joinOp).getSecond(), null, joinDesc.getExprs(), null, null, joinDesc.getOutputColumnNames(), mapJoinConversionPos, joinDesc.getConds(), joinDesc.getFilters(), joinDesc.getNoOuterJoin(), null, joinDesc.getMemoryMonitorInfo(), joinDesc.getInMemoryDataSize());
        mapJoinDesc.setNullSafes(joinDesc.getNullSafes());
        mapJoinDesc.setFilterMap(joinDesc.getFilterMap());
        mapJoinDesc.setResidualFilterExprs(joinDesc.getResidualFilterExprs());
        // keep column expression map, explain plan uses this to display
        mapJoinDesc.setColumnExprMap(joinDesc.getColumnExprMap());
        mapJoinDesc.setReversedExprs(joinDesc.getReversedExprs());
        mapJoinDesc.resetOrder();
    }
    // Build the merge-join operator with the original join's schema, then
    // carry over traits and statistics.
    CommonMergeJoinOperator mergeJoinOp = (CommonMergeJoinOperator) OperatorFactory.get(joinOp.getCompilationOpContext(), new CommonMergeJoinDesc(numBuckets, mapJoinConversionPos, mapJoinDesc), joinOp.getSchema());
    int numReduceSinks = joinOp.getOpTraits().getNumReduceSinks();
    OpTraits opTraits = new OpTraits(joinOp.getOpTraits().getBucketColNames(), numBuckets, joinOp.getOpTraits().getSortCols(), numReduceSinks);
    mergeJoinOp.setOpTraits(opTraits);
    mergeJoinOp.setStatistics(joinOp.getStatistics());
    // Swap joinOp for mergeJoinOp in every parent's child list, preserving
    // the position (remove/add at the same index).
    for (Operator<? extends OperatorDesc> parentOp : joinOp.getParentOperators()) {
        int pos = parentOp.getChildOperators().indexOf(joinOp);
        parentOp.getChildOperators().remove(pos);
        parentOp.getChildOperators().add(pos, mergeJoinOp);
    }
    // Likewise swap it in every child's parent list.
    for (Operator<? extends OperatorDesc> childOp : joinOp.getChildOperators()) {
        int pos = childOp.getParentOperators().indexOf(joinOp);
        childOp.getParentOperators().remove(pos);
        childOp.getParentOperators().add(pos, mergeJoinOp);
    }
    // Adopt joinOp's parents and children as mergeJoinOp's own.
    List<Operator<? extends OperatorDesc>> childOperators = mergeJoinOp.getChildOperators();
    List<Operator<? extends OperatorDesc>> parentOperators = mergeJoinOp.getParentOperators();
    childOperators.clear();
    parentOperators.clear();
    childOperators.addAll(joinOp.getChildOperators());
    parentOperators.addAll(joinOp.getParentOperators());
    mergeJoinOp.getConf().setGenJoinKeys(false);
    if (adjustParentsChildren) {
        mergeJoinOp.getConf().setGenJoinKeys(true);
        // Splice out the reduce-sink parents: connect each grandparent
        // directly to the merge join.
        List<Operator<? extends OperatorDesc>> newParentOpList = new ArrayList<Operator<? extends OperatorDesc>>();
        for (Operator<? extends OperatorDesc> parentOp : mergeJoinOp.getParentOperators()) {
            for (Operator<? extends OperatorDesc> grandParentOp : parentOp.getParentOperators()) {
                grandParentOp.getChildOperators().remove(parentOp);
                grandParentOp.getChildOperators().add(mergeJoinOp);
                newParentOpList.add(grandParentOp);
            }
        }
        mergeJoinOp.getParentOperators().clear();
        mergeJoinOp.getParentOperators().addAll(newParentOpList);
        // Iterate over a snapshot, since the parent list is mutated below.
        List<Operator<? extends OperatorDesc>> parentOps = new ArrayList<Operator<? extends OperatorDesc>>(mergeJoinOp.getParentOperators());
        for (Operator<? extends OperatorDesc> parentOp : parentOps) {
            int parentIndex = mergeJoinOp.getParentOperators().indexOf(parentOp);
            // The big-table side keeps its real parent; skip it.
            if (parentIndex == mapJoinConversionPos) {
                continue;
            }
            // insert the dummy store operator here
            DummyStoreOperator dummyStoreOp = new TezDummyStoreOperator(mergeJoinOp.getCompilationOpContext());
            dummyStoreOp.setParentOperators(new ArrayList<Operator<? extends OperatorDesc>>());
            dummyStoreOp.setChildOperators(new ArrayList<Operator<? extends OperatorDesc>>());
            dummyStoreOp.getChildOperators().add(mergeJoinOp);
            int index = parentOp.getChildOperators().indexOf(mergeJoinOp);
            parentOp.getChildOperators().remove(index);
            parentOp.getChildOperators().add(index, dummyStoreOp);
            dummyStoreOp.getParentOperators().add(parentOp);
            mergeJoinOp.getParentOperators().remove(parentIndex);
            mergeJoinOp.getParentOperators().add(parentIndex, dummyStoreOp);
        }
    }
    mergeJoinOp.cloneOriginalParentsList(mergeJoinOp.getParentOperators());
}
Also used : CommonMergeJoinOperator(org.apache.hadoop.hive.ql.exec.CommonMergeJoinOperator) ReduceSinkOperator(org.apache.hadoop.hive.ql.exec.ReduceSinkOperator) MapJoinOperator(org.apache.hadoop.hive.ql.exec.MapJoinOperator) GroupByOperator(org.apache.hadoop.hive.ql.exec.GroupByOperator) FileSinkOperator(org.apache.hadoop.hive.ql.exec.FileSinkOperator) SelectOperator(org.apache.hadoop.hive.ql.exec.SelectOperator) MuxOperator(org.apache.hadoop.hive.ql.exec.MuxOperator) CommonJoinOperator(org.apache.hadoop.hive.ql.exec.CommonJoinOperator) TezDummyStoreOperator(org.apache.hadoop.hive.ql.exec.TezDummyStoreOperator) AppMasterEventOperator(org.apache.hadoop.hive.ql.exec.AppMasterEventOperator) JoinOperator(org.apache.hadoop.hive.ql.exec.JoinOperator) TableScanOperator(org.apache.hadoop.hive.ql.exec.TableScanOperator) Operator(org.apache.hadoop.hive.ql.exec.Operator) DummyStoreOperator(org.apache.hadoop.hive.ql.exec.DummyStoreOperator) MapJoinDesc(org.apache.hadoop.hive.ql.plan.MapJoinDesc) OpTraits(org.apache.hadoop.hive.ql.plan.OpTraits) TezDummyStoreOperator(org.apache.hadoop.hive.ql.exec.TezDummyStoreOperator) DummyStoreOperator(org.apache.hadoop.hive.ql.exec.DummyStoreOperator) CommonMergeJoinDesc(org.apache.hadoop.hive.ql.plan.CommonMergeJoinDesc) ArrayList(java.util.ArrayList) TezDummyStoreOperator(org.apache.hadoop.hive.ql.exec.TezDummyStoreOperator) MapJoinDesc(org.apache.hadoop.hive.ql.plan.MapJoinDesc) JoinDesc(org.apache.hadoop.hive.ql.plan.JoinDesc) CommonMergeJoinDesc(org.apache.hadoop.hive.ql.plan.CommonMergeJoinDesc) OperatorDesc(org.apache.hadoop.hive.ql.plan.OperatorDesc) CommonMergeJoinOperator(org.apache.hadoop.hive.ql.exec.CommonMergeJoinOperator)

Example 60 with OperatorDesc

use of org.apache.hadoop.hive.ql.plan.OperatorDesc in project hive by apache.

From class GenMROperator, method process:

/**
 * Reduce Scan encountered: propagate the current map-reduce context from
 * the parent operator (one below the top of the traversal stack) to the
 * operator being processed.
 *
 * @param nd
 *          the reduce sink operator encountered
 * @param procCtx
 *          context
 */
public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException {
    GenMRProcContext genCtx = (GenMRProcContext) procCtx;
    Map<Operator<? extends OperatorDesc>, GenMapRedCtx> currCtxByOp = genCtx.getMapCurrCtx();
    // The parent of nd sits one entry below the top of the walker's stack.
    Node parent = stack.get(stack.size() - 2);
    GenMapRedCtx parentCtx = currCtxByOp.get(parent);
    currCtxByOp.put((Operator<? extends OperatorDesc>) nd,
        new GenMapRedCtx(parentCtx.getCurrTask(), parentCtx.getCurrAliasId()));
    return true;
}
Also used : Operator(org.apache.hadoop.hive.ql.exec.Operator) GenMapRedCtx(org.apache.hadoop.hive.ql.optimizer.GenMRProcContext.GenMapRedCtx) OperatorDesc(org.apache.hadoop.hive.ql.plan.OperatorDesc)

Aggregations

OperatorDesc (org.apache.hadoop.hive.ql.plan.OperatorDesc)87 Operator (org.apache.hadoop.hive.ql.exec.Operator)70 ArrayList (java.util.ArrayList)50 ReduceSinkOperator (org.apache.hadoop.hive.ql.exec.ReduceSinkOperator)44 TableScanOperator (org.apache.hadoop.hive.ql.exec.TableScanOperator)41 MapJoinOperator (org.apache.hadoop.hive.ql.exec.MapJoinOperator)36 JoinOperator (org.apache.hadoop.hive.ql.exec.JoinOperator)31 FileSinkOperator (org.apache.hadoop.hive.ql.exec.FileSinkOperator)30 UnionOperator (org.apache.hadoop.hive.ql.exec.UnionOperator)27 Path (org.apache.hadoop.fs.Path)21 SMBMapJoinOperator (org.apache.hadoop.hive.ql.exec.SMBMapJoinOperator)21 LinkedHashMap (java.util.LinkedHashMap)18 Serializable (java.io.Serializable)17 Task (org.apache.hadoop.hive.ql.exec.Task)17 MapWork (org.apache.hadoop.hive.ql.plan.MapWork)17 HashMap (java.util.HashMap)16 ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc)16 TableDesc (org.apache.hadoop.hive.ql.plan.TableDesc)16 List (java.util.List)15 Map (java.util.Map)14