
Example 1 with LevelOrderWalker

Use of org.apache.hadoop.hive.ql.lib.LevelOrderWalker in project hive by apache.

From the class PredicateTransitivePropagate, method transform:

@Override
public ParseContext transform(ParseContext pctx) throws SemanticException {
    pGraphContext = pctx;
    Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
    opRules.put(new RuleRegExp("R1", "(" + FilterOperator.getOperatorName() + "%" + ReduceSinkOperator.getOperatorName() + "%" + JoinOperator.getOperatorName() + "%)"), new JoinTransitive());
    // The dispatcher fires the processor corresponding to the closest matching
    // rule and passes the context along
    TransitiveContext context = new TransitiveContext();
    SemanticDispatcher disp = new DefaultRuleDispatcher(null, opRules, context);
    SemanticGraphWalker ogw = new LevelOrderWalker(disp, 2);
    // Create a list of topop nodes
    List<Node> topNodes = new ArrayList<Node>();
    topNodes.addAll(pGraphContext.getTopOps().values());
    ogw.startWalking(topNodes, null);
    Map<ReduceSinkOperator, List<ExprNodeDesc>> newFilters = context.getNewfilters();
    // insert new filter between RS and parent of RS
    for (Map.Entry<ReduceSinkOperator, List<ExprNodeDesc>> entry : newFilters.entrySet()) {
        ReduceSinkOperator reducer = entry.getKey();
        Operator<?> parent = reducer.getParentOperators().get(0);
        List<ExprNodeDesc> exprs = entry.getValue();
        if (parent instanceof FilterOperator) {
            exprs = ExprNodeDescUtils.split(((FilterOperator) parent).getConf().getPredicate(), exprs);
            ExprNodeDesc merged = ExprNodeDescUtils.mergePredicates(exprs);
            ((FilterOperator) parent).getConf().setPredicate(merged);
        } else {
            ExprNodeDesc merged = ExprNodeDescUtils.mergePredicates(exprs);
            RowSchema parentRS = parent.getSchema();
            // createFilter rewires the new FilterOperator in between parent and
            // reducer as a side effect; the returned operator is not used further here.
            Operator<FilterDesc> newFilter = createFilter(reducer, parent, parentRS, merged);
        }
    }
    return pGraphContext;
}
Also used: Node (org.apache.hadoop.hive.ql.lib.Node), ArrayList (java.util.ArrayList), LinkedHashMap (java.util.LinkedHashMap), List (java.util.List), ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc), RowSchema (org.apache.hadoop.hive.ql.exec.RowSchema), SemanticRule (org.apache.hadoop.hive.ql.lib.SemanticRule), DefaultRuleDispatcher (org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher), RuleRegExp (org.apache.hadoop.hive.ql.lib.RuleRegExp), SemanticGraphWalker (org.apache.hadoop.hive.ql.lib.SemanticGraphWalker), FilterOperator (org.apache.hadoop.hive.ql.exec.FilterOperator), FilterDesc (org.apache.hadoop.hive.ql.plan.FilterDesc), ReduceSinkOperator (org.apache.hadoop.hive.ql.exec.ReduceSinkOperator), SemanticDispatcher (org.apache.hadoop.hive.ql.lib.SemanticDispatcher), SemanticNodeProcessor (org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor), LevelOrderWalker (org.apache.hadoop.hive.ql.lib.LevelOrderWalker), HashMap (java.util.HashMap), Map (java.util.Map)
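
For context, the JoinTransitive processor registered under rule R1 implements SemanticNodeProcessor. The following is a minimal sketch of that interface's shape; ExampleProcessor and its body are illustrative, and only the interface and the process signature come from the Hive classes used above.

import java.util.Stack;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor;
import org.apache.hadoop.hive.ql.parse.SemanticException;

public class ExampleProcessor implements SemanticNodeProcessor {
    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
            Object... nodeOutputs) throws SemanticException {
        // nd is the node that matched the tail of the rule pattern; the stack
        // holds the path walked to reach it (for R1 above: FIL, RS, JOIN).
        // A real processor such as JoinTransitive reads and updates procCtx here.
        return null;
    }
}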

Example 2 with LevelOrderWalker

Use of org.apache.hadoop.hive.ql.lib.LevelOrderWalker in project hive by apache.

From the class AnnotateWithOpTraits, method transform:

@Override
public ParseContext transform(ParseContext pctx) throws SemanticException {
    AnnotateOpTraitsProcCtx annotateCtx = new AnnotateOpTraitsProcCtx(pctx);
    // create a walker which walks the tree in a BFS manner while maintaining the
    // operator stack. The dispatcher fires the matching rule's processor to
    // annotate each operator with its traits.
    Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
    opRules.put(new RuleRegExp("TS", TableScanOperator.getOperatorName() + "%"), OpTraitsRulesProcFactory.getTableScanRule());
    opRules.put(new RuleRegExp("RS", ReduceSinkOperator.getOperatorName() + "%"), OpTraitsRulesProcFactory.getReduceSinkRule());
    opRules.put(new RuleRegExp("JOIN", JoinOperator.getOperatorName() + "%"), OpTraitsRulesProcFactory.getJoinRule());
    opRules.put(new RuleRegExp("MAPJOIN", MapJoinOperator.getOperatorName() + "%"), OpTraitsRulesProcFactory.getMultiParentRule());
    opRules.put(new RuleRegExp("SMB", SMBMapJoinOperator.getOperatorName() + "%"), OpTraitsRulesProcFactory.getMultiParentRule());
    opRules.put(new RuleRegExp("MUX", MuxOperator.getOperatorName() + "%"), OpTraitsRulesProcFactory.getMultiParentRule());
    opRules.put(new RuleRegExp("DEMUX", DemuxOperator.getOperatorName() + "%"), OpTraitsRulesProcFactory.getMultiParentRule());
    opRules.put(new RuleRegExp("UNION", UnionOperator.getOperatorName() + "%"), OpTraitsRulesProcFactory.getMultiParentRule());
    opRules.put(new RuleRegExp("GBY", GroupByOperator.getOperatorName() + "%"), OpTraitsRulesProcFactory.getGroupByRule());
    opRules.put(new RuleRegExp("PTF", PTFOperator.getOperatorName() + "%"), OpTraitsRulesProcFactory.getPTFRule());
    opRules.put(new RuleRegExp("SEL", SelectOperator.getOperatorName() + "%"), OpTraitsRulesProcFactory.getSelectRule());
    // The dispatcher fires the processor corresponding to the closest matching
    // rule and passes the context along
    SemanticDispatcher disp = new DefaultRuleDispatcher(OpTraitsRulesProcFactory.getDefaultRule(), opRules, annotateCtx);
    SemanticGraphWalker ogw = new LevelOrderWalker(disp, 0);
    // Create a list of topop nodes
    ArrayList<Node> topNodes = new ArrayList<Node>();
    topNodes.addAll(pctx.getTopOps().values());
    ogw.startWalking(topNodes, null);
    return pctx;
}
Also used: SemanticRule (org.apache.hadoop.hive.ql.lib.SemanticRule), DefaultRuleDispatcher (org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher), Node (org.apache.hadoop.hive.ql.lib.Node), RuleRegExp (org.apache.hadoop.hive.ql.lib.RuleRegExp), ArrayList (java.util.ArrayList), SemanticGraphWalker (org.apache.hadoop.hive.ql.lib.SemanticGraphWalker), LinkedHashMap (java.util.LinkedHashMap), SemanticDispatcher (org.apache.hadoop.hive.ql.lib.SemanticDispatcher), SemanticNodeProcessor (org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor), LevelOrderWalker (org.apache.hadoop.hive.ql.lib.LevelOrderWalker)
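
All five transform methods on this page follow the org.apache.hadoop.hive.ql.optimizer.Transform contract referenced in Example 3's Javadoc: take a ParseContext, annotate or rewrite it, return it. A minimal sketch of how such passes compose, much as Hive's own Optimizer chains them; the TransformChain class is hypothetical, and the two package paths are an assumption, not shown in the snippets above.

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hive.ql.optimizer.Transform;
import org.apache.hadoop.hive.ql.optimizer.metainfo.annotation.AnnotateWithOpTraits;
import org.apache.hadoop.hive.ql.optimizer.stats.annotation.AnnotateWithStatistics;
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;

public class TransformChain {
    // Runs annotation passes in order, threading the ParseContext through each.
    public static ParseContext annotate(ParseContext pctx) throws SemanticException {
        List<Transform> passes = new ArrayList<Transform>();
        passes.add(new AnnotateWithOpTraits());   // operator traits (Example 2)
        passes.add(new AnnotateWithStatistics()); // statistics (Example 5)
        for (Transform pass : passes) {
            pctx = pass.transform(pctx);
        }
        return pctx;
    }
}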

Example 3 with LevelOrderWalker

Use of org.apache.hadoop.hive.ql.lib.LevelOrderWalker in project hive by apache.

From the class Generator, method transform:

/* (non-Javadoc)
   * @see org.apache.hadoop.hive.ql.optimizer.Transform#transform(org.apache.hadoop.hive.ql.parse.ParseContext)
   */
@Override
public ParseContext transform(ParseContext pctx) throws SemanticException {
    if (hooks != null && hooks.contains(ATLAS_HOOK_CLASSNAME)) {
        // Atlas only needs lineage information for insert, load, create, etc.;
        // skip plain SELECT queries.
        if (!pctx.getQueryProperties().isCTAS() && !pctx.getQueryProperties().isMaterializedView() && pctx.getQueryProperties().isQuery() && pctx.getCreateTable() == null && pctx.getCreateViewDesc() == null && (pctx.getLoadTableWork() == null || pctx.getLoadTableWork().isEmpty())) {
            LOG.debug("Not evaluating lineage");
            return pctx;
        }
    }
    Index index = pctx.getQueryState().getLineageState().getIndex();
    if (index == null) {
        index = new Index();
    }
    long sTime = System.currentTimeMillis();
    // Create the lineage context
    LineageCtx lCtx = new LineageCtx(pctx, index);
    Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
    opRules.put(new RuleRegExp("R1", TableScanOperator.getOperatorName() + "%"), OpProcFactory.getTSProc());
    opRules.put(new RuleRegExp("R2", ScriptOperator.getOperatorName() + "%"), OpProcFactory.getTransformProc());
    opRules.put(new RuleRegExp("R3", UDTFOperator.getOperatorName() + "%"), OpProcFactory.getTransformProc());
    opRules.put(new RuleRegExp("R4", SelectOperator.getOperatorName() + "%"), OpProcFactory.getSelProc());
    opRules.put(new RuleRegExp("R5", GroupByOperator.getOperatorName() + "%"), OpProcFactory.getGroupByProc());
    opRules.put(new RuleRegExp("R6", UnionOperator.getOperatorName() + "%"), OpProcFactory.getUnionProc());
    opRules.put(new RuleRegExp("R7", CommonJoinOperator.getOperatorName() + "%|" + MapJoinOperator.getOperatorName() + "%"), OpProcFactory.getJoinProc());
    opRules.put(new RuleRegExp("R8", ReduceSinkOperator.getOperatorName() + "%"), OpProcFactory.getReduceSinkProc());
    opRules.put(new RuleRegExp("R9", LateralViewJoinOperator.getOperatorName() + "%"), OpProcFactory.getLateralViewJoinProc());
    opRules.put(new RuleRegExp("R10", PTFOperator.getOperatorName() + "%"), OpProcFactory.getTransformProc());
    opRules.put(new RuleRegExp("R11", FilterOperator.getOperatorName() + "%"), OpProcFactory.getFilterProc());
    // The dispatcher fires the processor corresponding to the closest matching rule and passes the context along
    SemanticDispatcher disp = new DefaultRuleDispatcher(OpProcFactory.getDefaultProc(), opRules, lCtx);
    SemanticGraphWalker ogw = new LevelOrderWalker(disp, 2);
    // Create a list of topop nodes
    ArrayList<Node> topNodes = new ArrayList<Node>();
    topNodes.addAll(pctx.getTopOps().values());
    ogw.startWalking(topNodes, null);
    LOG.debug("Time taken for lineage transform={}", (System.currentTimeMillis() - sTime));
    return pctx;
}
Also used: SemanticRule (org.apache.hadoop.hive.ql.lib.SemanticRule), DefaultRuleDispatcher (org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher), Node (org.apache.hadoop.hive.ql.lib.Node), RuleRegExp (org.apache.hadoop.hive.ql.lib.RuleRegExp), ArrayList (java.util.ArrayList), SemanticGraphWalker (org.apache.hadoop.hive.ql.lib.SemanticGraphWalker), Index (org.apache.hadoop.hive.ql.optimizer.lineage.LineageCtx.Index), LinkedHashMap (java.util.LinkedHashMap), SemanticDispatcher (org.apache.hadoop.hive.ql.lib.SemanticDispatcher), SemanticNodeProcessor (org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor), LevelOrderWalker (org.apache.hadoop.hive.ql.lib.LevelOrderWalker)
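
Note the walker's second constructor argument, which varies across these examples (2 here and in Example 1, 0 in Examples 2 and 5). Judging from the rule patterns, it is the number of ancestor levels fused onto the stack when a node is dispatched: the three-operator pattern FIL%RS%JOIN% in Example 1 needs two ancestor levels to match, while single-operator patterns such as TS% need none. A small sketch of that correspondence; the WalkerLevels class is illustrative only.

import org.apache.hadoop.hive.ql.lib.LevelOrderWalker;
import org.apache.hadoop.hive.ql.lib.SemanticDispatcher;
import org.apache.hadoop.hive.ql.lib.SemanticGraphWalker;

public class WalkerLevels {
    public static void build(SemanticDispatcher disp) {
        // "FIL%RS%JOIN%" spans three operators, so two ancestor levels must be
        // visible on the stack when the JOIN node is dispatched:
        SemanticGraphWalker multiOp = new LevelOrderWalker(disp, 2);
        // Single-operator patterns such as "TS%" need no fused ancestors:
        SemanticGraphWalker singleOp = new LevelOrderWalker(disp, 0);
    }
}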

Example 4 with LevelOrderWalker

Use of org.apache.hadoop.hive.ql.lib.LevelOrderWalker in project hive by apache.

From the class UnionProcessor, method transform:

/**
 * Transform the query tree. For each union, store whether both of its
 * sub-queries are map-only.
 *
 * @param pCtx
 *          the current parse context
 */
public ParseContext transform(ParseContext pCtx) throws SemanticException {
    // create a walker which walks the tree in a BFS manner while maintaining
    // the operator stack.
    Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
    opRules.put(new RuleRegExp("R1", ReduceSinkOperator.getOperatorName() + "%.*" + UnionOperator.getOperatorName() + "%"), UnionProcFactory.getMapRedUnion());
    opRules.put(new RuleRegExp("R2", UnionOperator.getOperatorName() + "%.*" + UnionOperator.getOperatorName() + "%"), UnionProcFactory.getUnknownUnion());
    opRules.put(new RuleRegExp("R3", TableScanOperator.getOperatorName() + "%.*" + UnionOperator.getOperatorName() + "%"), UnionProcFactory.getMapUnion());
    // The dispatcher fires the processor for the matching rule and passes the
    // context along
    UnionProcContext uCtx = new UnionProcContext();
    uCtx.setParseContext(pCtx);
    SemanticDispatcher disp = new DefaultRuleDispatcher(UnionProcFactory.getNoUnion(), opRules, uCtx);
    LevelOrderWalker ogw = new LevelOrderWalker(disp);
    ogw.setNodeTypes(UnionOperator.class);
    // Create a list of topop nodes
    ArrayList<Node> topNodes = new ArrayList<Node>();
    topNodes.addAll(pCtx.getTopOps().values());
    ogw.startWalking(topNodes, null);
    pCtx.setUCtx(uCtx);
    // Walk the tree again to see if the union can be removed completely
    HiveConf conf = pCtx.getConf();
    opRules.clear();
    if (conf.getBoolVar(HiveConf.ConfVars.HIVE_OPTIMIZE_UNION_REMOVE) && !conf.getVar(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("spark")) {
        opRules.put(new RuleRegExp("R5", UnionOperator.getOperatorName() + "%" + ".*" + FileSinkOperator.getOperatorName() + "%"), UnionProcFactory.getUnionNoProcessFile());
        disp = new DefaultRuleDispatcher(UnionProcFactory.getNoUnion(), opRules, uCtx);
        ogw = new LevelOrderWalker(disp);
        ogw.setNodeTypes(FileSinkOperator.class);
        // Create a list of topop nodes
        topNodes.clear();
        topNodes.addAll(pCtx.getTopOps().values());
        ogw.startWalking(topNodes, null);
    }
    return pCtx;
}
Also used: SemanticRule (org.apache.hadoop.hive.ql.lib.SemanticRule), DefaultRuleDispatcher (org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher), Node (org.apache.hadoop.hive.ql.lib.Node), RuleRegExp (org.apache.hadoop.hive.ql.lib.RuleRegExp), ArrayList (java.util.ArrayList), LinkedHashMap (java.util.LinkedHashMap), SemanticDispatcher (org.apache.hadoop.hive.ql.lib.SemanticDispatcher), SemanticNodeProcessor (org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor), HiveConf (org.apache.hadoop.hive.conf.HiveConf), LevelOrderWalker (org.apache.hadoop.hive.ql.lib.LevelOrderWalker)
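
Example 4 is the only one that uses the single-argument constructor together with setNodeTypes, which restricts dispatch to nodes of the listed classes while the walk itself still covers the whole tree. A minimal sketch of that filtered-walk setup, mirroring the first pass above with the dispatcher left abstract; the FilteredWalk class is illustrative.

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hive.ql.exec.UnionOperator;
import org.apache.hadoop.hive.ql.lib.LevelOrderWalker;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.SemanticDispatcher;
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;

public class FilteredWalk {
    // Walks the whole operator tree but only dispatches UnionOperator nodes.
    public static void walkUnions(ParseContext pCtx, SemanticDispatcher disp)
            throws SemanticException {
        LevelOrderWalker walker = new LevelOrderWalker(disp);
        walker.setNodeTypes(UnionOperator.class);
        List<Node> topNodes = new ArrayList<Node>(pCtx.getTopOps().values());
        walker.startWalking(topNodes, null); // null: per-node outputs are discarded
    }
}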

Example 5 with LevelOrderWalker

Use of org.apache.hadoop.hive.ql.lib.LevelOrderWalker in project hive by apache.

From the class AnnotateWithStatistics, method transform:

@Override
public ParseContext transform(ParseContext pctx) throws SemanticException {
    AnnotateStatsProcCtx aspCtx = new AnnotateStatsProcCtx(pctx);
    // create a walker which walks the tree in a BFS manner while maintaining the
    // operator stack. The dispatcher fires the matching rule's processor to
    // annotate each operator with statistics.
    Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
    opRules.put(new RuleRegExp("TS", TableScanOperator.getOperatorName() + "%"), StatsRulesProcFactory.getTableScanRule());
    opRules.put(new RuleRegExp("SEL", SelectOperator.getOperatorName() + "%"), StatsRulesProcFactory.getSelectRule());
    opRules.put(new RuleRegExp("FIL", FilterOperator.getOperatorName() + "%"), StatsRulesProcFactory.getFilterRule());
    opRules.put(new RuleRegExp("GBY", GroupByOperator.getOperatorName() + "%"), StatsRulesProcFactory.getGroupByRule());
    opRules.put(new RuleRegExp("JOIN", CommonJoinOperator.getOperatorName() + "%|" + MapJoinOperator.getOperatorName() + "%"), StatsRulesProcFactory.getJoinRule());
    opRules.put(new RuleRegExp("LIM", LimitOperator.getOperatorName() + "%"), StatsRulesProcFactory.getLimitRule());
    opRules.put(new RuleRegExp("RS", ReduceSinkOperator.getOperatorName() + "%"), StatsRulesProcFactory.getReduceSinkRule());
    opRules.put(new RuleRegExp("UDTF", UDTFOperator.getOperatorName() + "%"), StatsRulesProcFactory.getUDTFRule());
    opRules.put(new RuleRegExp("LVJ", LateralViewJoinOperator.getOperatorName() + "%"), StatsRulesProcFactory.getLateralViewJoinRule());
    // The dispatcher fires the processor corresponding to the closest matching
    // rule and passes the context along
    SemanticDispatcher disp = new DefaultRuleDispatcher(StatsRulesProcFactory.getDefaultRule(), opRules, aspCtx);
    SemanticGraphWalker ogw = new LevelOrderWalker(disp, 0);
    // Create a list of topop nodes
    ArrayList<Node> topNodes = new ArrayList<Node>();
    topNodes.addAll(pctx.getTopOps().values());
    ogw.startWalking(topNodes, null);
    return pctx;
}
Also used: SemanticRule (org.apache.hadoop.hive.ql.lib.SemanticRule), DefaultRuleDispatcher (org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher), Node (org.apache.hadoop.hive.ql.lib.Node), RuleRegExp (org.apache.hadoop.hive.ql.lib.RuleRegExp), ArrayList (java.util.ArrayList), SemanticGraphWalker (org.apache.hadoop.hive.ql.lib.SemanticGraphWalker), LinkedHashMap (java.util.LinkedHashMap), SemanticDispatcher (org.apache.hadoop.hive.ql.lib.SemanticDispatcher), SemanticNodeProcessor (org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor), LevelOrderWalker (org.apache.hadoop.hive.ql.lib.LevelOrderWalker)
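
All five examples share the same scaffolding: an ordered rule map, a DefaultRuleDispatcher with an optional default processor, and a LevelOrderWalker started on the top operators. Below is a hedged sketch of a helper that factors this pattern out; walkOperatorTree is not a Hive API, just an illustration of the shared shape.

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.LevelOrderWalker;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.SemanticDispatcher;
import org.apache.hadoop.hive.ql.lib.SemanticGraphWalker;
import org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor;
import org.apache.hadoop.hive.ql.lib.SemanticRule;
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;

public final class WalkUtil {
    private WalkUtil() {
    }

    // Hypothetical helper capturing the pattern shared by Examples 1 through 5:
    // rules -> dispatcher -> level-order walk over the top operators.
    public static void walkOperatorTree(ParseContext pctx,
            Map<SemanticRule, SemanticNodeProcessor> opRules,
            SemanticNodeProcessor defaultProc, NodeProcessorCtx procCtx,
            int maxLevelToFuse) throws SemanticException {
        SemanticDispatcher disp = new DefaultRuleDispatcher(defaultProc, opRules, procCtx);
        SemanticGraphWalker walker = new LevelOrderWalker(disp, maxLevelToFuse);
        List<Node> topNodes = new ArrayList<Node>(pctx.getTopOps().values());
        walker.startWalking(topNodes, null);
    }
}

With such a helper, Example 5 would reduce to building its opRules map and calling walkOperatorTree(pctx, opRules, StatsRulesProcFactory.getDefaultRule(), aspCtx, 0).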

Aggregations

ArrayList (java.util.ArrayList): 5 usages
LinkedHashMap (java.util.LinkedHashMap): 5 usages
DefaultRuleDispatcher (org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher): 5 usages
LevelOrderWalker (org.apache.hadoop.hive.ql.lib.LevelOrderWalker): 5 usages
Node (org.apache.hadoop.hive.ql.lib.Node): 5 usages
RuleRegExp (org.apache.hadoop.hive.ql.lib.RuleRegExp): 5 usages
SemanticDispatcher (org.apache.hadoop.hive.ql.lib.SemanticDispatcher): 5 usages
SemanticNodeProcessor (org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor): 5 usages
SemanticRule (org.apache.hadoop.hive.ql.lib.SemanticRule): 5 usages
SemanticGraphWalker (org.apache.hadoop.hive.ql.lib.SemanticGraphWalker): 4 usages
HashMap (java.util.HashMap): 1 usage
List (java.util.List): 1 usage
Map (java.util.Map): 1 usage
HiveConf (org.apache.hadoop.hive.conf.HiveConf): 1 usage
FilterOperator (org.apache.hadoop.hive.ql.exec.FilterOperator): 1 usage
ReduceSinkOperator (org.apache.hadoop.hive.ql.exec.ReduceSinkOperator): 1 usage
RowSchema (org.apache.hadoop.hive.ql.exec.RowSchema): 1 usage
Index (org.apache.hadoop.hive.ql.optimizer.lineage.LineageCtx.Index): 1 usage
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc): 1 usage
FilterDesc (org.apache.hadoop.hive.ql.plan.FilterDesc): 1 usage