
Example 76 with SemanticDispatcher

Use of org.apache.hadoop.hive.ql.lib.SemanticDispatcher in project hive by apache.

From the class RedundantDynamicPruningConditionsRemoval, method transform.

/**
 * Transform the query tree.
 *
 * @param pctx the current parse context
 */
@Override
public ParseContext transform(ParseContext pctx) throws SemanticException {
    // Make sure semijoin is not enabled. If it is, then do not remove the dynamic partition pruning predicates.
    if (!pctx.getConf().getBoolVar(HiveConf.ConfVars.TEZ_DYNAMIC_SEMIJOIN_REDUCTION)) {
        Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
        opRules.put(new RuleRegExp("R1", TableScanOperator.getOperatorName() + "%" + FilterOperator.getOperatorName() + "%"), new FilterTransformer());
        SemanticDispatcher disp = new DefaultRuleDispatcher(null, opRules, null);
        SemanticGraphWalker ogw = new DefaultGraphWalker(disp);
        List<Node> topNodes = new ArrayList<Node>();
        topNodes.addAll(pctx.getTopOps().values());
        ogw.startWalking(topNodes, null);
    }
    return pctx;
}
Also used: SemanticRule (org.apache.hadoop.hive.ql.lib.SemanticRule), DefaultRuleDispatcher (org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher), DefaultGraphWalker (org.apache.hadoop.hive.ql.lib.DefaultGraphWalker), Node (org.apache.hadoop.hive.ql.lib.Node), RuleRegExp (org.apache.hadoop.hive.ql.lib.RuleRegExp), ArrayList (java.util.ArrayList), SemanticGraphWalker (org.apache.hadoop.hive.ql.lib.SemanticGraphWalker), LinkedHashMap (java.util.LinkedHashMap), SemanticDispatcher (org.apache.hadoop.hive.ql.lib.SemanticDispatcher), SemanticNodeProcessor (org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor)
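
The FilterTransformer registered for rule R1 is an inner class of the same optimizer, and its body is not shown above. As a hypothetical sketch (the class name ExampleFilterProcessor and its logic are illustrative, not the real implementation), a processor wired into such a rule map implements SemanticNodeProcessor and is handed the matched node plus the current walk stack:

import java.util.Stack;
import org.apache.hadoop.hive.ql.exec.FilterOperator;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor;
import org.apache.hadoop.hive.ql.parse.SemanticException;

class ExampleFilterProcessor implements SemanticNodeProcessor {
    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
            Object... nodeOutputs) throws SemanticException {
        // The dispatched node is the last operator of the matched pattern,
        // here the FilterOperator that follows the TableScanOperator.
        FilterOperator filter = (FilterOperator) nd;
        // A real processor would inspect filter.getConf().getPredicate() and
        // drop or rewrite redundant dynamic-pruning conjuncts here.
        return null;
    }
}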

Example 77 with SemanticDispatcher

Use of org.apache.hadoop.hive.ql.lib.SemanticDispatcher in project hive by apache.

From the class GroupByOptimizer, method transform.

@Override
public ParseContext transform(ParseContext pctx) throws SemanticException {
    Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
    HiveConf conf = pctx.getConf();
    if (!HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVEGROUPBYSKEW)) {
        // process group-by pattern
        opRules.put(new RuleRegExp("R1", GroupByOperator.getOperatorName() + "%" + ReduceSinkOperator.getOperatorName() + "%" + GroupByOperator.getOperatorName() + "%"), getMapSortedGroupbyProc(pctx));
    } else {
        // If hive.groupby.skewindata is set to true, the operator tree contains the
        // pattern GBY-RS-GBY-RS-GBY matched below
        opRules.put(new RuleRegExp("R2", GroupByOperator.getOperatorName() + "%" + ReduceSinkOperator.getOperatorName() + "%" + GroupByOperator.getOperatorName() + "%" + ReduceSinkOperator.getOperatorName() + "%" + GroupByOperator.getOperatorName() + "%"), getMapSortedGroupbySkewProc(pctx));
    }
    // The dispatcher fires the processor corresponding to the closest matching
    // rule and passes the context along
    SemanticDispatcher disp = new DefaultRuleDispatcher(getDefaultProc(), opRules, new GroupByOptimizerContext(conf));
    SemanticGraphWalker ogw = new DefaultGraphWalker(disp);
    // Create a list of topop nodes
    List<Node> topNodes = new ArrayList<Node>();
    topNodes.addAll(pctx.getTopOps().values());
    ogw.startWalking(topNodes, null);
    return pctx;
}
Also used: SemanticRule (org.apache.hadoop.hive.ql.lib.SemanticRule), DefaultRuleDispatcher (org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher), DefaultGraphWalker (org.apache.hadoop.hive.ql.lib.DefaultGraphWalker), Node (org.apache.hadoop.hive.ql.lib.Node), RuleRegExp (org.apache.hadoop.hive.ql.lib.RuleRegExp), ArrayList (java.util.ArrayList), SemanticGraphWalker (org.apache.hadoop.hive.ql.lib.SemanticGraphWalker), LinkedHashMap (java.util.LinkedHashMap), SemanticDispatcher (org.apache.hadoop.hive.ql.lib.SemanticDispatcher), SemanticNodeProcessor (org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor), HiveConf (org.apache.hadoop.hive.conf.HiveConf)
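
Unlike Example 76, this dispatcher is also given a default processor (getDefaultProc()) and a shared GroupByOptimizerContext that is passed to every fired processor. The default processor runs whenever no rule matches the current operator stack; in Hive optimizers it is typically a no-op, roughly as in this hedged sketch (the actual implementation lives in GroupByOptimizer and is not shown here):

// Sketch of a no-op default processor handed to DefaultRuleDispatcher.
private SemanticNodeProcessor getDefaultProc() {
    return new SemanticNodeProcessor() {
        @Override
        public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
                Object... nodeOutputs) throws SemanticException {
            // Nothing to do for operators that no rule pattern matches.
            return null;
        }
    };
}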

Example 78 with SemanticDispatcher

Use of org.apache.hadoop.hive.ql.lib.SemanticDispatcher in project hive by apache.

From the class LimitPushdownOptimizer, method transform.

@Override
public ParseContext transform(ParseContext pctx) throws SemanticException {
    Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
    opRules.put(new RuleRegExp("R1", ReduceSinkOperator.getOperatorName() + "%" + ".*" + LimitOperator.getOperatorName() + "%"), new TopNReducer());
    opRules.put(new RuleRegExp("R2", ReduceSinkOperator.getOperatorName() + "%" + ".*" + ReduceSinkOperator.getOperatorName() + "%"), new TopNPropagator());
    LimitPushdownContext context = new LimitPushdownContext(pctx.getConf());
    SemanticDispatcher disp = new DefaultRuleDispatcher(null, opRules, context);
    SemanticGraphWalker ogw = new DefaultGraphWalker(disp);
    List<Node> topNodes = new ArrayList<Node>(pctx.getTopOps().values());
    ogw.startWalking(topNodes, null);
    return pctx;
}
Also used: SemanticRule (org.apache.hadoop.hive.ql.lib.SemanticRule), DefaultRuleDispatcher (org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher), DefaultGraphWalker (org.apache.hadoop.hive.ql.lib.DefaultGraphWalker), Node (org.apache.hadoop.hive.ql.lib.Node), RuleRegExp (org.apache.hadoop.hive.ql.lib.RuleRegExp), ArrayList (java.util.ArrayList), SemanticGraphWalker (org.apache.hadoop.hive.ql.lib.SemanticGraphWalker), LinkedHashMap (java.util.LinkedHashMap), SemanticDispatcher (org.apache.hadoop.hive.ql.lib.SemanticDispatcher), SemanticNodeProcessor (org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor)
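
The ".*" segments in R1 and R2 let any chain of operators sit between the ReduceSinkOperator and the operator that closes the pattern, so the limit does not have to be an immediate child of the reduce sink. Assuming Hive's usual short operator names ("RS" for ReduceSinkOperator, "LIM" for LimitOperator; an assumption, not stated above), the composed rule strings look roughly like this sketch:

// Hedged sketch of what the two rule strings expand to at runtime.
// The quoted operator names are assumptions and may differ by Hive version.
String r1 = ReduceSinkOperator.getOperatorName() + "%" + ".*"
        + LimitOperator.getOperatorName() + "%";      // e.g. "RS%.*LIM%"
String r2 = ReduceSinkOperator.getOperatorName() + "%" + ".*"
        + ReduceSinkOperator.getOperatorName() + "%"; // e.g. "RS%.*RS%"
// RuleRegExp matches a pattern against the concatenated names of the operators
// on the walker's stack, so the processor fires for the operator that ends the
// matched chain.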

Example 79 with SemanticDispatcher

Use of org.apache.hadoop.hive.ql.lib.SemanticDispatcher in project hive by apache.

From the class SortedDynPartitionOptimizer, method transform.

@Override
public ParseContext transform(ParseContext pCtx) throws SemanticException {
    // create a walker which walks the tree in a DFS manner while maintaining the
    // operator stack. The dispatcher generates the plan from the operator tree
    Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
    String FS = FileSinkOperator.getOperatorName() + "%";
    opRules.put(new RuleRegExp("Sorted Dynamic Partition", FS), getSortDynPartProc(pCtx));
    SemanticDispatcher disp = new DefaultRuleDispatcher(null, opRules, null);
    SemanticGraphWalker ogw = new DefaultGraphWalker(disp);
    ArrayList<Node> topNodes = new ArrayList<Node>();
    topNodes.addAll(pCtx.getTopOps().values());
    ogw.startWalking(topNodes, null);
    return pCtx;
}
Also used: SemanticRule (org.apache.hadoop.hive.ql.lib.SemanticRule), DefaultRuleDispatcher (org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher), DefaultGraphWalker (org.apache.hadoop.hive.ql.lib.DefaultGraphWalker), Node (org.apache.hadoop.hive.ql.lib.Node), RuleRegExp (org.apache.hadoop.hive.ql.lib.RuleRegExp), ArrayList (java.util.ArrayList), SemanticGraphWalker (org.apache.hadoop.hive.ql.lib.SemanticGraphWalker), LinkedHashMap (java.util.LinkedHashMap), SemanticDispatcher (org.apache.hadoop.hive.ql.lib.SemanticDispatcher), SemanticNodeProcessor (org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor)

Example 80 with SemanticDispatcher

Use of org.apache.hadoop.hive.ql.lib.SemanticDispatcher in project hive by apache.

From the class SortedMergeBucketMapJoinOptimizer, method transform.

@Override
public ParseContext transform(ParseContext pctx) throws SemanticException {
    HiveConf conf = pctx.getConf();
    SortBucketJoinProcCtx smbJoinContext = new SortBucketJoinProcCtx(conf);
    // Get a list of joins which cannot be converted to a sort-merge join.
    // Currently only select and filter operators are allowed between the table scan
    // and the join; more operators can be added later. The method
    // supportAutomaticSortMergeJoin dictates which operators are allowed.
    getListOfRejectedJoins(pctx, smbJoinContext);
    Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
    // Go through all map joins and find those that have bucket map join enabled.
    opRules.put(new RuleRegExp("R1", MapJoinOperator.getOperatorName() + "%"), getSortedMergeBucketMapjoinProc(pctx));
    // If automatic sort-merge join conversion is enabled, regular joins may also be
    // converted to sort-merge joins.
    if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_AUTO_SORTMERGE_JOIN)) {
        opRules.put(new RuleRegExp("R2", "JOIN%"), getSortedMergeJoinProc(pctx));
    }
    SemanticDispatcher disp = new DefaultRuleDispatcher(getDefaultProc(), opRules, smbJoinContext);
    SemanticGraphWalker ogw = new DefaultGraphWalker(disp);
    // Create a list of topop nodes
    ArrayList<Node> topNodes = new ArrayList<Node>();
    topNodes.addAll(pctx.getTopOps().values());
    ogw.startWalking(topNodes, null);
    return pctx;
}
Also used: SemanticRule (org.apache.hadoop.hive.ql.lib.SemanticRule), DefaultRuleDispatcher (org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher), DefaultGraphWalker (org.apache.hadoop.hive.ql.lib.DefaultGraphWalker), Node (org.apache.hadoop.hive.ql.lib.Node), RuleRegExp (org.apache.hadoop.hive.ql.lib.RuleRegExp), ArrayList (java.util.ArrayList), SemanticGraphWalker (org.apache.hadoop.hive.ql.lib.SemanticGraphWalker), LinkedHashMap (java.util.LinkedHashMap), SemanticDispatcher (org.apache.hadoop.hive.ql.lib.SemanticDispatcher), HiveConf (org.apache.hadoop.hive.conf.HiveConf), SemanticNodeProcessor (org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor)
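
All five examples follow the same rule/dispatcher/walker pattern. The following minimal, self-contained sketch mirrors only the wiring shown above; the class name, rule name, and processor body are hypothetical:

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Stack;
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.RuleRegExp;
import org.apache.hadoop.hive.ql.lib.SemanticDispatcher;
import org.apache.hadoop.hive.ql.lib.SemanticGraphWalker;
import org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor;
import org.apache.hadoop.hive.ql.lib.SemanticRule;
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;

public class ExampleWalkTransform {
    public ParseContext transform(ParseContext pctx) throws SemanticException {
        // 1. Map rule patterns, built from operator names, to node processors.
        Map<SemanticRule, SemanticNodeProcessor> opRules =
                new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
        opRules.put(new RuleRegExp("ExampleRule",
                TableScanOperator.getOperatorName() + "%"),
                new SemanticNodeProcessor() {
                    @Override
                    public Object process(Node nd, Stack<Node> stack,
                            NodeProcessorCtx procCtx, Object... nodeOutputs)
                            throws SemanticException {
                        // Hypothetical processor body: inspect or rewrite the
                        // matched TableScanOperator here.
                        return null;
                    }
                });
        // 2. The dispatcher fires the processor of the closest matching rule
        //    (no default processor, no shared processing context in this sketch).
        SemanticDispatcher disp = new DefaultRuleDispatcher(null, opRules, null);
        // 3. The walker visits the operator DAG depth-first, starting from the
        //    top (table scan) operators, dispatching at each node.
        SemanticGraphWalker ogw = new DefaultGraphWalker(disp);
        List<Node> topNodes = new ArrayList<Node>(pctx.getTopOps().values());
        ogw.startWalking(topNodes, null);
        return pctx;
    }
}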

Aggregations

SemanticDispatcher (org.apache.hadoop.hive.ql.lib.SemanticDispatcher): 94
Node (org.apache.hadoop.hive.ql.lib.Node): 92
ArrayList (java.util.ArrayList): 84
SemanticGraphWalker (org.apache.hadoop.hive.ql.lib.SemanticGraphWalker): 84
DefaultRuleDispatcher (org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher): 77
SemanticNodeProcessor (org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor): 65
LinkedHashMap (java.util.LinkedHashMap): 64
SemanticRule (org.apache.hadoop.hive.ql.lib.SemanticRule): 64
RuleRegExp (org.apache.hadoop.hive.ql.lib.RuleRegExp): 55
DefaultGraphWalker (org.apache.hadoop.hive.ql.lib.DefaultGraphWalker): 52
HashMap (java.util.HashMap): 19
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 15
ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc): 14
ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc): 13
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc): 13
MapJoinOperator (org.apache.hadoop.hive.ql.exec.MapJoinOperator): 10
ReduceSinkOperator (org.apache.hadoop.hive.ql.exec.ReduceSinkOperator): 10
ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc): 10
Test (org.junit.Test): 10
List (java.util.List): 9