
Example 6 with SemanticNodeProcessor

Use of org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor in project hive by apache, from class NonBlockingOpDeDupProc, method transform:

@Override
public ParseContext transform(ParseContext pctx) throws SemanticException {
    // 1. We apply the transformation
    String SEL = SelectOperator.getOperatorName();
    String FIL = FilterOperator.getOperatorName();
    Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
    opRules.put(new RuleRegExp("R1", SEL + "%" + SEL + "%"), new SelectDedup(pctx));
    opRules.put(new RuleRegExp("R2", FIL + "%" + FIL + "%"), new FilterDedup());
    SemanticDispatcher disp = new DefaultRuleDispatcher(null, opRules, null);
    SemanticGraphWalker ogw = new DefaultGraphWalker(disp);
    List<Node> topNodes = new ArrayList<Node>();
    topNodes.addAll(pctx.getTopOps().values());
    ogw.startWalking(topNodes, null);
    return pctx;
}
Also used : SemanticRule(org.apache.hadoop.hive.ql.lib.SemanticRule) DefaultRuleDispatcher(org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher) DefaultGraphWalker(org.apache.hadoop.hive.ql.lib.DefaultGraphWalker) Node(org.apache.hadoop.hive.ql.lib.Node) RuleRegExp(org.apache.hadoop.hive.ql.lib.RuleRegExp) ArrayList(java.util.ArrayList) SemanticGraphWalker(org.apache.hadoop.hive.ql.lib.SemanticGraphWalker) LinkedHashMap(java.util.LinkedHashMap) SemanticDispatcher(org.apache.hadoop.hive.ql.lib.SemanticDispatcher) SemanticNodeProcessor(org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor)
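
The examples register processors such as SelectDedup and FilterDedup but never show the processor side of the contract. Below is a minimal, hypothetical SemanticNodeProcessor (TracingProcessor is an illustrative name, not the real SelectDedup) that could be plugged into the same rule map; it assumes only the single process(...) method declared by org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor.

import java.util.Stack;

import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor;
import org.apache.hadoop.hive.ql.parse.SemanticException;

/**
 * Hypothetical processor: the dispatcher fires it whenever a registered rule
 * (e.g. "SEL%SEL%") matches the operator path leading to the current node.
 */
public class TracingProcessor implements SemanticNodeProcessor {
    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
            Object... nodeOutputs) throws SemanticException {
        // nd is the node that completed the match; stack holds the walked path
        // from a top operator down to nd, which is what rule patterns match against.
        System.out.println("Rule fired at " + nd.getName() + ", path depth " + stack.size());
        // The returned value is stored in the walker's node-output map (null is fine).
        return null;
    }
}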

Example 7 with SemanticNodeProcessor

Use of org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor in project hive by apache, from class OrderlessLimitPushDownOptimizer, method transform:

@Override
public ParseContext transform(ParseContext pctx) throws SemanticException {
    Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
    opRules.put(new RuleRegExp("LIMIT push down", LimitOperator.getOperatorName() + "%"), new LimitPushDown());
    SemanticGraphWalker walker = new DefaultGraphWalker(new DefaultRuleDispatcher(null, opRules, null));
    walker.startWalking(new ArrayList<>(pctx.getTopOps().values()), null);
    return pctx;
}
Also used : SemanticRule(org.apache.hadoop.hive.ql.lib.SemanticRule) DefaultRuleDispatcher(org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher) DefaultGraphWalker(org.apache.hadoop.hive.ql.lib.DefaultGraphWalker) RuleRegExp(org.apache.hadoop.hive.ql.lib.RuleRegExp) SemanticGraphWalker(org.apache.hadoop.hive.ql.lib.SemanticGraphWalker) SemanticNodeProcessor(org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor) LinkedHashMap(java.util.LinkedHashMap)

Example 8 with SemanticNodeProcessor

Use of org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor in project hive by apache, from class PrunerUtils, method walkOperatorTree:

/**
 * Walk the operator tree for pruner generation.
 *
 * @param pctx        parse context of the query being optimized
 * @param opWalkerCtx pruner-specific walker context passed to the processors
 * @param filterProc  processor fired when a filter directly follows a table scan
 * @param defaultProc processor fired for nodes that match no rule
 * @throws SemanticException if the walk or one of the processors fails
 */
public static void walkOperatorTree(ParseContext pctx, NodeProcessorCtx opWalkerCtx, SemanticNodeProcessor filterProc, SemanticNodeProcessor defaultProc) throws SemanticException {
    Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
    // Build rules for the operator paths TS,FIL,FIL and TS,FIL
    // (matched exactly against the walked operator sequence, not as a regular expression).
    String tsOprName = TableScanOperator.getOperatorName();
    String filtOprName = FilterOperator.getOperatorName();
    opRules.put(new RuleExactMatch("R1", new String[] { tsOprName, filtOprName, filtOprName }), filterProc);
    opRules.put(new RuleExactMatch("R2", new String[] { tsOprName, filtOprName }), filterProc);
    // The dispatcher fires the processor corresponding to the closest matching
    // rule and passes the context along
    SemanticDispatcher disp = new DefaultRuleDispatcher(defaultProc, opRules, opWalkerCtx);
    SemanticGraphWalker ogw = new DefaultGraphWalker(disp);
    // Create a list of topop nodes
    ArrayList<Node> topNodes = new ArrayList<Node>();
    topNodes.addAll(pctx.getTopOps().values());
    ogw.startWalking(topNodes, null);
}
Also used : SemanticRule(org.apache.hadoop.hive.ql.lib.SemanticRule) DefaultRuleDispatcher(org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher) DefaultGraphWalker(org.apache.hadoop.hive.ql.lib.DefaultGraphWalker) Node(org.apache.hadoop.hive.ql.lib.Node) ArrayList(java.util.ArrayList) SemanticGraphWalker(org.apache.hadoop.hive.ql.lib.SemanticGraphWalker) LinkedHashMap(java.util.LinkedHashMap) RuleExactMatch(org.apache.hadoop.hive.ql.lib.RuleExactMatch) SemanticDispatcher(org.apache.hadoop.hive.ql.lib.SemanticDispatcher) SemanticNodeProcessor(org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor)
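
A hedged sketch of how a caller might wire walkOperatorTree: PrunerWalkSketch and the lambda processor bodies are illustrative placeholders (not Hive's actual partition- or list-bucketing pruners), and the import paths are assumed from the Hive source tree.

import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor;
import org.apache.hadoop.hive.ql.optimizer.PrunerUtils;
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;

// Hypothetical caller: shows only the wiring, not real pruning logic.
public class PrunerWalkSketch {
    public static void run(ParseContext pctx, NodeProcessorCtx prunerCtx) throws SemanticException {
        // Fired for the exact operator sequences TS,FIL,FIL and TS,FIL; a real pruner
        // would derive a pruning predicate from the FilterOperator here.
        SemanticNodeProcessor filterProc = (nd, stack, ctx, outputs) -> null;
        // Fired for every node that no rule matches.
        SemanticNodeProcessor defaultProc = (nd, stack, ctx, outputs) -> null;
        PrunerUtils.walkOperatorTree(pctx, prunerCtx, filterProc, defaultProc);
    }
}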

Example 9 with SemanticNodeProcessor

Use of org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor in project hive by apache, from class PrunerUtils, method walkExprTree:

/**
 * Walk an expression tree for pruner generation.
 *
 * @param pred        root predicate expression to walk
 * @param ctx         pruner-specific walker context passed to the processors
 * @param colProc     processor fired for ExprNodeColumnDesc nodes
 * @param fieldProc   processor fired for ExprNodeFieldDesc nodes
 * @param genFuncProc processor fired for ExprNodeGenericFuncDesc nodes
 * @param defProc     processor fired for nodes that match no rule
 * @return map from each visited node to the value returned by its processor
 * @throws SemanticException if the walk or one of the processors fails
 */
public static Map<Node, Object> walkExprTree(ExprNodeDesc pred, NodeProcessorCtx ctx, SemanticNodeProcessor colProc, SemanticNodeProcessor fieldProc, SemanticNodeProcessor genFuncProc, SemanticNodeProcessor defProc) throws SemanticException {
    // Create a walker which walks the expression tree in a DFS manner while
    // maintaining the node stack. The dispatcher fires the processor whose rule
    // matches the type of the current expression node.
    Map<SemanticRule, SemanticNodeProcessor> exprRules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
    exprRules.put(new TypeRule(ExprNodeColumnDesc.class), colProc);
    exprRules.put(new TypeRule(ExprNodeFieldDesc.class), fieldProc);
    exprRules.put(new TypeRule(ExprNodeGenericFuncDesc.class), genFuncProc);
    // The dispatcher fires the processor corresponding to the closest matching
    // rule and passes the context along
    SemanticDispatcher disp = new DefaultRuleDispatcher(defProc, exprRules, ctx);
    SemanticGraphWalker egw = new DefaultGraphWalker(disp);
    List<Node> startNodes = new ArrayList<Node>();
    startNodes.add(pred);
    HashMap<Node, Object> outputMap = new HashMap<Node, Object>();
    egw.startWalking(startNodes, outputMap);
    return outputMap;
}
Also used : SemanticRule(org.apache.hadoop.hive.ql.lib.SemanticRule) DefaultRuleDispatcher(org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher) HashMap(java.util.HashMap) LinkedHashMap(java.util.LinkedHashMap) DefaultGraphWalker(org.apache.hadoop.hive.ql.lib.DefaultGraphWalker) Node(org.apache.hadoop.hive.ql.lib.Node) ArrayList(java.util.ArrayList) ExprNodeGenericFuncDesc(org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc) SemanticGraphWalker(org.apache.hadoop.hive.ql.lib.SemanticGraphWalker) LinkedHashMap(java.util.LinkedHashMap) ExprNodeFieldDesc(org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc) ExprNodeColumnDesc(org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc) SemanticDispatcher(org.apache.hadoop.hive.ql.lib.SemanticDispatcher) SemanticNodeProcessor(org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor) TypeRule(org.apache.hadoop.hive.ql.lib.TypeRule)
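
A similar hedged sketch for walkExprTree: ExprWalkSketch is a hypothetical caller whose processors simply return the node they were fired on, just to show how results come back through the output map.

import java.util.Map;

import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.optimizer.PrunerUtils;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;

// Hypothetical caller: inspects the per-node results recorded by the walker.
public class ExprWalkSketch {
    public static Object walk(ExprNodeDesc predicate, NodeProcessorCtx prunerCtx)
            throws SemanticException {
        Map<Node, Object> results = PrunerUtils.walkExprTree(
            predicate, prunerCtx,
            (nd, stack, ctx, outputs) -> nd,   // ExprNodeColumnDesc nodes
            (nd, stack, ctx, outputs) -> nd,   // ExprNodeFieldDesc nodes
            (nd, stack, ctx, outputs) -> nd,   // ExprNodeGenericFuncDesc nodes
            (nd, stack, ctx, outputs) -> nd);  // default processor for everything else
        // The walker records one entry per visited node; the root predicate's
        // entry is typically what a pruner consumes.
        return results.get(predicate);
    }
}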

Example 10 with SemanticNodeProcessor

Use of org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor in project hive by apache, from class SparkReduceSinkMapJoinProc, method hasGroupBy:

private boolean hasGroupBy(Operator<? extends OperatorDesc> mapjoinOp, GenSparkProcContext context) throws SemanticException {
    List<Operator<? extends OperatorDesc>> childOps = mapjoinOp.getChildOperators();
    Map<SemanticRule, SemanticNodeProcessor> rules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
    SparkMapJoinFollowedByGroupByProcessor processor = new SparkMapJoinFollowedByGroupByProcessor();
    rules.put(new RuleRegExp("GBY", GroupByOperator.getOperatorName() + "%"), processor);
    SemanticDispatcher disp = new DefaultRuleDispatcher(null, rules, context);
    SemanticGraphWalker ogw = new DefaultGraphWalker(disp);
    ArrayList<Node> topNodes = new ArrayList<Node>();
    topNodes.addAll(childOps);
    ogw.startWalking(topNodes, null);
    return processor.getHasGroupBy();
}
Also used : ReduceSinkOperator(org.apache.hadoop.hive.ql.exec.ReduceSinkOperator) MapJoinOperator(org.apache.hadoop.hive.ql.exec.MapJoinOperator) GroupByOperator(org.apache.hadoop.hive.ql.exec.GroupByOperator) Operator(org.apache.hadoop.hive.ql.exec.Operator) HashTableDummyOperator(org.apache.hadoop.hive.ql.exec.HashTableDummyOperator) SparkHashTableSinkOperator(org.apache.hadoop.hive.ql.exec.SparkHashTableSinkOperator) SemanticRule(org.apache.hadoop.hive.ql.lib.SemanticRule) DefaultRuleDispatcher(org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher) DefaultGraphWalker(org.apache.hadoop.hive.ql.lib.DefaultGraphWalker) Node(org.apache.hadoop.hive.ql.lib.Node) RuleRegExp(org.apache.hadoop.hive.ql.lib.RuleRegExp) ArrayList(java.util.ArrayList) SemanticGraphWalker(org.apache.hadoop.hive.ql.lib.SemanticGraphWalker) LinkedHashMap(java.util.LinkedHashMap) SemanticDispatcher(org.apache.hadoop.hive.ql.lib.SemanticDispatcher) SemanticNodeProcessor(org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor) OperatorDesc(org.apache.hadoop.hive.ql.plan.OperatorDesc)
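
SparkMapJoinFollowedByGroupByProcessor itself is not shown above; a plausible shape for such a flag-setting processor is sketched below (GroupByFlagProcessor is an illustrative name, not the actual Hive class).

import java.util.Stack;

import org.apache.hadoop.hive.ql.exec.GroupByOperator;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor;
import org.apache.hadoop.hive.ql.parse.SemanticException;

// Hypothetical flag-setting processor: the "GBY%" rule fires it for GroupByOperator
// nodes reached below the map-join's children, and the caller reads the flag after
// the walk finishes (mirroring processor.getHasGroupBy() in the example above).
public class GroupByFlagProcessor implements SemanticNodeProcessor {
    private boolean hasGroupBy = false;

    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
            Object... nodeOutputs) throws SemanticException {
        // Defensive check; the rule pattern already restricts matches to GroupByOperator.
        if (nd instanceof GroupByOperator) {
            hasGroupBy = true;
        }
        return null;
    }

    public boolean getHasGroupBy() {
        return hasGroupBy;
    }
}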

Aggregations

SemanticNodeProcessor (org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor): 68 uses
LinkedHashMap (java.util.LinkedHashMap): 66 uses
Node (org.apache.hadoop.hive.ql.lib.Node): 66 uses
SemanticGraphWalker (org.apache.hadoop.hive.ql.lib.SemanticGraphWalker): 66 uses
SemanticRule (org.apache.hadoop.hive.ql.lib.SemanticRule): 66 uses
SemanticDispatcher (org.apache.hadoop.hive.ql.lib.SemanticDispatcher): 65 uses
DefaultRuleDispatcher (org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher): 63 uses
ArrayList (java.util.ArrayList): 62 uses
RuleRegExp (org.apache.hadoop.hive.ql.lib.RuleRegExp): 57 uses
DefaultGraphWalker (org.apache.hadoop.hive.ql.lib.DefaultGraphWalker): 36 uses
MapJoinOperator (org.apache.hadoop.hive.ql.exec.MapJoinOperator): 11 uses
ReduceSinkOperator (org.apache.hadoop.hive.ql.exec.ReduceSinkOperator): 11 uses
JoinOperator (org.apache.hadoop.hive.ql.exec.JoinOperator): 9 uses
Operator (org.apache.hadoop.hive.ql.exec.Operator): 9 uses
FileSinkOperator (org.apache.hadoop.hive.ql.exec.FileSinkOperator): 8 uses
ForwardWalker (org.apache.hadoop.hive.ql.lib.ForwardWalker): 8 uses
TableScanOperator (org.apache.hadoop.hive.ql.exec.TableScanOperator): 7 uses
UnionOperator (org.apache.hadoop.hive.ql.exec.UnionOperator): 7 uses
AppMasterEventOperator (org.apache.hadoop.hive.ql.exec.AppMasterEventOperator): 6 uses
DummyStoreOperator (org.apache.hadoop.hive.ql.exec.DummyStoreOperator): 6 uses