Use of org.apache.hadoop.hive.ql.lib.SemanticRule in project hive by apache.
Class NonBlockingOpDeDupProc, method transform.
@Override
public ParseContext transform(ParseContext pctx) throws SemanticException {
// 1. We apply the transformation: merge consecutive SELECT-SELECT and
// FILTER-FILTER pairs into single operators.
String SEL = SelectOperator.getOperatorName();
String FIL = FilterOperator.getOperatorName();
Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
// "SEL%SEL%" matches two consecutive SelectOperators on the walk stack;
// "FIL%FIL%" matches two consecutive FilterOperators.
opRules.put(new RuleRegExp("R1", SEL + "%" + SEL + "%"), new SelectDedup(pctx));
opRules.put(new RuleRegExp("R2", FIL + "%" + FIL + "%"), new FilterDedup());
// No default processor: nodes that match no rule are skipped.
SemanticDispatcher disp = new DefaultRuleDispatcher(null, opRules, null);
SemanticGraphWalker ogw = new DefaultGraphWalker(disp);
List<Node> topNodes = new ArrayList<Node>();
topNodes.addAll(pctx.getTopOps().values());
ogw.startWalking(topNodes, null);
return pctx;
}
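For reference, a minimal sketch of the contract a processor such as SelectDedup or FilterDedup fulfills; the class CountingProcessor and its counter are hypothetical, not part of Hive:

import java.util.Stack;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor;
import org.apache.hadoop.hive.ql.parse.SemanticException;

// Hypothetical processor: counts how often its rule fires during a walk.
public class CountingProcessor implements SemanticNodeProcessor {
  private int matches = 0;

  @Override
  public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
      Object... nodeOutputs) throws SemanticException {
    // nd is the node that completed the rule's pattern; the stack holds the
    // full path from the root operator down to nd.
    matches++;
    return null;
  }

  public int getMatches() {
    return matches;
  }
}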
Use of org.apache.hadoop.hive.ql.lib.SemanticRule in project hive by apache.
Class OrderlessLimitPushDownOptimizer, method transform.
@Override
public ParseContext transform(ParseContext pctx) throws SemanticException {
Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
opRules.put(new RuleRegExp("LIMIT push down", LimitOperator.getOperatorName() + "%"), new LimitPushDown());
SemanticGraphWalker walker = new DefaultGraphWalker(new DefaultRuleDispatcher(null, opRules, null));
walker.startWalking(new ArrayList<>(pctx.getTopOps().values()), null);
return pctx;
}
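Since SemanticNodeProcessor declares a single process method, the same walk can be sketched with an inline lambda in place of LimitPushDown; this is illustrative only, as the real processor performs the actual push-down:

Map<SemanticRule, SemanticNodeProcessor> rules = new LinkedHashMap<>();
rules.put(new RuleRegExp("LIM", LimitOperator.getOperatorName() + "%"),
    (nd, stack, procCtx, nodeOutputs) -> {
      // Fires once for every LimitOperator reached during the walk.
      return null;
    });
SemanticGraphWalker walker =
    new DefaultGraphWalker(new DefaultRuleDispatcher(null, rules, null));
walker.startWalking(new ArrayList<>(pctx.getTopOps().values()), null);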
Use of org.apache.hadoop.hive.ql.lib.SemanticRule in project hive by apache.
Class PrunerUtils, method walkOperatorTree.
/**
 * Walk operator tree for pruner generation.
 *
 * @param pctx parse context holding the operator tree to walk
 * @param opWalkerCtx walker context handed to each fired processor
 * @param filterProc processor fired on the TS-FIL and TS-FIL-FIL paths
 * @param defaultProc processor fired on every node that matches no rule
 * @throws SemanticException if a processor fails during the walk
 */
public static void walkOperatorTree(ParseContext pctx, NodeProcessorCtx opWalkerCtx, SemanticNodeProcessor filterProc, SemanticNodeProcessor defaultProc) throws SemanticException {
Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
// Build exact-match rules for the operator paths
// "(TS%FIL%)|(TS%FIL%FIL%)": a TableScan followed by one or two Filters.
String tsOprName = TableScanOperator.getOperatorName();
String filtOprName = FilterOperator.getOperatorName();
opRules.put(new RuleExactMatch("R1", new String[] { tsOprName, filtOprName, filtOprName }), filterProc);
opRules.put(new RuleExactMatch("R2", new String[] { tsOprName, filtOprName }), filterProc);
// The dispatcher fires the processor corresponding to the closest matching
// rule and passes the context along
SemanticDispatcher disp = new DefaultRuleDispatcher(defaultProc, opRules, opWalkerCtx);
SemanticGraphWalker ogw = new DefaultGraphWalker(disp);
// Create a list of topop nodes
ArrayList<Node> topNodes = new ArrayList<Node>();
topNodes.addAll(pctx.getTopOps().values());
ogw.startWalking(topNodes, null);
}
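A hedged usage sketch of walkOperatorTree; the lambda bodies are placeholders, and opWalkerCtx stands for whichever NodeProcessorCtx the concrete pruner supplies:

SemanticNodeProcessor filterProc = (nd, stack, procCtx, nodeOutputs) -> {
  // Reached only when the walk path is exactly TS,FIL or TS,FIL,FIL.
  return null;
};
SemanticNodeProcessor defaultProc = (nd, stack, procCtx, nodeOutputs) -> null;
PrunerUtils.walkOperatorTree(pctx, opWalkerCtx, filterProc, defaultProc);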
Use of org.apache.hadoop.hive.ql.lib.SemanticRule in project hive by apache.
Class PrunerUtils, method walkExprTree.
/**
 * Walk expression tree for pruner generation.
 *
 * @param pred predicate expression whose tree is walked
 * @param ctx walker context handed to each fired processor
 * @param colProc processor fired on ExprNodeColumnDesc nodes
 * @param fieldProc processor fired on ExprNodeFieldDesc nodes
 * @param genFuncProc processor fired on ExprNodeGenericFuncDesc nodes
 * @param defProc processor fired on nodes that match no type rule
 * @return a map from each walked node to its processor's output
 * @throws SemanticException if a processor fails during the walk
 */
public static Map<Node, Object> walkExprTree(ExprNodeDesc pred, NodeProcessorCtx ctx, SemanticNodeProcessor colProc, SemanticNodeProcessor fieldProc, SemanticNodeProcessor genFuncProc, SemanticNodeProcessor defProc) throws SemanticException {
// Create a walker which walks the tree in a DFS manner while maintaining
// the node stack. The dispatcher fires the processor whose TypeRule matches
// the class of the current expression node.
Map<SemanticRule, SemanticNodeProcessor> exprRules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
exprRules.put(new TypeRule(ExprNodeColumnDesc.class), colProc);
exprRules.put(new TypeRule(ExprNodeFieldDesc.class), fieldProc);
exprRules.put(new TypeRule(ExprNodeGenericFuncDesc.class), genFuncProc);
// The dispatcher fires the processor corresponding to the closest matching
// rule and passes the context along
SemanticDispatcher disp = new DefaultRuleDispatcher(defProc, exprRules, ctx);
SemanticGraphWalker egw = new DefaultGraphWalker(disp);
List<Node> startNodes = new ArrayList<Node>();
startNodes.add(pred);
HashMap<Node, Object> outputMap = new HashMap<Node, Object>();
egw.startWalking(startNodes, outputMap);
return outputMap;
}
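As a hedged example, a colProc that records referenced column names; the list is illustrative, and fieldProc, genFuncProc, and defProc are assumed to be defined elsewhere:

List<String> referencedColumns = new ArrayList<>();
SemanticNodeProcessor colProc = (nd, stack, procCtx, nodeOutputs) -> {
  // The TypeRule guarantees nd is an ExprNodeColumnDesc here.
  referencedColumns.add(((ExprNodeColumnDesc) nd).getColumn());
  return null;
};
Map<Node, Object> outputs =
    PrunerUtils.walkExprTree(pred, ctx, colProc, fieldProc, genFuncProc, defProc);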
Use of org.apache.hadoop.hive.ql.lib.SemanticRule in project hive by apache.
Class SparkReduceSinkMapJoinProc, method hasGroupBy.
private boolean hasGroupBy(Operator<? extends OperatorDesc> mapjoinOp, GenSparkProcContext context) throws SemanticException {
List<Operator<? extends OperatorDesc>> childOps = mapjoinOp.getChildOperators();
Map<SemanticRule, SemanticNodeProcessor> rules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
SparkMapJoinFollowedByGroupByProcessor processor = new SparkMapJoinFollowedByGroupByProcessor();
rules.put(new RuleRegExp("GBY", GroupByOperator.getOperatorName() + "%"), processor);
SemanticDispatcher disp = new DefaultRuleDispatcher(null, rules, context);
SemanticGraphWalker ogw = new DefaultGraphWalker(disp);
// Start walking at the map join's children so only the subtree below the
// join is searched for a GroupByOperator.
ArrayList<Node> topNodes = new ArrayList<Node>();
topNodes.addAll(childOps);
ogw.startWalking(topNodes, null);
return processor.getHasGroupBy();
}
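The flag-reading pattern above (getHasGroupBy) suggests a processor shaped roughly like this sketch; FlagProcessor is hypothetical, and the real SparkMapJoinFollowedByGroupByProcessor may differ:

// Hypothetical flag-setting processor in the style of the snippet above.
class FlagProcessor implements SemanticNodeProcessor {
  private boolean fired = false;

  @Override
  public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
      Object... nodeOutputs) throws SemanticException {
    fired = true; // the GBY rule matched: a GroupByOperator follows the join
    return null;
  }

  public boolean hasFired() {
    return fired;
  }
}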