Use of org.apache.hadoop.hive.ql.lib.Dispatcher in project hive by apache.
The class SparkCompiler, method runDynamicPartitionPruning.
private void runDynamicPartitionPruning(OptimizeSparkProcContext procCtx) throws SemanticException {
  if (!conf.isSparkDPPAny()) {
    return;
  }
  ParseContext parseContext = procCtx.getParseContext();
  Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>();
  opRules.put(new RuleRegExp("Dynamic Partition Pruning", FilterOperator.getOperatorName() + "%"),
      new DynamicPartitionPruningOptimization());
  // The dispatcher fires the processor corresponding to the closest matching
  // rule and passes the context along.
  Dispatcher disp = new DefaultRuleDispatcher(null, opRules, procCtx);
  GraphWalker ogw = new ForwardWalker(disp);
  List<Node> topNodes = new ArrayList<Node>();
  topNodes.addAll(parseContext.getTopOps().values());
  ogw.startWalking(topNodes, null);
}
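Each rule registered in opRules maps to a NodeProcessor, which the dispatcher invokes when the rule matches. Below is a minimal sketch of that shape; the class name TraceFilterProcessor and its logging body are illustrative stand-ins, not Hive's actual DynamicPartitionPruningOptimization.

import java.util.Stack;

import org.apache.hadoop.hive.ql.exec.FilterOperator;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.parse.SemanticException;

public class TraceFilterProcessor implements NodeProcessor {
  @Override
  public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
      Object... nodeOutputs) throws SemanticException {
    // The dispatcher only fires this processor when the rule matched, so for
    // a "FIL%" rule the visited node is a FilterOperator.
    FilterOperator filter = (FilterOperator) nd;
    System.out.println("visited " + filter.getName() + " at stack depth " + stack.size());
    // The return value becomes this node's output, visible to processors of
    // downstream nodes through the nodeOutputs varargs and the walker's output map.
    return null;
  }
}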
Use of org.apache.hadoop.hive.ql.lib.Dispatcher in project hive by apache.
The class ExprProcFactory, method getExprDependency.
/**
 * Gets the expression dependencies for the expression.
 *
 * @param lctx
 *          The lineage context containing the input operators dependencies.
 * @param inpOp
 *          The input operator to the current operator.
 * @param expr
 *          The expression that is being processed.
 * @param outputMap
 *          The map in which the walker records the output of each processed
 *          expression node; it is cleared before the walk starts.
 * @return The dependency computed for expr, or null if none was produced.
 * @throws SemanticException
 */
public static Dependency getExprDependency(LineageCtx lctx, Operator<? extends OperatorDesc> inpOp,
    ExprNodeDesc expr, HashMap<Node, Object> outputMap) throws SemanticException {
  outputMap.clear();
  // Create the walker, the rules dispatcher and the context.
  ExprProcCtx exprCtx = new ExprProcCtx(lctx, inpOp);
  // Create a walker which walks the expression tree in a DFS manner while
  // maintaining the node stack. The dispatcher fires the processor
  // corresponding to the closest matching rule and passes the context along.
  Dispatcher disp = new DefaultRuleDispatcher(getDefaultExprProcessor(), exprRules, exprCtx);
  GraphWalker egw = new DefaultGraphWalker(disp);
  List<Node> startNodes = Collections.singletonList((Node) expr);
  egw.startWalking(startNodes, outputMap);
  return (Dependency) outputMap.get(expr);
}
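A hedged usage sketch of the method above: lineageCtx, inputOp, and colExpr are hypothetical stand-ins for values a lineage pass would already hold, and the getBaseCols() call is shown only to illustrate that the returned Dependency can be inspected.

// Hypothetical inputs: a LineageCtx, the operator feeding the expression,
// and the ExprNodeDesc under analysis.
HashMap<Node, Object> outputs = new HashMap<Node, Object>();
Dependency dep = ExprProcFactory.getExprDependency(lineageCtx, inputOp, colExpr, outputs);
if (dep != null) {
  // The base table columns the expression ultimately reads from.
  System.out.println("base columns: " + dep.getBaseCols());
}
// outputs now also maps each visited sub-expression node to its result,
// since the method passed it to startWalking as the walker's output map.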
Use of org.apache.hadoop.hive.ql.lib.Dispatcher in project hive by apache.
The class LlapPreVectorizationPass, method resolve.
@Override
public PhysicalContext resolve(PhysicalContext pctx) throws SemanticException {
  HiveConf conf = pctx.getConf();
  LlapMode mode = LlapMode.valueOf(HiveConf.getVar(conf, HiveConf.ConfVars.LLAP_EXECUTION_MODE));
  if (mode == none) { // LlapMode.none, statically imported
    LOG.info("LLAP disabled.");
    return pctx;
  }
  Dispatcher disp = new LlapPreVectorizationPassDispatcher(pctx);
  GraphWalker ogw = new DefaultGraphWalker(disp);
  ArrayList<Node> topNodes = new ArrayList<Node>();
  topNodes.addAll(pctx.getRootTasks());
  ogw.startWalking(topNodes, null);
  return pctx;
}
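Here the Dispatcher is a custom subclass rather than a DefaultRuleDispatcher over a rule map. The sketch below shows only the bare Dispatcher contract such a class satisfies; LoggingDispatcher and its tracing body are illustrative, not what LlapPreVectorizationPassDispatcher actually does.

import java.util.Stack;

import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.parse.SemanticException;

public class LoggingDispatcher implements Dispatcher {
  @Override
  public Object dispatch(Node nd, Stack<Node> stack, Object... nodeOutputs)
      throws SemanticException {
    // Called once per node the GraphWalker visits; a rule-based dispatcher
    // would select a processor here, this sketch just traces the visit.
    System.out.println("dispatching " + nd.getName() + " (depth " + stack.size() + ")");
    return null;
  }
}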
Use of org.apache.hadoop.hive.ql.lib.Dispatcher in project hive by apache.
The class ConstantPropagate, method transform.
/**
 * Transform the query tree.
 *
 * @param pactx
 *          the current parse context
 * @return the transformed parse context
 */
@Override
public ParseContext transform(ParseContext pactx) throws SemanticException {
  pGraphContext = pactx;
  // Create the processor context; it records the operators that become
  // redundant during folding and can be deleted afterwards.
  ConstantPropagateProcCtx cppCtx = new ConstantPropagateProcCtx(constantPropagateOption);
  // Bind a processor to each operator type that can fold or propagate constants.
  Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>();
  opRules.put(new RuleRegExp("R1", FilterOperator.getOperatorName() + "%"),
      ConstantPropagateProcFactory.getFilterProc());
  opRules.put(new RuleRegExp("R2", GroupByOperator.getOperatorName() + "%"),
      ConstantPropagateProcFactory.getGroupByProc());
  opRules.put(new RuleRegExp("R3", SelectOperator.getOperatorName() + "%"),
      ConstantPropagateProcFactory.getSelectProc());
  opRules.put(new RuleRegExp("R4", FileSinkOperator.getOperatorName() + "%"),
      ConstantPropagateProcFactory.getFileSinkProc());
  opRules.put(new RuleRegExp("R5", ReduceSinkOperator.getOperatorName() + "%"),
      ConstantPropagateProcFactory.getReduceSinkProc());
  opRules.put(new RuleRegExp("R6", JoinOperator.getOperatorName() + "%"),
      ConstantPropagateProcFactory.getJoinProc());
  opRules.put(new RuleRegExp("R7", TableScanOperator.getOperatorName() + "%"),
      ConstantPropagateProcFactory.getTableScanProc());
  opRules.put(new RuleRegExp("R8", ScriptOperator.getOperatorName() + "%"),
      ConstantPropagateProcFactory.getStopProc());
  // The dispatcher fires the processor corresponding to the closest matching
  // rule and passes the context along.
  Dispatcher disp = new DefaultRuleDispatcher(ConstantPropagateProcFactory.getDefaultProc(), opRules, cppCtx);
  GraphWalker ogw = new ConstantPropagateWalker(disp);
  // Create a list of operator nodes to start the walking.
  ArrayList<Node> topNodes = new ArrayList<Node>();
  topNodes.addAll(pGraphContext.getTopOps().values());
  ogw.startWalking(topNodes, null);
  for (Operator<? extends Serializable> opToDelete : cppCtx.getOpToDelete()) {
    if (opToDelete.getParentOperators() == null || opToDelete.getParentOperators().size() != 1) {
      throw new RuntimeException("Error pruning operator " + opToDelete + ". It should have only 1 parent.");
    }
    opToDelete.getParentOperators().get(0).removeChildAndAdoptItsChildren(opToDelete);
  }
  cppCtx.getOpToDelete().clear();
  return pGraphContext;
}
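The recurring "closest matching rule" comment refers to how DefaultRuleDispatcher chooses among the registered rules: each Rule reports a cost against the current node stack, a negative cost means no match, and the processor of the lowest-cost matching rule fires, with the default processor as the fallback. A minimal custom Rule makes that contract concrete; the depth-based policy below is purely illustrative, not something Hive ships.

import java.util.Stack;

import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.Rule;
import org.apache.hadoop.hive.ql.parse.SemanticException;

public class DepthRule implements Rule {
  private final int depth;

  public DepthRule(int depth) {
    this.depth = depth;
  }

  @Override
  public int cost(Stack<Node> stack) throws SemanticException {
    // A negative cost means "no match"; among matching rules the
    // dispatcher fires the one with the lowest cost.
    return stack.size() == depth ? 0 : -1;
  }

  @Override
  public String getName() {
    return "DepthRule(" + depth + ")";
  }
}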
Use of org.apache.hadoop.hive.ql.lib.Dispatcher in project hive by apache.
The class CountDistinctRewriteProc, method transform.
@Override
public ParseContext transform(ParseContext pctx) throws SemanticException {
  Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>();
  // Process the group-by pattern: GroupBy -> ReduceSink -> GroupBy.
  opRules.put(new RuleRegExp("R1", GroupByOperator.getOperatorName() + "%"
      + ReduceSinkOperator.getOperatorName() + "%"
      + GroupByOperator.getOperatorName() + "%"), getCountDistinctProc(pctx));
  // The dispatcher fires the processor corresponding to the closest matching
  // rule and passes the context along.
  Dispatcher disp = new DefaultRuleDispatcher(getDefaultProc(), opRules, null);
  GraphWalker ogw = new DefaultGraphWalker(disp);
  // Create a list of top operator nodes to start the walking.
  List<Node> topNodes = new ArrayList<Node>();
  topNodes.addAll(pctx.getTopOps().values());
  ogw.startWalking(topNodes, null);
  return pctx;
}
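Unlike the single-operator rules in the earlier snippets, this rule concatenates three operator-name tokens, so it only fires when a GroupBy feeding a ReduceSink feeding a GroupBy sits on the walk stack. An illustrative expansion of the pattern string (GBY and RS are Hive's short names for these operators):

// Illustrative only: what the composite rule pattern evaluates to.
String pattern = GroupByOperator.getOperatorName() + "%"
    + ReduceSinkOperator.getOperatorName() + "%"
    + GroupByOperator.getOperatorName() + "%";
System.out.println(pattern); // prints "GBY%RS%GBY%"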