Use of org.apache.hadoop.hive.ql.lib.SemanticGraphWalker in project hive by apache.
From the class DagUtils, method collectFileSinkUris.
private void collectFileSinkUris(List<Node> topNodes, Set<URI> uris, Set<TableDesc> tableDescs) {
  // The processor records the URI and table descriptor of each file sink it visits.
  CollectFileSinkUrisNodeProcessor np = new CollectFileSinkUrisNodeProcessor(uris, tableDescs);

  Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
  addCollectFileSinkUrisRules(opRules, np);

  // The dispatcher fires the matching processor for each node; the walker drives the traversal.
  SemanticDispatcher disp = new DefaultRuleDispatcher(np, opRules, null);
  SemanticGraphWalker ogw = new DefaultGraphWalker(disp);
  try {
    ogw.startWalking(topNodes, null);
  } catch (SemanticException e) {
    throw new RuntimeException(e);
  }
}
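All of these usages share one wiring pattern: map SemanticRule patterns to SemanticNodeProcessor callbacks, wrap the map in a DefaultRuleDispatcher (with a default processor for unmatched nodes), and hand the dispatcher to a SemanticGraphWalker that drives the traversal. Below is a minimal sketch of that bare wiring, using a hypothetical processor that merely counts the nodes it visits; NodeCountingWalk and CountingProcessor are illustrative names, not Hive classes.

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Stack;

import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.SemanticDispatcher;
import org.apache.hadoop.hive.ql.lib.SemanticGraphWalker;
import org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor;
import org.apache.hadoop.hive.ql.lib.SemanticRule;
import org.apache.hadoop.hive.ql.parse.SemanticException;

public class NodeCountingWalk {
  // Hypothetical processor: counts every node the walker dispatches to it.
  static class CountingProcessor implements SemanticNodeProcessor {
    int count = 0;

    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
        Object... nodeOutputs) throws SemanticException {
      count++;
      return null;
    }
  }

  static int countNodes(List<Node> topNodes) throws SemanticException {
    CountingProcessor counter = new CountingProcessor();
    // No specific rules registered: the dispatcher falls back to the default
    // processor for every node, so the counter sees the whole graph.
    Map<SemanticRule, SemanticNodeProcessor> opRules =
        new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
    SemanticDispatcher disp = new DefaultRuleDispatcher(counter, opRules, null);
    SemanticGraphWalker ogw = new DefaultGraphWalker(disp);
    ogw.startWalking(topNodes, null);
    return counter.count;
  }
}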
Use of org.apache.hadoop.hive.ql.lib.SemanticGraphWalker in project hive by apache.
From the class ConstantPropagate, method transform.
/**
* Transform the query tree.
*
* @param pactx
* the current parse context
*/
@Override
public ParseContext transform(ParseContext pactx) throws SemanticException {
  pGraphContext = pactx;

  // generate pruned column list for all relevant operators
  ConstantPropagateProcCtx cppCtx = new ConstantPropagateProcCtx(constantPropagateOption);

  // create a walker which walks the tree in a DFS manner while maintaining the
  // operator stack; the dispatcher generates the plan from the operator tree
  Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
  opRules.put(new RuleRegExp("R1", FilterOperator.getOperatorName() + "%"),
      ConstantPropagateProcFactory.getFilterProc());
  opRules.put(new RuleRegExp("R2", GroupByOperator.getOperatorName() + "%"),
      ConstantPropagateProcFactory.getGroupByProc());
  opRules.put(new RuleRegExp("R3", SelectOperator.getOperatorName() + "%"),
      ConstantPropagateProcFactory.getSelectProc());
  opRules.put(new RuleRegExp("R4", FileSinkOperator.getOperatorName() + "%"),
      ConstantPropagateProcFactory.getFileSinkProc());
  opRules.put(new RuleRegExp("R5", ReduceSinkOperator.getOperatorName() + "%"),
      ConstantPropagateProcFactory.getReduceSinkProc());
  opRules.put(new RuleRegExp("R6", JoinOperator.getOperatorName() + "%"),
      ConstantPropagateProcFactory.getJoinProc());
  opRules.put(new RuleRegExp("R7", TableScanOperator.getOperatorName() + "%"),
      ConstantPropagateProcFactory.getTableScanProc());
  opRules.put(new RuleRegExp("R8", ScriptOperator.getOperatorName() + "%"),
      ConstantPropagateProcFactory.getStopProc());

  // The dispatcher fires the processor corresponding to the closest matching
  // rule and passes the context along
  SemanticDispatcher disp =
      new DefaultRuleDispatcher(ConstantPropagateProcFactory.getDefaultProc(), opRules, cppCtx);
  SemanticGraphWalker ogw = new ConstantPropagateWalker(disp);

  // Create a list of operator nodes to start the walking.
  ArrayList<Node> topNodes = new ArrayList<Node>();
  topNodes.addAll(pGraphContext.getTopOps().values());
  ogw.startWalking(topNodes, null);

  for (Operator<? extends Serializable> opToDelete : cppCtx.getOpToDelete()) {
    if (opToDelete.getParentOperators() == null || opToDelete.getParentOperators().size() != 1) {
      throw new RuntimeException("Error pruning operator " + opToDelete + ". It should have only 1 parent.");
    }
    opToDelete.getParentOperators().get(0).removeChildAndAdoptItsChildren(opToDelete);
  }
  cppCtx.getOpToDelete().clear();
  return pGraphContext;
}
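Each ConstantPropagateProcFactory.get*Proc() call above returns a SemanticNodeProcessor specialized for one operator type. Here is a hedged skeleton of the shape such a processor takes; the class name and body are illustrative, and the real implementations live in org.apache.hadoop.hive.ql.optimizer.ConstantPropagateProcFactory.

// Illustrative skeleton only. Imports as in the counting sketch above, plus
// FilterOperator and ConstantPropagateProcCtx.
static class IllustrativeFilterProc implements SemanticNodeProcessor {
  @Override
  public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
      Object... nodeOutputs) throws SemanticException {
    FilterOperator op = (FilterOperator) nd;  // rule "FIL%" matched, so nd is a FilterOperator
    ConstantPropagateProcCtx cppCtx = (ConstantPropagateProcCtx) procCtx;
    // A real processor would fold constant subexpressions in op's predicate
    // here, and register op in cppCtx.getOpToDelete() once the predicate
    // folds away entirely.
    return null;
  }
}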
Use of org.apache.hadoop.hive.ql.lib.SemanticGraphWalker in project hive by apache.
From the class CountDistinctRewriteProc, method transform.
@Override
public ParseContext transform(ParseContext pctx) throws SemanticException {
  Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();

  // process the group-by pattern: GroupBy - ReduceSink - GroupBy
  opRules.put(new RuleRegExp("R1",
      GroupByOperator.getOperatorName() + "%"
          + ReduceSinkOperator.getOperatorName() + "%"
          + GroupByOperator.getOperatorName() + "%"),
      getCountDistinctProc(pctx));

  // The dispatcher fires the processor corresponding to the closest matching
  // rule and passes the context along
  SemanticDispatcher disp = new DefaultRuleDispatcher(getDefaultProc(), opRules, null);
  SemanticGraphWalker ogw = new DefaultGraphWalker(disp);

  // Create a list of top operator nodes to start the walk from
  List<Node> topNodes = new ArrayList<Node>();
  topNodes.addAll(pctx.getTopOps().values());
  ogw.startWalking(topNodes, null);
  return pctx;
}
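The R1 pattern here spans three operators. RuleRegExp interprets its pattern as a Java regular expression matched against the walker's current operator stack, serialized as operator names each followed by a literal "%", so the rule fires only when the stack ends in GroupBy, ReduceSink, GroupBy: the shape of a count(distinct) plan. Assuming the standard Hive operator name constants ("GBY" for group-by, "RS" for reduce sink), the composed pattern looks like this:

// Fragment: how the three-operator rule string above is composed.
String pattern = GroupByOperator.getOperatorName() + "%"   // "GBY%"
    + ReduceSinkOperator.getOperatorName() + "%"           // "RS%"
    + GroupByOperator.getOperatorName() + "%";             // "GBY%"
SemanticRule r1 = new RuleRegExp("R1", pattern);           // matches a GBY, RS, GBY stack suffix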
Use of org.apache.hadoop.hive.ql.lib.SemanticGraphWalker in project hive by apache.
From the class BucketingSortingReduceSinkOptimizer, method transform.
@Override
public ParseContext transform(ParseContext pctx) throws SemanticException {
  Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();

  // process reduce sink added by hive.enforce.bucketing or hive.enforce.sorting
  opRules.put(new RuleRegExp("R1",
      ReduceSinkOperator.getOperatorName() + "%"
          + SelectOperator.getOperatorName() + "%"
          + FileSinkOperator.getOperatorName() + "%"),
      getBucketSortReduceSinkProc(pctx));

  // The dispatcher fires the processor corresponding to the closest matching rule
  SemanticDispatcher disp = new DefaultRuleDispatcher(getDefaultProc(), opRules, null);
  SemanticGraphWalker ogw = new DefaultGraphWalker(disp);

  // Create a list of top nodes
  ArrayList<Node> topNodes = new ArrayList<Node>();
  topNodes.addAll(pctx.getTopOps().values());
  ogw.startWalking(topNodes, null);
  return pctx;
}
Use of org.apache.hadoop.hive.ql.lib.SemanticGraphWalker in project hive by apache.
From the class BucketMapJoinOptimizer, method transform.
public ParseContext transform(ParseContext pctx) throws SemanticException {
  Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
  BucketJoinProcCtx bucketMapJoinOptimizeCtx = new BucketJoinProcCtx(pctx.getConf());

  // process map joins with the no-reducers pattern
  opRules.put(new RuleRegExp("R1", MapJoinOperator.getOperatorName() + "%"),
      getBucketMapjoinProc(pctx));

  // The dispatcher fires the processor corresponding to the closest matching
  // rule and passes the context along
  SemanticDispatcher disp = new DefaultRuleDispatcher(getDefaultProc(), opRules, bucketMapJoinOptimizeCtx);
  SemanticGraphWalker ogw = new DefaultGraphWalker(disp);

  // Create a list of top operator nodes
  List<Node> topNodes = new ArrayList<Node>();
  topNodes.addAll(pctx.getTopOps().values());
  ogw.startWalking(topNodes, null);
  return pctx;
}
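Unlike the two transforms before it, this one hands a context object (bucketMapJoinOptimizeCtx) to DefaultRuleDispatcher as its third argument; the dispatcher passes that same object into every processor invocation, which is how rule processors share state with the optimizer pass that launched the walk. A hedged sketch of the receiving side follows; the class and its body are illustrative, not Hive's actual bucket map join processor.

// Illustrative skeleton only. Imports as in the counting sketch above, plus
// MapJoinOperator and BucketJoinProcCtx.
static class IllustrativeBucketMapJoinProc implements SemanticNodeProcessor {
  @Override
  public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
      Object... nodeOutputs) throws SemanticException {
    BucketJoinProcCtx ctx = (BucketJoinProcCtx) procCtx;  // the instance given to the dispatcher
    MapJoinOperator mapJoinOp = (MapJoinOperator) nd;     // rule "MAPJOIN%" matched
    // A real processor would check here whether mapJoinOp qualifies as a
    // bucket map join and record the outcome in ctx.
    return null;
  }
}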