Use of org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor in project hive by apache.
The class PredicatePushDown, method transform:
@Override
public ParseContext transform(ParseContext pctx) throws SemanticException {
  pGraphContext = pctx;

  // create the context for walking operators
  OpWalkerInfo opWalkerInfo = new OpWalkerInfo(pGraphContext);

  Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
  opRules.put(new RuleRegExp("R1", FilterOperator.getOperatorName() + "%"), OpProcFactory.getFilterProc());
  opRules.put(new RuleRegExp("R2", PTFOperator.getOperatorName() + "%"), OpProcFactory.getPTFProc());
  opRules.put(new RuleRegExp("R3", CommonJoinOperator.getOperatorName() + "%"), OpProcFactory.getJoinProc());
  opRules.put(new RuleRegExp("R4", TableScanOperator.getOperatorName() + "%"), OpProcFactory.getTSProc());
  opRules.put(new RuleRegExp("R5", ScriptOperator.getOperatorName() + "%"), OpProcFactory.getSCRProc());
  opRules.put(new RuleRegExp("R6", LimitOperator.getOperatorName() + "%"), OpProcFactory.getLIMProc());
  opRules.put(new RuleRegExp("R7", UDTFOperator.getOperatorName() + "%"), OpProcFactory.getUDTFProc());
  opRules.put(new RuleRegExp("R8", LateralViewForwardOperator.getOperatorName() + "%"), OpProcFactory.getLVFProc());
  opRules.put(new RuleRegExp("R9", LateralViewJoinOperator.getOperatorName() + "%"), OpProcFactory.getLVJProc());
  opRules.put(new RuleRegExp("R10", ReduceSinkOperator.getOperatorName() + "%"), OpProcFactory.getRSProc());
  opRules.put(new RuleRegExp("R11", GroupByOperator.getOperatorName() + "%"), OpProcFactory.getGBYProc());

  // The dispatcher fires the processor corresponding to the closest matching
  // rule and passes the context along
  SemanticDispatcher disp = new DefaultRuleDispatcher(OpProcFactory.getDefaultProc(), opRules, opWalkerInfo);
  SemanticGraphWalker ogw = new DefaultGraphWalker(disp);

  // Create a list of top operator nodes
  ArrayList<Node> topNodes = new ArrayList<Node>();
  topNodes.addAll(pGraphContext.getTopOps().values());
  ogw.startWalking(topNodes, null);

  if (LOG.isDebugEnabled()) {
    LOG.debug("After PPD:\n" + Operator.toString(pctx.getTopOps().values()));
  }
  return pGraphContext;
}
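Every value in the rule map above is a SemanticNodeProcessor, the single-method callback interface this page tracks. As a minimal sketch (a hypothetical class, not part of Hive; Hive's real processors live in OpProcFactory), a processor that counts the FilterOperators the walker dispatches to it could look like this:

import java.util.Stack;

import org.apache.hadoop.hive.ql.exec.FilterOperator;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor;
import org.apache.hadoop.hive.ql.parse.SemanticException;

// Hypothetical processor for illustration only.
public class FilterCountingProc implements SemanticNodeProcessor {

  private int filterCount = 0;

  @Override
  public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
      Object... nodeOutputs) throws SemanticException {
    // nd is the matched node; stack holds the walk path from a root down to nd
    if (nd instanceof FilterOperator) {
      filterCount++;
    }
    // the return value is recorded as this node's output for later processors
    return null;
  }

  public int getFilterCount() {
    return filterCount;
  }
}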
Use of org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor in project hive by apache.
The class SimplePredicatePushDown, method transform:
@Override
public ParseContext transform(ParseContext pctx) throws SemanticException {
  pGraphContext = pctx;

  // create the context for walking operators
  OpWalkerInfo opWalkerInfo = new OpWalkerInfo(pGraphContext);

  Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
  opRules.put(new RuleRegExp("R1", FilterOperator.getOperatorName() + "%"), OpProcFactory.getFilterSyntheticJoinPredicateProc());
  opRules.put(new RuleRegExp("R2", PTFOperator.getOperatorName() + "%"), OpProcFactory.getPTFProc());
  opRules.put(new RuleRegExp("R3", CommonJoinOperator.getOperatorName() + "%"), OpProcFactory.getJoinProc());
  opRules.put(new RuleRegExp("R4", TableScanOperator.getOperatorName() + "%"), OpProcFactory.getTSProc());
  opRules.put(new RuleRegExp("R5", ScriptOperator.getOperatorName() + "%"), OpProcFactory.getSCRProc());
  opRules.put(new RuleRegExp("R6", LimitOperator.getOperatorName() + "%"), OpProcFactory.getLIMProc());
  opRules.put(new RuleRegExp("R7", UDTFOperator.getOperatorName() + "%"), OpProcFactory.getUDTFProc());
  opRules.put(new RuleRegExp("R8", LateralViewForwardOperator.getOperatorName() + "%"), OpProcFactory.getLVFProc());
  opRules.put(new RuleRegExp("R9", LateralViewJoinOperator.getOperatorName() + "%"), OpProcFactory.getLVJProc());
  opRules.put(new RuleRegExp("R10", ReduceSinkOperator.getOperatorName() + "%"), OpProcFactory.getRSProc());

  // The dispatcher fires the processor corresponding to the closest matching
  // rule and passes the context along
  SemanticDispatcher disp = new DefaultRuleDispatcher(OpProcFactory.getDefaultProc(), opRules, opWalkerInfo);
  SemanticGraphWalker ogw = new DefaultGraphWalker(disp);

  // Create a list of top operator nodes
  ArrayList<Node> topNodes = new ArrayList<Node>();
  topNodes.addAll(pGraphContext.getTopOps().values());
  ogw.startWalking(topNodes, null);

  if (LOG.isDebugEnabled()) {
    LOG.debug("After PPD:\n" + Operator.toString(pctx.getTopOps().values()));
  }
  return pGraphContext;
}
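SimplePredicatePushDown registers almost the same rules as PredicatePushDown above; the visible differences are that R1 uses the synthetic-join-predicate-aware filter processor and the GroupByOperator rule (R11) is absent. To see the rule/dispatch machinery in isolation, here is a self-contained toy sketch (the ToyNode class is hypothetical and its node names merely mimic Hive operator names; only the lib classes are Hive's): a rule keyed on the name "FIL" should fire for the matching node, while everything else falls through to the default processor, mirroring how the transforms above are wired.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.RuleRegExp;
import org.apache.hadoop.hive.ql.lib.SemanticDispatcher;
import org.apache.hadoop.hive.ql.lib.SemanticGraphWalker;
import org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor;
import org.apache.hadoop.hive.ql.lib.SemanticRule;
import org.apache.hadoop.hive.ql.parse.SemanticException;

public class RuleDispatchDemo {

  // Toy node type, purely for illustration; real walks run over Hive operators.
  static class ToyNode implements Node {
    private final String name;
    private final List<ToyNode> children;
    ToyNode(String name, ToyNode... children) {
      this.name = name;
      this.children = Arrays.asList(children);
    }
    @Override public List<? extends Node> getChildren() { return children; }
    @Override public String getName() { return name; }
  }

  public static void main(String[] args) throws SemanticException {
    Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<>();
    // SemanticNodeProcessor has a single abstract method, so a lambda works
    opRules.put(new RuleRegExp("R1", "FIL%"),
        (nd, stack, ctx, outputs) -> {
          System.out.println("R1 fired on " + nd.getName());
          return null;
        });

    SemanticNodeProcessor defaultProc = (nd, stack, ctx, outputs) -> {
      System.out.println("default proc on " + nd.getName());
      return null;
    };

    SemanticDispatcher disp = new DefaultRuleDispatcher(defaultProc, opRules, null);
    SemanticGraphWalker ogw = new DefaultGraphWalker(disp);

    // a small TS -> FIL -> RS chain; only the FIL node should match R1
    ArrayList<Node> topNodes = new ArrayList<>();
    topNodes.add(new ToyNode("TS", new ToyNode("FIL", new ToyNode("RS"))));
    ogw.startWalking(topNodes, null);
  }
}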
Use of org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor in project hive by apache.
The class PredicateTransitivePropagate, method transform:
@Override
public ParseContext transform(ParseContext pctx) throws SemanticException {
  pGraphContext = pctx;

  Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
  opRules.put(new RuleRegExp("R1", "(" + FilterOperator.getOperatorName() + "%" + ReduceSinkOperator.getOperatorName() + "%" + JoinOperator.getOperatorName() + "%)"), new JoinTransitive());

  // The dispatcher fires the processor corresponding to the closest matching
  // rule and passes the context along
  TransitiveContext context = new TransitiveContext();
  SemanticDispatcher disp = new DefaultRuleDispatcher(null, opRules, context);
  SemanticGraphWalker ogw = new LevelOrderWalker(disp, 2);

  // Create a list of top operator nodes
  List<Node> topNodes = new ArrayList<Node>();
  topNodes.addAll(pGraphContext.getTopOps().values());
  ogw.startWalking(topNodes, null);

  Map<ReduceSinkOperator, List<ExprNodeDesc>> newFilters = context.getNewfilters();

  // insert new filter between RS and parent of RS
  for (Map.Entry<ReduceSinkOperator, List<ExprNodeDesc>> entry : newFilters.entrySet()) {
    ReduceSinkOperator reducer = entry.getKey();
    Operator<?> parent = reducer.getParentOperators().get(0);
    List<ExprNodeDesc> exprs = entry.getValue();
    if (parent instanceof FilterOperator) {
      // fold the new predicates into the existing filter's predicate
      exprs = ExprNodeDescUtils.split(((FilterOperator) parent).getConf().getPredicate(), exprs);
      ExprNodeDesc merged = ExprNodeDescUtils.mergePredicates(exprs);
      ((FilterOperator) parent).getConf().setPredicate(merged);
    } else {
      // no existing filter: create one and wire it in between parent and reducer
      ExprNodeDesc merged = ExprNodeDescUtils.mergePredicates(exprs);
      RowSchema parentRS = parent.getSchema();
      Operator<FilterDesc> newFilter = createFilter(reducer, parent, parentRS, merged);
    }
  }
  return pGraphContext;
}
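The interesting step here is the predicate surgery: ExprNodeDescUtils.split breaks an existing filter predicate into conjuncts, and mergePredicates ANDs a list of conjuncts back into a single expression. A minimal sketch of the merge half, using hypothetical boolean columns a and b on a table alias t (the demo class is not Hive source):

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDescUtils;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class MergePredicatesDemo {
  public static void main(String[] args) {
    // two standalone boolean predicates, standing in for transitive join predicates
    ExprNodeDesc p1 = new ExprNodeColumnDesc(TypeInfoFactory.booleanTypeInfo, "a", "t", false);
    ExprNodeDesc p2 = new ExprNodeColumnDesc(TypeInfoFactory.booleanTypeInfo, "b", "t", false);

    List<ExprNodeDesc> conjuncts = new ArrayList<>();
    conjuncts.add(p1);
    conjuncts.add(p2);

    // the same call the loop above uses to combine predicates into one filter condition
    ExprNodeDesc merged = ExprNodeDescUtils.mergePredicates(conjuncts);
    System.out.println(merged.getExprString()); // expect an AND over the two columns
  }
}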
Use of org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor in project hive by apache.
The class SyntheticJoinPredicate, method transform:
@Override
public ParseContext transform(ParseContext pctx) throws SemanticException {
  boolean enabled = false;
  String queryEngine = pctx.getConf().getVar(ConfVars.HIVE_EXECUTION_ENGINE);
  if (queryEngine.equals("tez") && pctx.getConf().getBoolVar(ConfVars.TEZ_DYNAMIC_PARTITION_PRUNING)) {
    enabled = true;
  } else if (queryEngine.equals("spark") && pctx.getConf().isSparkDPPAny()) {
    enabled = true;
  }
  if (!enabled) {
    return pctx;
  }

  Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
  opRules.put(new RuleRegExp("R1", "(" + TableScanOperator.getOperatorName() + "%" + ".*" + ReduceSinkOperator.getOperatorName() + "%" + JoinOperator.getOperatorName() + "%)"), new JoinSynthetic());

  // The dispatcher fires the processor corresponding to the closest matching
  // rule and passes the context along
  SyntheticContext context = new SyntheticContext(pctx);
  SemanticDispatcher disp = new DefaultRuleDispatcher(null, opRules, context);
  SemanticGraphWalker ogw = new PreOrderOnceWalker(disp);

  // Create a list of top operator nodes
  List<Node> topNodes = new ArrayList<Node>();
  topNodes.addAll(pctx.getTopOps().values());
  ogw.startWalking(topNodes, null);
  return pctx;
}
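This transform is gated on the execution engine and the dynamic partition pruning flags before any rules are registered. A small sketch of the same gate against a standalone HiveConf, reading the same ConfVars the method reads (the spark branch with isSparkDPPAny is omitted, and the demo class is not Hive source):

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;

public class SyntheticGateDemo {
  public static void main(String[] args) {
    HiveConf conf = new HiveConf();
    conf.setVar(ConfVars.HIVE_EXECUTION_ENGINE, "tez");
    conf.setBoolVar(ConfVars.TEZ_DYNAMIC_PARTITION_PRUNING, true);

    // mirrors the tez branch of the enabled check above
    boolean enabled = "tez".equals(conf.getVar(ConfVars.HIVE_EXECUTION_ENGINE))
        && conf.getBoolVar(ConfVars.TEZ_DYNAMIC_PARTITION_PRUNING);
    System.out.println("synthetic join predicates enabled: " + enabled);
  }
}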
Use of org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor in project hive by apache.
The class LineageInfo, method getLineageInfo:
/**
 * Parses the given query and collects the lineage info.
 *
 * @param query the query string to parse
 * @param ctx   the context handed to the parser
 */
public void getLineageInfo(String query, Context ctx) throws Exception {
  // Get the AST and descend to the first node that carries a token
  ASTNode tree = ParseUtils.parse(query, ctx);
  while ((tree.getToken() == null) && (tree.getChildCount() > 0)) {
    tree = (ASTNode) tree.getChild(0);
  }

  // reset the collected table lists before walking
  inputTableList.clear();
  OutputTableList.clear();

  // The dispatcher fires the processor corresponding to the closest matching
  // rule; since no rules are registered here, the default processor (this
  // object) handles every node the walker visits.
  Map<SemanticRule, SemanticNodeProcessor> rules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
  SemanticDispatcher disp = new DefaultRuleDispatcher(this, rules, null);
  SemanticGraphWalker ogw = new DefaultGraphWalker(disp);

  // Start walking from the root of the AST
  ArrayList<Node> topNodes = new ArrayList<Node>();
  topNodes.add(tree);
  ogw.startWalking(topNodes, null);
}
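Because the rule map is empty, every AST node is routed to the default processor, which is this LineageInfo object itself (the class implements SemanticNodeProcessor). A simplified sketch of what such a process callback can look like, not the exact Hive source: table references feed inputTableList and insert targets feed OutputTableList, and the child-index navigation below is an assumption about the AST shape.

@Override
public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
    Object... nodeOutputs) throws SemanticException {
  ASTNode node = (ASTNode) nd;
  if (node.getToken() == null) {
    return null;
  }
  switch (node.getToken().getType()) {
    case HiveParser.TOK_TABREF:
      // a table read in the FROM clause; child 0 is the table name subtree
      inputTableList.add(node.getChild(0).getChild(0).getText());
      break;
    case HiveParser.TOK_TAB:
      // an INSERT target table
      OutputTableList.add(node.getChild(0).getChild(0).getText());
      break;
    default:
      break;
  }
  return null;
}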