Use of org.apache.hadoop.hive.ql.lib.Dispatcher in the Apache Hive project:
class SyntheticJoinPredicate, method transform.
@Override
public ParseContext transform(ParseContext pctx) throws SemanticException {
    // Synthetic join predicates only pay off when dynamic partition pruning
    // is in play: Tez with DPP switched on, or Spark with any DPP mode.
    String queryEngine = pctx.getConf().getVar(ConfVars.HIVE_EXECUTION_ENGINE);
    boolean tezDpp = queryEngine.equals("tez")
        && pctx.getConf().getBoolVar(ConfVars.TEZ_DYNAMIC_PARTITION_PRUNING);
    boolean sparkDpp = queryEngine.equals("spark") && pctx.getConf().isSparkDPPAny();
    if (!tezDpp && !sparkDpp) {
        // Nothing to do; hand the parse context back untouched.
        return pctx;
    }

    // Match a TableScan feeding (possibly through other operators) a
    // ReduceSink that lands on a Join; that is where a synthetic predicate
    // can be injected.
    Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>();
    String pattern = "(" + TableScanOperator.getOperatorName() + "%" + ".*"
        + ReduceSinkOperator.getOperatorName() + "%"
        + JoinOperator.getOperatorName() + "%)";
    opRules.put(new RuleRegExp("R1", pattern), new JoinSynthetic());

    // The dispatcher fires the processor for the closest matching rule and
    // threads the shared context through each visit.
    SyntheticContext context = new SyntheticContext(pctx);
    Dispatcher disp = new DefaultRuleDispatcher(null, opRules, context);
    GraphWalker walker = new PreOrderOnceWalker(disp);

    // Walk the operator DAG starting from every top-level operator.
    List<Node> topNodes = new ArrayList<Node>(pctx.getTopOps().values());
    walker.startWalking(topNodes, null);
    return pctx;
}
Use of org.apache.hadoop.hive.ql.lib.Dispatcher in the Apache Flink project:
class HiveParserSemanticAnalyzer, method replaceViewReferenceWithDefinition.
private void replaceViewReferenceWithDefinition(HiveParserQB qb, Table tab, String tabName, String alias) throws SemanticException {
    // Record where the view text originated so that errors raised inside the
    // expanded definition can be traced back to the view, not the outer query.
    final HiveParserASTNodeOrigin origin = new HiveParserASTNodeOrigin(
            "VIEW",
            tab.getTableName(),
            tab.getViewExpandedText(),
            alias,
            qb.getParseInfo().getSrcForAlias(alias));

    HiveParserASTNode viewAst;
    try {
        // Re-parse the expanded view text; passing null for context avoids
        // clobbering the top-level token stream.
        viewAst = HiveASTParseUtils.parse(tab.getViewExpandedText(), ctx, tab.getCompleteName());

        // Tag every AST node of the re-parsed tree with the view's origin.
        Dispatcher originTagger = (nd, stack, nodeOutputs) -> {
            ((HiveParserASTNode) nd).setOrigin(origin);
            return null;
        };
        GraphWalker walker = new HiveParserDefaultGraphWalker(originTagger);
        walker.startWalking(Collections.singleton(viewAst), null);
    } catch (HiveASTParseException e) {
        // A stored view definition may use an old SQL construct that a later
        // Hive version has dropped, so surface full debugging info to help
        // with fixing the view definition.
        LOG.error(org.apache.hadoop.util.StringUtils.stringifyException(e));
        throw new SemanticException(e.getMessage(), e);
    }

    HiveParserQBExpr qbExpr = new HiveParserQBExpr(alias);
    doPhase1QBExpr(viewAst, qbExpr, qb.getId(), alias, true);

    // If authorization applies (and we are not already inside a view), or
    // HIVE_STATS_COLLECT_SCANCOLS is enabled, the rewrite keeps the original
    // table reference; otherwise the table is dropped from the subquery.
    boolean keepTableRef =
            (!this.skipAuthorization()
                    && !qb.isInsideView()
                    && HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED))
            || HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_STATS_COLLECT_SCANCOLS);
    qb.rewriteViewToSubq(alias, tabName, qbExpr, keepTableRef ? tab : null);
}
Use of org.apache.hadoop.hive.ql.lib.Dispatcher in the Apache Hive project:
class SerializeFilter, method resolve.
@Override
public PhysicalContext resolve(PhysicalContext pctx) throws SemanticException {
    // NOTE(review): the original body called pctx.getConf() and discarded the
    // result — a no-op statement; it has been removed.

    // Create the dispatcher and graph walker that will serialize filters.
    Dispatcher disp = new Serializer(pctx);
    TaskGraphWalker walker = new TaskGraphWalker(disp);

    // Collect all task nodes reachable from the root tasks and walk the
    // task tree; program to the List interface rather than ArrayList.
    List<Node> topNodes = new ArrayList<Node>(pctx.getRootTasks());
    walker.startWalking(topNodes, null);
    return pctx;
}
Use of org.apache.hadoop.hive.ql.lib.Dispatcher in the Apache Hive project:
class SortMergeJoinResolver, method resolve.
@Override
public PhysicalContext resolve(PhysicalContext pctx) throws SemanticException {
    // Dispatcher that attempts the sort-merge-join task rewrite, driven by a
    // walker over the task graph.
    Dispatcher dispatcher = new SortMergeJoinTaskDispatcher(pctx);
    TaskGraphWalker walker = new TaskGraphWalker(dispatcher);

    // Seed the walk with every root task and traverse the task tree.
    List<Node> rootNodes = new ArrayList<Node>(pctx.rootTasks);
    walker.startWalking(rootNodes, null);
    return pctx;
}
Use of org.apache.hadoop.hive.ql.lib.Dispatcher in the Apache Hive project:
class TableAccessAnalyzer, method analyzeTableAccess.
public TableAccessInfo analyzeTableAccess() throws SemanticException {
    // Rules routing group-by and join operators to their processors; the
    // same JoinProcessor handles both common joins and map joins.
    Map<Rule, NodeProcessor> rules = new LinkedHashMap<Rule, NodeProcessor>();
    rules.put(new RuleRegExp("R1", GroupByOperator.getOperatorName() + "%"),
            new GroupByProcessor(pGraphContext));
    rules.put(new RuleRegExp("R2", JoinOperator.getOperatorName() + "%"),
            new JoinProcessor(pGraphContext));
    rules.put(new RuleRegExp("R3", MapJoinOperator.getOperatorName() + "%"),
            new JoinProcessor(pGraphContext));

    // Context accumulating the table-access info while walking the DAG.
    TableAccessCtx accessCtx = new TableAccessCtx();
    Dispatcher dispatcher = new DefaultRuleDispatcher(getDefaultProc(), rules, accessCtx);
    GraphWalker walker = new DefaultGraphWalker(dispatcher);

    // Walk the operator DAG from every top-level operator.
    List<Node> startNodes = new ArrayList<Node>(pGraphContext.getTopOps().values());
    walker.startWalking(startNodes, null);
    return accessCtx.getTableAccessInfo();
}
Aggregations