Use of org.apache.hadoop.hive.ql.lib.SemanticGraphWalker in project hive by apache.
The class SortedDynPartitionTimeGranularityOptimizer, method transform.
@Override
public ParseContext transform(ParseContext pCtx) throws SemanticException {
  // create a walker which walks the tree in a DFS manner while maintaining the
  // operator stack. The dispatcher generates the plan from the operator tree
  Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
  String FS = FileSinkOperator.getOperatorName() + "%";
  opRules.put(new RuleRegExp("Sorted Dynamic Partition Time Granularity", FS), getSortDynPartProc(pCtx));
  SemanticDispatcher disp = new DefaultRuleDispatcher(null, opRules, null);
  SemanticGraphWalker ogw = new DefaultGraphWalker(disp);
  ArrayList<Node> topNodes = new ArrayList<Node>();
  topNodes.addAll(pCtx.getTopOps().values());
  ogw.startWalking(topNodes, null);
  return pCtx;
}
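The processor returned by getSortDynPartProc(pCtx) is not shown above. As a rough sketch (a skeleton only, not Hive's actual implementation), a processor bound to the "FS%" rule receives the matched FileSinkOperator and can rewrite the plan around it:

private SemanticNodeProcessor getSortDynPartProc(ParseContext pCtx) {
  return new SemanticNodeProcessor() {
    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
        Object... nodeOutputs) throws SemanticException {
      // the "FS%" rule guarantees nd is the FileSinkOperator on top of the stack
      FileSinkOperator fsOp = (FileSinkOperator) nd;
      // ... inspect fsOp and rewrite the plan around it ...
      return null;
    }
  };
}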
Use of org.apache.hadoop.hive.ql.lib.SemanticGraphWalker in project hive by apache.
The class TablePropertyEnrichmentOptimizer, method transform.
@Override
public ParseContext transform(ParseContext pctx) throws SemanticException {
  LOG.info("TablePropertyEnrichmentOptimizer::transform().");
  Map<SemanticRule, SemanticNodeProcessor> opRules = Maps.newLinkedHashMap();
  opRules.put(new RuleRegExp("R1", TableScanOperator.getOperatorName() + "%"), new Processor());
  WalkerCtx context = new WalkerCtx(pctx.getConf());
  SemanticDispatcher disp = new DefaultRuleDispatcher(null, opRules, context);
  List<Node> topNodes = Lists.newArrayList();
  topNodes.addAll(pctx.getTopOps().values());
  SemanticGraphWalker walker = new PreOrderWalker(disp);
  walker.startWalking(topNodes, null);
  LOG.info("TablePropertyEnrichmentOptimizer::transform() complete!");
  return pctx;
}
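Both examples so far pass null as the first argument to DefaultRuleDispatcher, which makes nodes that match no rule a no-op. A hedged sketch of the alternative: supplying a default processor that catches everything the registered rules do not claim:

SemanticNodeProcessor defaultProc = new SemanticNodeProcessor() {
  @Override
  public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
      Object... nodeOutputs) throws SemanticException {
    // invoked for every node that no registered rule matches
    return null;
  }
};
SemanticDispatcher disp = new DefaultRuleDispatcher(defaultProc, opRules, context);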
Use of org.apache.hadoop.hive.ql.lib.SemanticGraphWalker in project hive by apache.
The class SemanticAnalyzer, method replaceViewReferenceWithDefinition.
private void replaceViewReferenceWithDefinition(QB qb, Table tab, String tab_name, String alias) throws SemanticException {
  ASTNode viewTree;
  final ASTNodeOrigin viewOrigin = new ASTNodeOrigin("VIEW", tab.getTableName(), tab.getViewExpandedText(), alias, qb.getParseInfo().getSrcForAlias(alias));
  try {
    // Reparse the view text under its fully qualified name so that the view
    // gets its own token rewrite stream and does not clobber the top-level
    // token stream.
    String viewFullyQualifiedName = tab.getCompleteName();
    String viewText = tab.getViewExpandedText();
    TableMask viewMask = new TableMask(this, conf, false);
    viewTree = ParseUtils.parse(viewText, ctx, tab.getCompleteName());
    cacheTableHelper.populateCacheForView(ctx.getParsedTables(), conf, getTxnMgr(), tab.getDbName(), tab.getTableName());
    if (viewMask.isEnabled() && analyzeRewrite == null) {
      ParseResult parseResult = rewriteASTWithMaskAndFilter(viewMask, viewTree, ctx.getViewTokenRewriteStream(viewFullyQualifiedName), ctx, db);
      viewTree = parseResult.getTree();
    }
    SemanticDispatcher nodeOriginDispatcher = new SemanticDispatcher() {
      @Override
      public Object dispatch(Node nd, java.util.Stack<Node> stack, Object... nodeOutputs) {
        ((ASTNode) nd).setOrigin(viewOrigin);
        return null;
      }
    };
    SemanticGraphWalker nodeOriginTagger = new DefaultGraphWalker(nodeOriginDispatcher);
    nodeOriginTagger.startWalking(java.util.Collections.<Node>singleton(viewTree), null);
  } catch (ParseException e) {
    // A user could encounter this if a stored view definition contains
    // an old SQL construct which has been eliminated in a later Hive
    // version, so we need to provide full debugging info to help
    // with fixing the view definition.
    LOG.error("Failed to replaceViewReferenceWithDefinition", e);
    StringBuilder sb = new StringBuilder();
    sb.append(e.getMessage());
    ASTErrorUtils.renderOrigin(sb, viewOrigin);
    throw new SemanticException(sb.toString(), e);
  }
  QBExpr qbexpr = new QBExpr(alias);
  doPhase1QBExpr(viewTree, qbexpr, qb.getId(), alias, true, null);
  // Pass the Table to the rewritten sub-query (so column access is checked)
  // unless authorization is skipped or we are inside a view; always pass it
  // when HIVE_STATS_COLLECT_SCANCOLS is enabled.
  if ((!this.skipAuthorization() && !qb.isInsideView()) || HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_STATS_COLLECT_SCANCOLS)) {
    qb.rewriteViewToSubq(alias, tab_name, qbexpr, tab);
  } else {
    qb.rewriteViewToSubq(alias, tab_name, qbexpr, null);
  }
}
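Since SemanticDispatcher has a single abstract method, the anonymous-class origin tagger above can also be written as a lambda. A minimal sketch of a generic "apply an action to every AST node" helper built the same way (visitAll is a hypothetical name, not a Hive API):

static void visitAll(ASTNode root, java.util.function.Consumer<ASTNode> action)
    throws SemanticException {
  SemanticDispatcher disp = (nd, stack, nodeOutputs) -> {
    action.accept((ASTNode) nd);
    return null;
  };
  new DefaultGraphWalker(disp).startWalking(java.util.Collections.<Node>singleton(root), null);
  // usage (mirroring the code above): visitAll(viewTree, node -> node.setOrigin(viewOrigin));
}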
Use of org.apache.hadoop.hive.ql.lib.SemanticGraphWalker in project hive by apache.
The class NullScanOptimizer, method resolve.
@Override
public PhysicalContext resolve(PhysicalContext pctx) throws SemanticException {
  Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<>();
  opRules.put(new RuleRegExp("R1", TableScanOperator.getOperatorName() + "%.*" + FilterOperator.getOperatorName() + "%"), new WhereFalseProcessor());
  SemanticDispatcher disp = new NullScanTaskDispatcher(pctx, opRules);
  SemanticGraphWalker ogw = new DefaultGraphWalker(disp);
  List<Node> topNodes = new ArrayList<>(pctx.getRootTasks());
  ogw.startWalking(topNodes, null);
  opRules.clear();
  opRules.put(new RuleRegExp("R1", TableScanOperator.getOperatorName() + "%"), new TSMarker());
  opRules.put(new RuleRegExp("R2", LimitOperator.getOperatorName() + "%"), new Limit0Processor());
  disp = new NullScanTaskDispatcher(pctx, opRules);
  ogw = new DefaultGraphWalker(disp);
  topNodes = new ArrayList<>(pctx.getRootTasks());
  ogw.startWalking(topNodes, null);
  return pctx;
}
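The rule patterns here are regular expressions over operator display names, so a single rule can match a path on the walker's stack rather than one node. A hedged illustration, assuming the usual Hive operator names ("TS" for TableScan, "FIL" for Filter):

// Expands to "TS%.*FIL%", a pattern over the concatenated operator names on
// the walker's stack: a TableScan somewhere above a Filter, i.e. a scan
// feeding a (possibly constant-false) filter.
String pattern = TableScanOperator.getOperatorName() + "%.*"
    + FilterOperator.getOperatorName() + "%";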
Use of org.apache.hadoop.hive.ql.lib.SemanticGraphWalker in project hive by apache.
The class CombineEquivalentWorkResolver, method resolve.
@Override
public PhysicalContext resolve(PhysicalContext pctx) throws SemanticException {
  this.pctx = pctx;
  List<Node> topNodes = new ArrayList<Node>();
  topNodes.addAll(pctx.getRootTasks());
  // use a pre-order walker so that DPP sink works are visited (and combined) first
  SemanticGraphWalker taskWalker = new PreOrderWalker(new EquivalentWorkMatcher());
  HashMap<Node, Object> nodeOutput = Maps.newHashMap();
  taskWalker.startWalking(topNodes, nodeOutput);
  return pctx;
}
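Unlike the earlier examples, this one passes a non-null map to startWalking; the walker records in it the dispatcher's result for each start node. A minimal sketch of reading those results back (the logging is illustrative, not from Hive):

HashMap<Node, Object> nodeOutput = Maps.newHashMap();
taskWalker.startWalking(topNodes, nodeOutput);
for (Map.Entry<Node, Object> entry : nodeOutput.entrySet()) {
  LOG.debug("root task {} produced {}", entry.getKey(), entry.getValue());
}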