
Example 81 with SemanticGraphWalker

Use of org.apache.hadoop.hive.ql.lib.SemanticGraphWalker in project hive by apache.

From the class SortedDynPartitionTimeGranularityOptimizer, method transform.

@Override
public ParseContext transform(ParseContext pCtx) throws SemanticException {
    // create a walker which walks the tree in a DFS manner while maintaining the
    // operator stack. The dispatcher generates the plan from the operator tree
    Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
    String FS = FileSinkOperator.getOperatorName() + "%";
    opRules.put(new RuleRegExp("Sorted Dynamic Partition Time Granularity", FS), getSortDynPartProc(pCtx));
    SemanticDispatcher disp = new DefaultRuleDispatcher(null, opRules, null);
    SemanticGraphWalker ogw = new DefaultGraphWalker(disp);
    ArrayList<Node> topNodes = new ArrayList<Node>();
    topNodes.addAll(pCtx.getTopOps().values());
    ogw.startWalking(topNodes, null);
    return pCtx;
}
Also used : SemanticRule(org.apache.hadoop.hive.ql.lib.SemanticRule) DefaultRuleDispatcher(org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher) DefaultGraphWalker(org.apache.hadoop.hive.ql.lib.DefaultGraphWalker) Node(org.apache.hadoop.hive.ql.lib.Node) RuleRegExp(org.apache.hadoop.hive.ql.lib.RuleRegExp) ArrayList(java.util.ArrayList) SemanticGraphWalker(org.apache.hadoop.hive.ql.lib.SemanticGraphWalker) LinkedHashMap(java.util.LinkedHashMap) SemanticDispatcher(org.apache.hadoop.hive.ql.lib.SemanticDispatcher) SemanticNodeProcessor(org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor)
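
The processor returned by getSortDynPartProc(pCtx) is not shown above. The sketch below only illustrates the shape of a SemanticNodeProcessor that such an "FS%" rule would invoke; the class name and body are hypothetical, not Hive's actual sorted-dynamic-partition logic.

import java.util.Stack;
import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor;
import org.apache.hadoop.hive.ql.parse.SemanticException;

public class FileSinkRuleProcessorSketch implements SemanticNodeProcessor {

    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException {
        // nd is the operator matched by the "FS%" rule, i.e. a FileSinkOperator;
        // stack holds the operators the walker traversed to reach it.
        FileSinkOperator fsOp = (FileSinkOperator) nd;
        // ... inspect or rewrite the plan around fsOp here ...
        return null;
    }
}

With DefaultGraphWalker, the value returned for each child node is passed back in as nodeOutputs when the parent is dispatched.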

Example 82 with SemanticGraphWalker

Use of org.apache.hadoop.hive.ql.lib.SemanticGraphWalker in project hive by apache.

From the class TablePropertyEnrichmentOptimizer, method transform.

@Override
public ParseContext transform(ParseContext pctx) throws SemanticException {
    LOG.info("TablePropertyEnrichmentOptimizer::transform().");
    Map<SemanticRule, SemanticNodeProcessor> opRules = Maps.newLinkedHashMap();
    opRules.put(new RuleRegExp("R1", TableScanOperator.getOperatorName() + "%"), new Processor());
    WalkerCtx context = new WalkerCtx(pctx.getConf());
    SemanticDispatcher disp = new DefaultRuleDispatcher(null, opRules, context);
    List<Node> topNodes = Lists.newArrayList();
    topNodes.addAll(pctx.getTopOps().values());
    SemanticGraphWalker walker = new PreOrderWalker(disp);
    walker.startWalking(topNodes, null);
    LOG.info("TablePropertyEnrichmentOptimizer::transform() complete!");
    return pctx;
}
Also used : SemanticRule(org.apache.hadoop.hive.ql.lib.SemanticRule) SemanticNodeProcessor(org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor) DefaultRuleDispatcher(org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher) Node(org.apache.hadoop.hive.ql.lib.Node) RuleRegExp(org.apache.hadoop.hive.ql.lib.RuleRegExp) SemanticDispatcher(org.apache.hadoop.hive.ql.lib.SemanticDispatcher) SemanticGraphWalker(org.apache.hadoop.hive.ql.lib.SemanticGraphWalker) PreOrderWalker(org.apache.hadoop.hive.ql.lib.PreOrderWalker)
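
The WalkerCtx handed to DefaultRuleDispatcher above is passed to each matched processor as its NodeProcessorCtx; that is how per-walk state is shared. A minimal sketch of the hand-off, with a hypothetical context class standing in for the optimizer's private WalkerCtx:

import java.util.Stack;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor;
import org.apache.hadoop.hive.ql.parse.SemanticException;

// Hypothetical stand-in for the optimizer's WalkerCtx. NodeProcessorCtx declares no
// methods of its own, so the context class can hold whatever per-walk state is needed.
class MyWalkerCtx implements NodeProcessorCtx {
    // e.g. the HiveConf and any table properties collected while walking
}

class ContextAwareProcessor implements SemanticNodeProcessor {

    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException {
        // The same instance that was given to DefaultRuleDispatcher arrives here on every match.
        MyWalkerCtx ctx = (MyWalkerCtx) procCtx;
        // ... record findings about the matched TableScanOperator in ctx ...
        return null;
    }
}

Note also that PreOrderWalker dispatches a node before walking its children, whereas DefaultGraphWalker in the previous example dispatches children first.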

Example 83 with SemanticGraphWalker

Use of org.apache.hadoop.hive.ql.lib.SemanticGraphWalker in project hive by apache.

From the class SemanticAnalyzer, method replaceViewReferenceWithDefinition.

private void replaceViewReferenceWithDefinition(QB qb, Table tab, String tab_name, String alias) throws SemanticException {
    ASTNode viewTree;
    final ASTNodeOrigin viewOrigin = new ASTNodeOrigin("VIEW", tab.getTableName(), tab.getViewExpandedText(), alias, qb.getParseInfo().getSrcForAlias(alias));
    try {
        // Reparse text, passing null for context to avoid clobbering
        // the top-level token stream.
        String viewFullyQualifiedName = tab.getCompleteName();
        String viewText = tab.getViewExpandedText();
        TableMask viewMask = new TableMask(this, conf, false);
        viewTree = ParseUtils.parse(viewText, ctx, tab.getCompleteName());
        cacheTableHelper.populateCacheForView(ctx.getParsedTables(), conf, getTxnMgr(), tab.getDbName(), tab.getTableName());
        if (viewMask.isEnabled() && analyzeRewrite == null) {
            ParseResult parseResult = rewriteASTWithMaskAndFilter(viewMask, viewTree, ctx.getViewTokenRewriteStream(viewFullyQualifiedName), ctx, db);
            viewTree = parseResult.getTree();
        }
        SemanticDispatcher nodeOriginDispatcher = new SemanticDispatcher() {

            @Override
            public Object dispatch(Node nd, java.util.Stack<Node> stack, Object... nodeOutputs) {
                ((ASTNode) nd).setOrigin(viewOrigin);
                return null;
            }
        };
        SemanticGraphWalker nodeOriginTagger = new DefaultGraphWalker(nodeOriginDispatcher);
        nodeOriginTagger.startWalking(java.util.Collections.<Node>singleton(viewTree), null);
    } catch (ParseException e) {
        // A user could encounter this if a stored view definition contains
        // an old SQL construct which has been eliminated in a later Hive
        // version, so we need to provide full debugging info to help
        // with fixing the view definition.
        LOG.error("Failed to replaceViewReferenceWithDefinition", e);
        StringBuilder sb = new StringBuilder();
        sb.append(e.getMessage());
        ASTErrorUtils.renderOrigin(sb, viewOrigin);
        throw new SemanticException(sb.toString(), e);
    }
    QBExpr qbexpr = new QBExpr(alias);
    doPhase1QBExpr(viewTree, qbexpr, qb.getId(), alias, true, null);
    // pass the underlying table so scan columns can be checked, unless authorization is
    // skipped or this reference is inside a view; always pass it if HIVE_STATS_COLLECT_SCANCOLS is enabled.
    if ((!this.skipAuthorization() && !qb.isInsideView()) || HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_STATS_COLLECT_SCANCOLS)) {
        qb.rewriteViewToSubq(alias, tab_name, qbexpr, tab);
    } else {
        qb.rewriteViewToSubq(alias, tab_name, qbexpr, null);
    }
}
Also used : Node(org.apache.hadoop.hive.ql.lib.Node) DefaultGraphWalker(org.apache.hadoop.hive.ql.lib.DefaultGraphWalker) SemanticGraphWalker(org.apache.hadoop.hive.ql.lib.SemanticGraphWalker) SemanticDispatcher(org.apache.hadoop.hive.ql.lib.SemanticDispatcher) HivePrivilegeObject(org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject) CalciteSemanticException(org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException)
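
The anonymous SemanticDispatcher above is used without a rule map: DefaultGraphWalker simply invokes dispatch() for every node reachable from the start node. A sketch of the same bare-dispatcher idiom, here just counting AST nodes instead of tagging their origin (an illustrative helper, not part of SemanticAnalyzer):

import java.util.Collections;
import java.util.Stack;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.SemanticDispatcher;
import org.apache.hadoop.hive.ql.lib.SemanticGraphWalker;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.SemanticException;

public class AstNodeCounterSketch {

    public static int countNodes(ASTNode root) throws SemanticException {
        final AtomicInteger count = new AtomicInteger();
        SemanticDispatcher disp = new SemanticDispatcher() {
            @Override
            public Object dispatch(Node nd, Stack<Node> stack, Object... nodeOutputs) {
                // invoked once per node; DefaultGraphWalker dispatches children before parents
                count.incrementAndGet();
                return null;
            }
        };
        SemanticGraphWalker walker = new DefaultGraphWalker(disp);
        walker.startWalking(Collections.<Node>singleton(root), null);
        return count.get();
    }
}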

Example 84 with SemanticGraphWalker

Use of org.apache.hadoop.hive.ql.lib.SemanticGraphWalker in project hive by apache.

From the class NullScanOptimizer, method resolve.

@Override
public PhysicalContext resolve(PhysicalContext pctx) throws SemanticException {
    Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<>();
    opRules.put(new RuleRegExp("R1", TableScanOperator.getOperatorName() + "%.*" + FilterOperator.getOperatorName() + "%"), new WhereFalseProcessor());
    SemanticDispatcher disp = new NullScanTaskDispatcher(pctx, opRules);
    SemanticGraphWalker ogw = new DefaultGraphWalker(disp);
    List<Node> topNodes = new ArrayList<>(pctx.getRootTasks());
    ogw.startWalking(topNodes, null);
    opRules.clear();
    opRules.put(new RuleRegExp("R1", TableScanOperator.getOperatorName() + "%"), new TSMarker());
    opRules.put(new RuleRegExp("R2", LimitOperator.getOperatorName() + "%"), new Limit0Processor());
    disp = new NullScanTaskDispatcher(pctx, opRules);
    ogw = new DefaultGraphWalker(disp);
    topNodes = new ArrayList<>(pctx.getRootTasks());
    ogw.startWalking(topNodes, null);
    return pctx;
}
Also used : SemanticRule(org.apache.hadoop.hive.ql.lib.SemanticRule) DefaultGraphWalker(org.apache.hadoop.hive.ql.lib.DefaultGraphWalker) Node(org.apache.hadoop.hive.ql.lib.Node) RuleRegExp(org.apache.hadoop.hive.ql.lib.RuleRegExp) ArrayList(java.util.ArrayList) SemanticGraphWalker(org.apache.hadoop.hive.ql.lib.SemanticGraphWalker) LinkedHashMap(java.util.LinkedHashMap) SemanticDispatcher(org.apache.hadoop.hive.ql.lib.SemanticDispatcher) SemanticNodeProcessor(org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor)
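
The two passes above differ only in their rules: "TS%.*FIL%" fires on a FilterOperator that appears somewhere below a TableScanOperator in the operator tree, while "TS%" and "LIM%" fire on every TableScanOperator and LimitOperator respectively, because RuleRegExp matches its pattern against the operator names on the walker's current stack. A sketch of assembling the second rule map; the class, method, and parameter names are hypothetical, and the processors are taken as arguments:

import java.util.LinkedHashMap;
import java.util.Map;
import org.apache.hadoop.hive.ql.exec.LimitOperator;
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
import org.apache.hadoop.hive.ql.lib.RuleRegExp;
import org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor;
import org.apache.hadoop.hive.ql.lib.SemanticRule;

public class NullScanRulesSketch {

    public static Map<SemanticRule, SemanticNodeProcessor> buildSecondPassRules(
            SemanticNodeProcessor tsMarker, SemanticNodeProcessor limitZeroProcessor) {
        // LinkedHashMap preserves rule insertion order, mirroring the examples above.
        Map<SemanticRule, SemanticNodeProcessor> rules = new LinkedHashMap<>();
        // getOperatorName() returns the short prefix used in rule patterns, e.g. "TS" and "LIM".
        rules.put(new RuleRegExp("R1", TableScanOperator.getOperatorName() + "%"), tsMarker);
        rules.put(new RuleRegExp("R2", LimitOperator.getOperatorName() + "%"), limitZeroProcessor);
        return rules;
    }
}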

Example 85 with SemanticGraphWalker

Use of org.apache.hadoop.hive.ql.lib.SemanticGraphWalker in project hive by apache.

From the class CombineEquivalentWorkResolver, method resolve.

@Override
public PhysicalContext resolve(PhysicalContext pctx) throws SemanticException {
    this.pctx = pctx;
    List<Node> topNodes = new ArrayList<Node>();
    topNodes.addAll(pctx.getRootTasks());
    // use a pre-order walker so that DPP sink works are visited (and combined) first
    SemanticGraphWalker taskWalker = new PreOrderWalker(new EquivalentWorkMatcher());
    HashMap<Node, Object> nodeOutput = Maps.newHashMap();
    taskWalker.startWalking(topNodes, nodeOutput);
    return pctx;
}
Also used : Node(org.apache.hadoop.hive.ql.lib.Node) ArrayList(java.util.ArrayList) SemanticGraphWalker(org.apache.hadoop.hive.ql.lib.SemanticGraphWalker) PreOrderWalker(org.apache.hadoop.hive.ql.lib.PreOrderWalker)
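
Unlike the earlier examples, this one passes a non-null map to startWalking, so the walker records dispatcher return values in it, keyed by node (exactly which nodes end up in the map depends on the walker implementation). A sketch of consuming that map after the walk, reusing taskWalker and topNodes from the example above; illustrative only, since the resolver itself does not read the collected outputs:

HashMap<Node, Object> nodeOutput = Maps.newHashMap();
taskWalker.startWalking(topNodes, nodeOutput);
for (Map.Entry<Node, Object> entry : nodeOutput.entrySet()) {
    // entry.getKey() is a walked node, entry.getValue() is what the dispatcher returned for it
    Object perNodeResult = entry.getValue();
    // ... act on per-node results here ...
}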

Aggregations

SemanticGraphWalker (org.apache.hadoop.hive.ql.lib.SemanticGraphWalker) 87
Node (org.apache.hadoop.hive.ql.lib.Node) 84
SemanticDispatcher (org.apache.hadoop.hive.ql.lib.SemanticDispatcher) 84
DefaultRuleDispatcher (org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher) 78
ArrayList (java.util.ArrayList) 77
SemanticNodeProcessor (org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor) 66
LinkedHashMap (java.util.LinkedHashMap) 65
SemanticRule (org.apache.hadoop.hive.ql.lib.SemanticRule) 65
RuleRegExp (org.apache.hadoop.hive.ql.lib.RuleRegExp) 56
DefaultGraphWalker (org.apache.hadoop.hive.ql.lib.DefaultGraphWalker) 54
HashMap (java.util.HashMap) 19
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException) 14
ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc) 14
ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc) 13
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc) 13
MapJoinOperator (org.apache.hadoop.hive.ql.exec.MapJoinOperator) 10
ReduceSinkOperator (org.apache.hadoop.hive.ql.exec.ReduceSinkOperator) 10
ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc) 10
Test (org.junit.Test) 10
List (java.util.List) 9