Example 86 with SemanticGraphWalker

Use of org.apache.hadoop.hive.ql.lib.SemanticGraphWalker in project hive by apache.

From the class AnnotateWithStatistics, the transform method:

@Override
public ParseContext transform(ParseContext pctx) throws SemanticException {
    AnnotateStatsProcCtx aspCtx = new AnnotateStatsProcCtx(pctx);
    // create a walker which walks the operator tree level by level (BFS) while
    // maintaining the operator stack. The dispatcher fires the matching
    // statistics rule for each operator it visits
    Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
    opRules.put(new RuleRegExp("TS", TableScanOperator.getOperatorName() + "%"), StatsRulesProcFactory.getTableScanRule());
    opRules.put(new RuleRegExp("SEL", SelectOperator.getOperatorName() + "%"), StatsRulesProcFactory.getSelectRule());
    opRules.put(new RuleRegExp("FIL", FilterOperator.getOperatorName() + "%"), StatsRulesProcFactory.getFilterRule());
    opRules.put(new RuleRegExp("GBY", GroupByOperator.getOperatorName() + "%"), StatsRulesProcFactory.getGroupByRule());
    opRules.put(new RuleRegExp("JOIN", CommonJoinOperator.getOperatorName() + "%|" + MapJoinOperator.getOperatorName() + "%"), StatsRulesProcFactory.getJoinRule());
    opRules.put(new RuleRegExp("LIM", LimitOperator.getOperatorName() + "%"), StatsRulesProcFactory.getLimitRule());
    opRules.put(new RuleRegExp("RS", ReduceSinkOperator.getOperatorName() + "%"), StatsRulesProcFactory.getReduceSinkRule());
    opRules.put(new RuleRegExp("UDTF", UDTFOperator.getOperatorName() + "%"), StatsRulesProcFactory.getUDTFRule());
    opRules.put(new RuleRegExp("LVJ", LateralViewJoinOperator.getOperatorName() + "%"), StatsRulesProcFactory.getLateralViewJoinRule());
    // The dispatcher fires the processor corresponding to the closest matching
    // rule and passes the context along
    SemanticDispatcher disp = new DefaultRuleDispatcher(StatsRulesProcFactory.getDefaultRule(), opRules, aspCtx);
    SemanticGraphWalker ogw = new LevelOrderWalker(disp, 0);
    // Create a list of top operator nodes
    ArrayList<Node> topNodes = new ArrayList<Node>();
    topNodes.addAll(pctx.getTopOps().values());
    ogw.startWalking(topNodes, null);
    return pctx;
}
Also used: SemanticRule(org.apache.hadoop.hive.ql.lib.SemanticRule) DefaultRuleDispatcher(org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher) Node(org.apache.hadoop.hive.ql.lib.Node) RuleRegExp(org.apache.hadoop.hive.ql.lib.RuleRegExp) ArrayList(java.util.ArrayList) SemanticGraphWalker(org.apache.hadoop.hive.ql.lib.SemanticGraphWalker) LinkedHashMap(java.util.LinkedHashMap) SemanticDispatcher(org.apache.hadoop.hive.ql.lib.SemanticDispatcher) SemanticNodeProcessor(org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor) LevelOrderWalker(org.apache.hadoop.hive.ql.lib.LevelOrderWalker)
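
Both examples on this page instantiate the same pattern: a (possibly empty) rule map, a DefaultRuleDispatcher wrapping a default processor, and a SemanticGraphWalker that drives the traversal and records per-node results. The sketch below shows that pattern end to end on a synthetic tree. It is a minimal sketch, not Hive code: the WalkerPatternSketch and SimpleNode classes, the rule name "R1", and the node names "ROOT"/"LEAF" are illustrative inventions; only the org.apache.hadoop.hive.ql.lib classes are the ones used in the examples above.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Stack;
import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.RuleRegExp;
import org.apache.hadoop.hive.ql.lib.SemanticDispatcher;
import org.apache.hadoop.hive.ql.lib.SemanticGraphWalker;
import org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor;
import org.apache.hadoop.hive.ql.lib.SemanticRule;
import org.apache.hadoop.hive.ql.parse.SemanticException;

public class WalkerPatternSketch {

    // Toy Node implementation; in Hive, operators and ExprNodeDesc implement Node directly.
    static class SimpleNode implements Node {
        private final String name;
        private final List<SimpleNode> children = new ArrayList<>();

        SimpleNode(String name) {
            this.name = name;
        }

        SimpleNode addChild(SimpleNode child) {
            children.add(child);
            return this;
        }

        @Override
        public List<? extends Node> getChildren() {
            return children;
        }

        @Override
        public String getName() {
            return name;
        }
    }

    public static void main(String[] args) throws SemanticException {
        // Fired when the rule below matches the trail of node names on the walk stack.
        SemanticNodeProcessor leafProcessor = new SemanticNodeProcessor() {
            @Override
            public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
                    Object... nodeOutputs) throws SemanticException {
                return "matched " + nd.getName();
            }
        };

        // Fallback for nodes that no rule matches (records no useful output).
        SemanticNodeProcessor defaultProcessor = new SemanticNodeProcessor() {
            @Override
            public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
                    Object... nodeOutputs) throws SemanticException {
                return null;
            }
        };

        // Rule regexes follow the "<node name>%" convention seen in Example 86.
        Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<>();
        opRules.put(new RuleRegExp("R1", "LEAF%"), leafProcessor);

        SemanticDispatcher disp = new DefaultRuleDispatcher(defaultProcessor, opRules, null);
        SemanticGraphWalker walker = new DefaultGraphWalker(disp);

        SimpleNode root = new SimpleNode("ROOT")
                .addChild(new SimpleNode("LEAF"))
                .addChild(new SimpleNode("LEAF"));

        List<Node> topNodes = new ArrayList<>();
        topNodes.add(root);
        HashMap<Node, Object> nodeOutput = new HashMap<>();
        walker.startWalking(topNodes, nodeOutput);

        // Both LEAF nodes map to "matched LEAF"; ROOT maps to null via the default processor.
        System.out.println(nodeOutput);
    }
}

Note that DefaultGraphWalker dispatches a node only once all of its children have been dispatched, passing the children's results in as nodeOutputs; that post-order contract is what lets a processor fold child outputs into a result for the parent.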

Example 87 with SemanticGraphWalker

Use of org.apache.hadoop.hive.ql.lib.SemanticGraphWalker in project hive by apache.

From the class AccumuloPredicateHandler, the generateRanges method:

/**
 * Encapsulates the traversal over some {@link ExprNodeDesc} tree for the generation of Accumulo
 * Ranges using expressions involving the Accumulo rowid-mapped Hive column.
 *
 * @param conf
 *          Hadoop configuration
 * @param columnMapper
 *          Mapping of Hive to Accumulo columns for the query
 * @param hiveRowIdColumnName
 *          Name of the hive column mapped to the Accumulo rowid
 * @param root
 *          Root of some ExprNodeDesc tree to traverse, the WHERE clause
 * @return An object representing the result from the ExprNodeDesc tree traversal using the
 *         AccumuloRangeGenerator
 */
protected Object generateRanges(Configuration conf, ColumnMapper columnMapper, String hiveRowIdColumnName, ExprNodeDesc root) {
    AccumuloRangeGenerator rangeGenerator = new AccumuloRangeGenerator(conf, handler, columnMapper.getRowIdMapping(), hiveRowIdColumnName);
    SemanticDispatcher disp = new DefaultRuleDispatcher(rangeGenerator, Collections.<SemanticRule, SemanticNodeProcessor>emptyMap(), null);
    SemanticGraphWalker ogw = new DefaultGraphWalker(disp);
    List<Node> roots = new ArrayList<Node>();
    roots.add(root);
    HashMap<Node, Object> nodeOutput = new HashMap<Node, Object>();
    try {
        ogw.startWalking(roots, nodeOutput);
    } catch (SemanticException ex) {
        throw new RuntimeException(ex);
    }
    return nodeOutput.get(root);
}
Also used: DefaultRuleDispatcher(org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher) HashMap(java.util.HashMap) DefaultGraphWalker(org.apache.hadoop.hive.ql.lib.DefaultGraphWalker) Node(org.apache.hadoop.hive.ql.lib.Node) ArrayList(java.util.ArrayList) SemanticGraphWalker(org.apache.hadoop.hive.ql.lib.SemanticGraphWalker) SemanticDispatcher(org.apache.hadoop.hive.ql.lib.SemanticDispatcher) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)
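
The tail of generateRanges shows the standard way to pull a single result out of a walk: seed the walker with one root, then read that root's entry back from the nodeOutput map. A small helper makes that shape reusable. This is a sketch under the same imports as Example 87; the WalkUtils class and the walkToRootOutput name are illustrative, not Hive API.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.SemanticGraphWalker;
import org.apache.hadoop.hive.ql.parse.SemanticException;

public final class WalkUtils {

    private WalkUtils() {
    }

    // Walks the tree under root with the given walker and returns the output the
    // walk recorded for root, wrapping SemanticException the way generateRanges does.
    public static Object walkToRootOutput(SemanticGraphWalker walker, Node root) {
        List<Node> roots = new ArrayList<>();
        roots.add(root);
        HashMap<Node, Object> nodeOutput = new HashMap<>();
        try {
            walker.startWalking(roots, nodeOutput);
        } catch (SemanticException ex) {
            throw new RuntimeException(ex);
        }
        return nodeOutput.get(root);
    }
}

With such a helper, generateRanges would reduce to constructing the AccumuloRangeGenerator, dispatcher, and walker, followed by a single walkToRootOutput(ogw, root) call.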

Aggregations

SemanticGraphWalker (org.apache.hadoop.hive.ql.lib.SemanticGraphWalker): 87 usages
Node (org.apache.hadoop.hive.ql.lib.Node): 84 usages
SemanticDispatcher (org.apache.hadoop.hive.ql.lib.SemanticDispatcher): 84 usages
DefaultRuleDispatcher (org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher): 78 usages
ArrayList (java.util.ArrayList): 77 usages
SemanticNodeProcessor (org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor): 66 usages
LinkedHashMap (java.util.LinkedHashMap): 65 usages
SemanticRule (org.apache.hadoop.hive.ql.lib.SemanticRule): 65 usages
RuleRegExp (org.apache.hadoop.hive.ql.lib.RuleRegExp): 56 usages
DefaultGraphWalker (org.apache.hadoop.hive.ql.lib.DefaultGraphWalker): 54 usages
HashMap (java.util.HashMap): 19 usages
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 14 usages
ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc): 14 usages
ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc): 13 usages
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc): 13 usages
MapJoinOperator (org.apache.hadoop.hive.ql.exec.MapJoinOperator): 10 usages
ReduceSinkOperator (org.apache.hadoop.hive.ql.exec.ReduceSinkOperator): 10 usages
ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc): 10 usages
Test (org.junit.Test): 10 usages
List (java.util.List): 9 usages