
Example 91 with SemanticDispatcher

Use of org.apache.hadoop.hive.ql.lib.SemanticDispatcher in project hive by apache.

The class SparkMapJoinResolver, method resolve:

@Override
public PhysicalContext resolve(PhysicalContext pctx) throws SemanticException {
    // The dispatcher holds the Spark map-join conversion logic applied to each task.
    SemanticDispatcher dispatcher = new SparkMapJoinTaskDispatcher(pctx);
    TaskGraphWalker graphWalker = new TaskGraphWalker(dispatcher);
    // Walk the task graph starting from the root tasks of the physical plan.
    ArrayList<Node> topNodes = new ArrayList<Node>();
    topNodes.addAll(pctx.getRootTasks());
    graphWalker.startWalking(topNodes, null);
    return pctx;
}
Also used: TaskGraphWalker(org.apache.hadoop.hive.ql.lib.TaskGraphWalker), Node(org.apache.hadoop.hive.ql.lib.Node), SemanticDispatcher(org.apache.hadoop.hive.ql.lib.SemanticDispatcher), ArrayList(java.util.ArrayList)
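
None of these excerpts show the SemanticDispatcher interface itself. A minimal sketch of a task-level dispatcher, assuming the dispatch(Node, Stack<Node>, Object...) signature implied by the walkers in these examples; the class name and the logging it does are illustrative only, not part of Hive:

import java.util.Stack;

import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.SemanticDispatcher;
import org.apache.hadoop.hive.ql.parse.SemanticException;

// Hypothetical dispatcher skeleton; the real SparkMapJoinTaskDispatcher holds
// the actual Spark map-join conversion logic.
public class PrintingTaskDispatcher implements SemanticDispatcher {

  @Override
  public Object dispatch(Node nd, Stack<Node> stack, Object... nodeOutputs)
      throws SemanticException {
    // The walker invokes dispatch for each task node it reaches from the roots.
    System.out.println("Visiting node: " + nd.getName());
    return null;
  }
}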

Example 92 with SemanticDispatcher

Use of org.apache.hadoop.hive.ql.lib.SemanticDispatcher in project hive by apache.

The class AnnotateWithStatistics, method transform:

@Override
public ParseContext transform(ParseContext pctx) throws SemanticException {
    AnnotateStatsProcCtx aspCtx = new AnnotateStatsProcCtx(pctx);
    // create a walker which walks the tree in a BFS manner while maintaining the
    // operator stack. The dispatcher generates the plan from the operator tree
    Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
    opRules.put(new RuleRegExp("TS", TableScanOperator.getOperatorName() + "%"), StatsRulesProcFactory.getTableScanRule());
    opRules.put(new RuleRegExp("SEL", SelectOperator.getOperatorName() + "%"), StatsRulesProcFactory.getSelectRule());
    opRules.put(new RuleRegExp("FIL", FilterOperator.getOperatorName() + "%"), StatsRulesProcFactory.getFilterRule());
    opRules.put(new RuleRegExp("GBY", GroupByOperator.getOperatorName() + "%"), StatsRulesProcFactory.getGroupByRule());
    opRules.put(new RuleRegExp("JOIN", CommonJoinOperator.getOperatorName() + "%|" + MapJoinOperator.getOperatorName() + "%"), StatsRulesProcFactory.getJoinRule());
    opRules.put(new RuleRegExp("LIM", LimitOperator.getOperatorName() + "%"), StatsRulesProcFactory.getLimitRule());
    opRules.put(new RuleRegExp("RS", ReduceSinkOperator.getOperatorName() + "%"), StatsRulesProcFactory.getReduceSinkRule());
    opRules.put(new RuleRegExp("UDTF", UDTFOperator.getOperatorName() + "%"), StatsRulesProcFactory.getUDTFRule());
    opRules.put(new RuleRegExp("LVJ", LateralViewJoinOperator.getOperatorName() + "%"), StatsRulesProcFactory.getLateralViewJoinRule());
    // The dispatcher fires the processor corresponding to the closest matching
    // rule and passes the context along
    SemanticDispatcher disp = new DefaultRuleDispatcher(StatsRulesProcFactory.getDefaultRule(), opRules, aspCtx);
    SemanticGraphWalker ogw = new LevelOrderWalker(disp, 0);
    // Create a list of topop nodes
    ArrayList<Node> topNodes = new ArrayList<Node>();
    topNodes.addAll(pctx.getTopOps().values());
    ogw.startWalking(topNodes, null);
    return pctx;
}
Also used: SemanticRule(org.apache.hadoop.hive.ql.lib.SemanticRule), DefaultRuleDispatcher(org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher), Node(org.apache.hadoop.hive.ql.lib.Node), RuleRegExp(org.apache.hadoop.hive.ql.lib.RuleRegExp), ArrayList(java.util.ArrayList), SemanticGraphWalker(org.apache.hadoop.hive.ql.lib.SemanticGraphWalker), LinkedHashMap(java.util.LinkedHashMap), SemanticDispatcher(org.apache.hadoop.hive.ql.lib.SemanticDispatcher), SemanticNodeProcessor(org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor), LevelOrderWalker(org.apache.hadoop.hive.ql.lib.LevelOrderWalker)
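
The StatsRulesProcFactory methods above return SemanticNodeProcessor instances keyed by operator-name regular expressions. A custom processor is wired in the same way; the sketch below is illustrative only (TableScanCounter and its counting behaviour are hypothetical, not part of Hive), assuming the process(Node, Stack, NodeProcessorCtx, Object...) signature used throughout these examples:

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Stack;

import org.apache.hadoop.hive.ql.exec.TableScanOperator;
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.RuleRegExp;
import org.apache.hadoop.hive.ql.lib.SemanticDispatcher;
import org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor;
import org.apache.hadoop.hive.ql.lib.SemanticRule;
import org.apache.hadoop.hive.ql.parse.SemanticException;

// Hypothetical processor: counts the TableScanOperator nodes visited by a walker.
public class TableScanCounter implements SemanticNodeProcessor {

  private int count = 0;

  @Override
  public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
      Object... nodeOutputs) throws SemanticException {
    // Fired for every operator whose name matches the "TS%" rule registered below.
    count++;
    return null;
  }

  public int getCount() {
    return count;
  }

  // Builds a dispatcher that routes table scans to this processor; a null default
  // processor means nodes matching no rule are simply skipped.
  public SemanticDispatcher asDispatcher(NodeProcessorCtx ctx) {
    Map<SemanticRule, SemanticNodeProcessor> opRules =
        new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
    opRules.put(new RuleRegExp("TS", TableScanOperator.getOperatorName() + "%"), this);
    return new DefaultRuleDispatcher(null, opRules, ctx);
  }
}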

Example 93 with SemanticDispatcher

Use of org.apache.hadoop.hive.ql.lib.SemanticDispatcher in project hive by apache.

The class SerializeFilter, method resolve:

@Override
public PhysicalContext resolve(PhysicalContext pctx) throws SemanticException {
    pctx.getConf();
    // create dispatcher and graph walker
    SemanticDispatcher disp = new Serializer(pctx);
    TaskGraphWalker ogw = new TaskGraphWalker(disp);
    // get all the tasks nodes from root task
    ArrayList<Node> topNodes = new ArrayList<Node>();
    topNodes.addAll(pctx.getRootTasks());
    // begin to walk through the task tree.
    ogw.startWalking(topNodes, null);
    return pctx;
}
Also used: TaskGraphWalker(org.apache.hadoop.hive.ql.lib.TaskGraphWalker), Node(org.apache.hadoop.hive.ql.lib.Node), SemanticDispatcher(org.apache.hadoop.hive.ql.lib.SemanticDispatcher), ArrayList(java.util.ArrayList)
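
Here the output map passed to startWalking is null because the resolver acts only through side effects on the tasks and the context. When per-node results are wanted, a map can be passed in and read back after the walk, as the next example does; a small hypothetical fragment using the same startWalking signature:

// Hypothetical variant of the call above: collect what the dispatcher returned
// for each root task instead of discarding it.
HashMap<Node, Object> nodeOutput = new HashMap<Node, Object>();
ogw.startWalking(topNodes, nodeOutput);
for (Node task : topNodes) {
    System.out.println(task.getName() + " -> " + nodeOutput.get(task));
}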

Example 94 with SemanticDispatcher

Use of org.apache.hadoop.hive.ql.lib.SemanticDispatcher in project hive by apache.

The class AccumuloPredicateHandler, method generateRanges:

/**
 * Encapsulates the traversal over some {@link ExprNodeDesc} tree for the generation of Accumulo
 * Ranges using expressions involving the Accumulo rowid-mapped Hive column.
 *
 * @param conf
 *          Hadoop configuration
 * @param columnMapper
 *          Mapping of Hive to Accumulo columns for the query
 * @param hiveRowIdColumnName
 *          Name of the hive column mapped to the Accumulo rowid
 * @param root
 *          Root of some ExprNodeDesc tree to traverse, the WHERE clause
 * @return An object representing the result from the ExprNodeDesc tree traversal using the
 *         AccumuloRangeGenerator
 */
protected Object generateRanges(Configuration conf, ColumnMapper columnMapper, String hiveRowIdColumnName, ExprNodeDesc root) {
    AccumuloRangeGenerator rangeGenerator = new AccumuloRangeGenerator(conf, handler, columnMapper.getRowIdMapping(), hiveRowIdColumnName);
    // With an empty rule map, the range generator acts as the default processor and
    // is dispatched for every node in the expression tree.
    SemanticDispatcher disp = new DefaultRuleDispatcher(rangeGenerator, Collections.<SemanticRule, SemanticNodeProcessor>emptyMap(), null);
    SemanticGraphWalker ogw = new DefaultGraphWalker(disp);
    List<Node> roots = new ArrayList<Node>();
    roots.add(root);
    HashMap<Node, Object> nodeOutput = new HashMap<Node, Object>();
    try {
        ogw.startWalking(roots, nodeOutput);
    } catch (SemanticException ex) {
        throw new RuntimeException(ex);
    }
    // The walker records each node's result; the root's entry is the overall answer.
    return nodeOutput.get(root);
}
Also used: DefaultRuleDispatcher(org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher), HashMap(java.util.HashMap), DefaultGraphWalker(org.apache.hadoop.hive.ql.lib.DefaultGraphWalker), Node(org.apache.hadoop.hive.ql.lib.Node), ArrayList(java.util.ArrayList), SemanticGraphWalker(org.apache.hadoop.hive.ql.lib.SemanticGraphWalker), SemanticDispatcher(org.apache.hadoop.hive.ql.lib.SemanticDispatcher), SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)
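
Because the rule map is empty, the DefaultRuleDispatcher falls through to its default processor, the range generator, for every node in the expression tree. The same one-processor-for-all-nodes pattern works for any ExprNodeDesc traversal; the sketch below is illustrative (ColumnCollector and its column-gathering behaviour are hypothetical, not part of Hive):

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Stack;

import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.SemanticDispatcher;
import org.apache.hadoop.hive.ql.lib.SemanticGraphWalker;
import org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor;
import org.apache.hadoop.hive.ql.lib.SemanticRule;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;

// Hypothetical processor: records every column referenced in an expression tree.
public class ColumnCollector implements SemanticNodeProcessor {

  private final List<String> columns = new ArrayList<String>();

  @Override
  public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
      Object... nodeOutputs) throws SemanticException {
    if (nd instanceof ExprNodeColumnDesc) {
      columns.add(((ExprNodeColumnDesc) nd).getColumn());
    }
    return null;
  }

  public List<String> collect(ExprNodeDesc root) throws SemanticException {
    // Empty rule map: this processor is the default and therefore sees every node.
    SemanticDispatcher disp = new DefaultRuleDispatcher(
        this, Collections.<SemanticRule, SemanticNodeProcessor>emptyMap(), null);
    SemanticGraphWalker walker = new DefaultGraphWalker(disp);
    List<Node> roots = new ArrayList<Node>();
    roots.add(root);
    walker.startWalking(roots, new HashMap<Node, Object>());
    return columns;
  }
}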

Aggregations

SemanticDispatcher (org.apache.hadoop.hive.ql.lib.SemanticDispatcher): 94 usages
Node (org.apache.hadoop.hive.ql.lib.Node): 92 usages
ArrayList (java.util.ArrayList): 84 usages
SemanticGraphWalker (org.apache.hadoop.hive.ql.lib.SemanticGraphWalker): 84 usages
DefaultRuleDispatcher (org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher): 77 usages
SemanticNodeProcessor (org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor): 65 usages
LinkedHashMap (java.util.LinkedHashMap): 64 usages
SemanticRule (org.apache.hadoop.hive.ql.lib.SemanticRule): 64 usages
RuleRegExp (org.apache.hadoop.hive.ql.lib.RuleRegExp): 55 usages
DefaultGraphWalker (org.apache.hadoop.hive.ql.lib.DefaultGraphWalker): 52 usages
HashMap (java.util.HashMap): 19 usages
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 15 usages
ExprNodeGenericFuncDesc (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc): 14 usages
ExprNodeColumnDesc (org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc): 13 usages
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc): 13 usages
MapJoinOperator (org.apache.hadoop.hive.ql.exec.MapJoinOperator): 10 usages
ReduceSinkOperator (org.apache.hadoop.hive.ql.exec.ReduceSinkOperator): 10 usages
ExprNodeConstantDesc (org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc): 10 usages
Test (org.junit.Test): 10 usages
List (java.util.List): 9 usages