
Example 66 with RuleRegExp

Use of org.apache.hadoop.hive.ql.lib.RuleRegExp in project hive by apache.

The class HiveOpConverterPostProc, method transform().

@Override
public ParseContext transform(ParseContext pctx) throws SemanticException {
    // 0. Check the conditions for applying this transformation;
    // if they are not met, bail out
    final boolean cboEnabled = HiveConf.getBoolVar(pctx.getConf(), HiveConf.ConfVars.HIVE_CBO_ENABLED);
    final boolean returnPathEnabled = HiveConf.getBoolVar(pctx.getConf(), HiveConf.ConfVars.HIVE_CBO_RETPATH_HIVEOP);
    final boolean cboSucceeded = pctx.getContext().isCboSucceeded();
    if (!(cboEnabled && returnPathEnabled && cboSucceeded)) {
        return pctx;
    }
    // 1. Initialize aux data structures
    this.pctx = pctx;
    this.aliasToOpInfo = new HashMap<String, Operator<? extends OperatorDesc>>();
    // 2. Trigger transformation
    Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
    opRules.put(new RuleRegExp("R1", JoinOperator.getOperatorName() + "%"), new JoinAnnotate());
    opRules.put(new RuleRegExp("R2", TableScanOperator.getOperatorName() + "%"), new TableScanAnnotate());
    SemanticDispatcher disp = new DefaultRuleDispatcher(null, opRules, null);
    SemanticGraphWalker ogw = new ForwardWalker(disp);
    List<Node> topNodes = new ArrayList<Node>();
    topNodes.addAll(pctx.getTopOps().values());
    ogw.startWalking(topNodes, null);
    return pctx;
}
Also used: ReduceSinkOperator (org.apache.hadoop.hive.ql.exec.ReduceSinkOperator), JoinOperator (org.apache.hadoop.hive.ql.exec.JoinOperator), TableScanOperator (org.apache.hadoop.hive.ql.exec.TableScanOperator), Operator (org.apache.hadoop.hive.ql.exec.Operator), SemanticRule (org.apache.hadoop.hive.ql.lib.SemanticRule), ForwardWalker (org.apache.hadoop.hive.ql.lib.ForwardWalker), DefaultRuleDispatcher (org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher), Node (org.apache.hadoop.hive.ql.lib.Node), RuleRegExp (org.apache.hadoop.hive.ql.lib.RuleRegExp), ArrayList (java.util.ArrayList), SemanticGraphWalker (org.apache.hadoop.hive.ql.lib.SemanticGraphWalker), LinkedHashMap (java.util.LinkedHashMap), SemanticDispatcher (org.apache.hadoop.hive.ql.lib.SemanticDispatcher), SemanticNodeProcessor (org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor), OperatorDesc (org.apache.hadoop.hive.ql.plan.OperatorDesc)
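
JoinAnnotate and TableScanAnnotate are inner classes of HiveOpConverterPostProc and are not reproduced in this listing. As a rough idea of the shape such a processor takes, here is a hypothetical sketch; the class name and body are placeholders, and only the SemanticNodeProcessor contract (shared with the older NodeProcessor interface) is taken from Hive.

import java.util.Stack;

import org.apache.hadoop.hive.ql.exec.JoinOperator;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor;
import org.apache.hadoop.hive.ql.parse.SemanticException;

// Hypothetical processor: the dispatcher calls process() whenever the walker
// reaches a node whose operator-name path matches the "R1" rule registered above.
public class ExampleJoinAnnotator implements SemanticNodeProcessor {

    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
            Object... nodeOutputs) throws SemanticException {
        // nd is the operator that completed the match, here a JoinOperator.
        JoinOperator joinOp = (JoinOperator) nd;
        // The real JoinAnnotate records alias and operator bookkeeping in the
        // enclosing transform's aliasToOpInfo map; that logic is omitted here.
        return null;
    }
}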

Example 67 with RuleRegExp

Use of org.apache.hadoop.hive.ql.lib.RuleRegExp in project hive by apache.

The class SimpleFetchAggregation, method transform().

@Override
public ParseContext transform(ParseContext pctx) throws SemanticException {
    if (pctx.getFetchTask() != null || !pctx.getQueryProperties().isQuery()
            || pctx.getQueryProperties().isAnalyzeRewrite() || pctx.getQueryProperties().isCTAS()
            || pctx.getLoadFileWork().size() > 1 || !pctx.getLoadTableWork().isEmpty()) {
        return pctx;
    }
    String GBY = GroupByOperator.getOperatorName() + "%";
    String RS = ReduceSinkOperator.getOperatorName() + "%";
    String SEL = SelectOperator.getOperatorName() + "%";
    String FS = FileSinkOperator.getOperatorName() + "%";
    Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
    opRules.put(new RuleRegExp("R1", GBY + RS + GBY + SEL + FS), new SingleGBYProcessor(pctx));
    opRules.put(new RuleRegExp("R2", GBY + RS + GBY + FS), new SingleGBYProcessor(pctx));
    SemanticDispatcher disp = new DefaultRuleDispatcher(null, opRules, null);
    SemanticGraphWalker ogw = new DefaultGraphWalker(disp);
    ArrayList<Node> topNodes = new ArrayList<Node>();
    topNodes.addAll(pctx.getTopOps().values());
    ogw.startWalking(topNodes, null);
    return pctx;
}
Also used: SemanticRule (org.apache.hadoop.hive.ql.lib.SemanticRule), DefaultRuleDispatcher (org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher), DefaultGraphWalker (org.apache.hadoop.hive.ql.lib.DefaultGraphWalker), Node (org.apache.hadoop.hive.ql.lib.Node), RuleRegExp (org.apache.hadoop.hive.ql.lib.RuleRegExp), ArrayList (java.util.ArrayList), SemanticGraphWalker (org.apache.hadoop.hive.ql.lib.SemanticGraphWalker), LinkedHashMap (java.util.LinkedHashMap), SemanticDispatcher (org.apache.hadoop.hive.ql.lib.SemanticDispatcher), SemanticNodeProcessor (org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor)
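
The rule patterns above are assembled from operator-name fragments. The sketch below (not part of the Hive source) shows what R1 and R2 evaluate to, assuming the conventional short names GBY, RS, SEL and FS returned by the respective getOperatorName() calls.

// Assumed expansion of the GBY, RS, SEL and FS fragments defined above:
String r1 = GBY + RS + GBY + SEL + FS;   // presumably "GBY%RS%GBY%SEL%FS%"
String r2 = GBY + RS + GBY + FS;         // presumably "GBY%RS%GBY%FS%"
// RuleRegExp matches such a pattern against the concatenated names of the
// operators on the walker's current stack, so SingleGBYProcessor fires only when
// the walk reaches the last operator of a GBY-RS-GBY(-SEL)-FS chain, i.e. the
// FileSinkOperator of a simple fetchable aggregation.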

Example 68 with RuleRegExp

Use of org.apache.hadoop.hive.ql.lib.RuleRegExp in project hive by apache.

The class RedundantDynamicPruningConditionsRemoval, method transform().

/**
 * Transform the query tree.
 *
 * @param pctx the current parse context
 */
@Override
public ParseContext transform(ParseContext pctx) throws SemanticException {
    // Make sure semijoin is not enabled. If it is, then do not remove the dynamic partition pruning predicates.
    if (!pctx.getConf().getBoolVar(HiveConf.ConfVars.TEZ_DYNAMIC_SEMIJOIN_REDUCTION)) {
        Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
        opRules.put(new RuleRegExp("R1", TableScanOperator.getOperatorName() + "%" + FilterOperator.getOperatorName() + "%"), new FilterTransformer());
        SemanticDispatcher disp = new DefaultRuleDispatcher(null, opRules, null);
        SemanticGraphWalker ogw = new DefaultGraphWalker(disp);
        List<Node> topNodes = new ArrayList<Node>();
        topNodes.addAll(pctx.getTopOps().values());
        ogw.startWalking(topNodes, null);
    }
    return pctx;
}
Also used: SemanticRule (org.apache.hadoop.hive.ql.lib.SemanticRule), DefaultRuleDispatcher (org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher), DefaultGraphWalker (org.apache.hadoop.hive.ql.lib.DefaultGraphWalker), Node (org.apache.hadoop.hive.ql.lib.Node), RuleRegExp (org.apache.hadoop.hive.ql.lib.RuleRegExp), ArrayList (java.util.ArrayList), SemanticGraphWalker (org.apache.hadoop.hive.ql.lib.SemanticGraphWalker), LinkedHashMap (java.util.LinkedHashMap), SemanticDispatcher (org.apache.hadoop.hive.ql.lib.SemanticDispatcher), SemanticNodeProcessor (org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor)
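
FilterTransformer is an inner class of RedundantDynamicPruningConditionsRemoval and is not shown in this listing. The hypothetical sketch below illustrates how a processor fired by the two-part rule above can reach both matched operators; the body is a placeholder, and the assumption that the table scan sits one slot below the filter on the stack holds only because this rule has no wildcard between the two operator names (imports for FilterOperator and TableScanOperator from org.apache.hadoop.hive.ql.exec are needed in addition to those listed above).

// Hypothetical processor body: nd is the FilterOperator that completed the match;
// for this rule, the TableScanOperator it filters sits directly below it on the
// walker's stack (the stack holds the path from the root to the current node).
public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
        Object... nodeOutputs) throws SemanticException {
    FilterOperator filter = (FilterOperator) nd;
    TableScanOperator ts = (TableScanOperator) stack.get(stack.size() - 2);
    // The real FilterTransformer inspects filter's predicate and removes the
    // dynamic partition pruning conditions that are redundant given ts.
    return null;
}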

Example 69 with RuleRegExp

Use of org.apache.hadoop.hive.ql.lib.RuleRegExp in project hive by apache.

The class MapJoinProcessor, method transform().

/**
 * Transform the query tree. For each join, check if it is a map-side join (user specified). If
 * yes, convert it to a map-side join.
 *
 * @param pactx
 *          current parse context
 */
@Override
public ParseContext transform(ParseContext pactx) throws SemanticException {
    List<MapJoinOperator> listMapJoinOps = new ArrayList<MapJoinOperator>();
    // traverse all the joins and convert them if necessary
    if (pactx.getJoinOps() != null) {
        Set<JoinOperator> joinMap = new HashSet<JoinOperator>();
        Set<MapJoinOperator> mapJoinMap = pactx.getMapJoinOps();
        if (mapJoinMap == null) {
            mapJoinMap = new HashSet<MapJoinOperator>();
            pactx.setMapJoinOps(mapJoinMap);
        }
        Iterator<JoinOperator> joinCtxIter = pactx.getJoinOps().iterator();
        while (joinCtxIter.hasNext()) {
            JoinOperator joinOp = joinCtxIter.next();
            int mapJoinPos = mapSideJoin(joinOp);
            if (mapJoinPos >= 0) {
                MapJoinOperator mapJoinOp = generateMapJoinOperator(pactx, joinOp, mapJoinPos);
                listMapJoinOps.add(mapJoinOp);
                mapJoinOp.getConf().setQBJoinTreeProps(joinOp.getConf());
                mapJoinMap.add(mapJoinOp);
            } else {
                joinOp.getConf().setQBJoinTreeProps(joinOp.getConf());
                joinMap.add(joinOp);
            }
        }
        // store the new joinContext
        pactx.setJoinOps(joinMap);
    }
    // Go over the list and find if a reducer is not needed
    List<AbstractMapJoinOperator<? extends MapJoinDesc>> listMapJoinOpsNoRed = new ArrayList<AbstractMapJoinOperator<? extends MapJoinDesc>>();
    // create a walker which walks the tree in a DFS manner while maintaining
    // the operator stack.
    // The dispatcher generates the plan from the operator tree
    Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
    opRules.put(new RuleRegExp("R0", MapJoinOperator.getOperatorName() + "%"), getCurrentMapJoin());
    opRules.put(new RuleRegExp("R1", MapJoinOperator.getOperatorName() + "%.*" + FileSinkOperator.getOperatorName() + "%"), getMapJoinFS());
    opRules.put(new RuleRegExp("R2", MapJoinOperator.getOperatorName() + "%.*" + ReduceSinkOperator.getOperatorName() + "%"), getMapJoinDefault());
    opRules.put(new RuleRegExp("R4", MapJoinOperator.getOperatorName() + "%.*" + UnionOperator.getOperatorName() + "%"), getMapJoinDefault());
    // The dispatcher fires the processor corresponding to the closest matching
    // rule and passes the context along
    SemanticDispatcher disp = new DefaultRuleDispatcher(getDefault(), opRules, new MapJoinWalkerCtx(listMapJoinOpsNoRed, pactx));
    SemanticGraphWalker ogw = new GenMapRedWalker(disp);
    ArrayList<Node> topNodes = new ArrayList<Node>();
    topNodes.addAll(listMapJoinOps);
    ogw.startWalking(topNodes, null);
    pactx.setListMapJoinOpsNoReducer(listMapJoinOpsNoRed);
    return pactx;
}
Also used: LateralViewJoinOperator (org.apache.hadoop.hive.ql.exec.LateralViewJoinOperator), MapJoinOperator (org.apache.hadoop.hive.ql.exec.MapJoinOperator), AbstractMapJoinOperator (org.apache.hadoop.hive.ql.exec.AbstractMapJoinOperator), SMBMapJoinOperator (org.apache.hadoop.hive.ql.exec.SMBMapJoinOperator), JoinOperator (org.apache.hadoop.hive.ql.exec.JoinOperator), Node (org.apache.hadoop.hive.ql.lib.Node), ArrayList (java.util.ArrayList), LinkedHashMap (java.util.LinkedHashMap), HashSet (java.util.HashSet), MapJoinDesc (org.apache.hadoop.hive.ql.plan.MapJoinDesc), SemanticRule (org.apache.hadoop.hive.ql.lib.SemanticRule), DefaultRuleDispatcher (org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher), RuleRegExp (org.apache.hadoop.hive.ql.lib.RuleRegExp), SemanticGraphWalker (org.apache.hadoop.hive.ql.lib.SemanticGraphWalker), GenMapRedWalker (org.apache.hadoop.hive.ql.parse.GenMapRedWalker), SemanticDispatcher (org.apache.hadoop.hive.ql.lib.SemanticDispatcher), SemanticNodeProcessor (org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor), EnabledOverride (org.apache.hadoop.hive.ql.optimizer.physical.Vectorizer.EnabledOverride)
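
Unlike the earlier examples, this one hands the dispatcher a default processor (getDefault()) and a shared walker context. The sketch below (not from the Hive source) spells out what the four patterns amount to, assuming getOperatorName() returns "MAPJOIN", "FS", "RS" and "UNION" for the operators involved.

// Assumed expansion of the rule patterns registered above:
String r0 = "MAPJOIN%";            // the map-join operator itself
String r1 = "MAPJOIN%.*FS%";       // a map-join with a file sink somewhere downstream
String r2 = "MAPJOIN%.*RS%";       // a map-join with a reduce sink somewhere downstream
String r4 = "MAPJOIN%.*UNION%";    // a map-join with a union somewhere downstream
// DefaultRuleDispatcher fires the processor of the best-matching rule for the
// walker's current stack; when no rule matches, the default processor passed as
// its first argument runs instead, and MapJoinWalkerCtx is handed to every
// processor as the shared context.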

Example 70 with RuleRegExp

Use of org.apache.hadoop.hive.ql.lib.RuleRegExp in project hive by apache.

The class PartitionColumnsSeparator, method transform().

@Override
public ParseContext transform(ParseContext pctx) throws SemanticException {
    // 1. Trigger transformation
    Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
    opRules.put(new RuleRegExp("R1", FilterOperator.getOperatorName() + "%"), new StructInTransformer());
    SemanticDispatcher disp = new DefaultRuleDispatcher(null, opRules, null);
    SemanticGraphWalker ogw = new ForwardWalker(disp);
    List<Node> topNodes = new ArrayList<Node>();
    topNodes.addAll(pctx.getTopOps().values());
    ogw.startWalking(topNodes, null);
    return pctx;
}
Also used: SemanticRule (org.apache.hadoop.hive.ql.lib.SemanticRule), ForwardWalker (org.apache.hadoop.hive.ql.lib.ForwardWalker), DefaultRuleDispatcher (org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher), Node (org.apache.hadoop.hive.ql.lib.Node), RuleRegExp (org.apache.hadoop.hive.ql.lib.RuleRegExp), ArrayList (java.util.ArrayList), SemanticGraphWalker (org.apache.hadoop.hive.ql.lib.SemanticGraphWalker), LinkedHashMap (java.util.LinkedHashMap), SemanticDispatcher (org.apache.hadoop.hive.ql.lib.SemanticDispatcher), SemanticNodeProcessor (org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor)
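
All five examples follow the same skeleton: map one or more RuleRegExp patterns to processors, wrap the map in a dispatcher, hand the dispatcher to a graph walker, and start the walk from the top operators of the parse context. The condensed sketch below is hypothetical; runWalk and myProcessor are placeholder names, and the FilterOperator rule is only an example pattern.

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.hive.ql.exec.FilterOperator;
import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.RuleRegExp;
import org.apache.hadoop.hive.ql.lib.SemanticDispatcher;
import org.apache.hadoop.hive.ql.lib.SemanticGraphWalker;
import org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor;
import org.apache.hadoop.hive.ql.lib.SemanticRule;
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;

public ParseContext runWalk(ParseContext pctx, SemanticNodeProcessor myProcessor) throws SemanticException {
    // 1. Map each RuleRegExp (a pattern over operator names) to the processor it should fire.
    Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
    opRules.put(new RuleRegExp("R1", FilterOperator.getOperatorName() + "%"), myProcessor);
    // 2. Dispatcher: no default processor and no shared context in this sketch.
    SemanticDispatcher disp = new DefaultRuleDispatcher(null, opRules, null);
    // 3. Walker: DefaultGraphWalker here; ForwardWalker or GenMapRedWalker in the examples above.
    SemanticGraphWalker ogw = new DefaultGraphWalker(disp);
    // 4. Start the walk from the root operators (the table scans in pctx.getTopOps()).
    List<Node> topNodes = new ArrayList<Node>(pctx.getTopOps().values());
    ogw.startWalking(topNodes, null);
    return pctx;
}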

Aggregations

RuleRegExp (org.apache.hadoop.hive.ql.lib.RuleRegExp): 74
LinkedHashMap (java.util.LinkedHashMap): 73
Node (org.apache.hadoop.hive.ql.lib.Node): 72
DefaultRuleDispatcher (org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher): 71
ArrayList (java.util.ArrayList): 70
SemanticNodeProcessor (org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor): 57
SemanticRule (org.apache.hadoop.hive.ql.lib.SemanticRule): 57
SemanticGraphWalker (org.apache.hadoop.hive.ql.lib.SemanticGraphWalker): 56
SemanticDispatcher (org.apache.hadoop.hive.ql.lib.SemanticDispatcher): 55
DefaultGraphWalker (org.apache.hadoop.hive.ql.lib.DefaultGraphWalker): 33
NodeProcessor (org.apache.hadoop.hive.ql.lib.NodeProcessor): 17
Rule (org.apache.hadoop.hive.ql.lib.Rule): 17
ReduceSinkOperator (org.apache.hadoop.hive.ql.exec.ReduceSinkOperator): 16
GraphWalker (org.apache.hadoop.hive.ql.lib.GraphWalker): 16
Dispatcher (org.apache.hadoop.hive.ql.lib.Dispatcher): 15
FileSinkOperator (org.apache.hadoop.hive.ql.exec.FileSinkOperator): 14
MapJoinOperator (org.apache.hadoop.hive.ql.exec.MapJoinOperator): 13
TableScanOperator (org.apache.hadoop.hive.ql.exec.TableScanOperator): 13
Operator (org.apache.hadoop.hive.ql.exec.Operator): 12
UnionOperator (org.apache.hadoop.hive.ql.exec.UnionOperator): 11