use of org.apache.hadoop.hive.ql.lib.RuleRegExp in project hive by apache.
In the class SortedDynPartitionOptimizer, the method transform:
@Override
public ParseContext transform(ParseContext pCtx) throws SemanticException {
  // create a walker which walks the tree in a DFS manner while maintaining the
  // operator stack. The dispatcher generates the plan from the operator tree
  Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>();
  String FS = FileSinkOperator.getOperatorName() + "%";
  opRules.put(new RuleRegExp("Sorted Dynamic Partition", FS), getSortDynPartProc(pCtx));
  Dispatcher disp = new DefaultRuleDispatcher(null, opRules, null);
  GraphWalker ogw = new DefaultGraphWalker(disp);
  ArrayList<Node> topNodes = new ArrayList<Node>();
  topNodes.addAll(pCtx.getTopOps().values());
  ogw.startWalking(topNodes, null);
  return pCtx;
}
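The processor returned by getSortDynPartProc implements org.apache.hadoop.hive.ql.lib.NodeProcessor. A minimal sketch of its shape follows; the body is a placeholder, not the real rewrite, which lives in SortedDynPartitionOptimizer:

import java.util.Stack;
import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;

// Hypothetical sketch of the processor wired to the "FS%" rule above
private NodeProcessor getSortDynPartProc(ParseContext pCtx) {
  return new NodeProcessor() {
    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
        Object... nodeOutputs) throws SemanticException {
      // the "FS%" rule guarantees the dispatched node is a FileSinkOperator
      FileSinkOperator fsOp = (FileSinkOperator) nd;
      // ... inspect fsOp and the operator stack, rewrite the plan here ...
      return null;
    }
  };
}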
use of org.apache.hadoop.hive.ql.lib.RuleRegExp in project hive by apache.
In the class SortedDynPartitionTimeGranularityOptimizer, the method transform:
@Override
public ParseContext transform(ParseContext pCtx) throws SemanticException {
  // create a walker which walks the tree in a DFS manner while maintaining the
  // operator stack. The dispatcher generates the plan from the operator tree
  Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>();
  String FS = FileSinkOperator.getOperatorName() + "%";
  opRules.put(new RuleRegExp("Sorted Dynamic Partition Time Granularity", FS), getSortDynPartProc(pCtx));
  Dispatcher disp = new DefaultRuleDispatcher(null, opRules, null);
  GraphWalker ogw = new DefaultGraphWalker(disp);
  ArrayList<Node> topNodes = new ArrayList<Node>();
  topNodes.addAll(pCtx.getTopOps().values());
  ogw.startWalking(topNodes, null);
  return pCtx;
}
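Both transforms above key their single rule off the file-sink operator name, but a RuleRegExp pattern is a regular expression matched against the operator names on the walker's stack (joined by "%"), so a rule can also demand a particular ancestry. A sketch, with illustrative rule names:

import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
import org.apache.hadoop.hive.ql.lib.Rule;
import org.apache.hadoop.hive.ql.lib.RuleRegExp;

// "FS%": fires whenever the current node is a file sink, as above
Rule onFileSink = new RuleRegExp("R1", FileSinkOperator.getOperatorName() + "%");
// "TS%.*RS%": fires when the current node is a reduce sink with a table scan
// somewhere above it on the operator stack
Rule scanThenShuffle = new RuleRegExp("R2",
    TableScanOperator.getOperatorName() + "%.*" + ReduceSinkOperator.getOperatorName() + "%");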
use of org.apache.hadoop.hive.ql.lib.RuleRegExp in project hive by apache.
In the class SortedMergeBucketMapJoinOptimizer, the method transform:
@Override
public ParseContext transform(ParseContext pctx) throws SemanticException {
  HiveConf conf = pctx.getConf();
  SortBucketJoinProcCtx smbJoinContext = new SortBucketJoinProcCtx(conf);
  // Get a list of joins which cannot be converted to a sort merge join.
  // Currently, only select and filter operators are allowed between the table
  // scan and the join. More operators can be added - the method
  // supportAutomaticSortMergeJoin dictates which operators are allowed
  getListOfRejectedJoins(pctx, smbJoinContext);
  Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>();
  // go through all map joins and find those that have bucket map join enabled
  opRules.put(new RuleRegExp("R1", MapJoinOperator.getOperatorName() + "%"), getSortedMergeBucketMapjoinProc(pctx));
  // if automatic sort-merge join is enabled, check whether plain joins can be
  // converted to sort-merge joins
  if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_AUTO_SORTMERGE_JOIN)) {
    opRules.put(new RuleRegExp("R2", "JOIN%"), getSortedMergeJoinProc(pctx));
  }
  Dispatcher disp = new DefaultRuleDispatcher(getDefaultProc(), opRules, smbJoinContext);
  GraphWalker ogw = new DefaultGraphWalker(disp);
  // Create a list of top operator nodes
  ArrayList<Node> topNodes = new ArrayList<Node>();
  topNodes.addAll(pctx.getTopOps().values());
  ogw.startWalking(topNodes, null);
  return pctx;
}
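Unlike the other transforms shown here, this one hands the dispatcher a default processor, so operators that match neither R1 nor R2 still get a callback instead of being skipped. A plausible shape for getDefaultProc, assuming it is a no-op:

import java.util.Stack;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.parse.SemanticException;

private NodeProcessor getDefaultProc() {
  return new NodeProcessor() {
    @Override
    public Object process(Node nd, Stack<Node> stack,
        NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException {
      // no-op: lets the walk continue for operators that match no rule
      return null;
    }
  };
}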
use of org.apache.hadoop.hive.ql.lib.RuleRegExp in project hive by apache.
In the class PartitionColumnsSeparator, the method transform:
@Override
public ParseContext transform(ParseContext pctx) throws SemanticException {
  // 1. Trigger transformation
  Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>();
  opRules.put(new RuleRegExp("R1", FilterOperator.getOperatorName() + "%"), new StructInTransformer());
  Dispatcher disp = new DefaultRuleDispatcher(null, opRules, null);
  GraphWalker ogw = new ForwardWalker(disp);
  List<Node> topNodes = new ArrayList<Node>();
  topNodes.addAll(pctx.getTopOps().values());
  ogw.startWalking(topNodes, null);
  return pctx;
}
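The structural difference from the earlier transforms is the walker: DefaultGraphWalker dispatches an operator only after all of its children have been dispatched (bottom-up), while ForwardWalker dispatches an operator once its parents are done, before descending (top-down), which suits a pass that rewrites a FilterOperator before visiting anything below it. A sketch of the swap, with an empty rule map for brevity:

import java.util.LinkedHashMap;
import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.ForwardWalker;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.Rule;

Dispatcher disp = new DefaultRuleDispatcher(null, new LinkedHashMap<Rule, NodeProcessor>(), null);
GraphWalker bottomUp = new DefaultGraphWalker(disp); // children before parents
GraphWalker topDown = new ForwardWalker(disp);       // parents before children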
use of org.apache.hadoop.hive.ql.lib.RuleRegExp in project hive by apache.
In the class HiveOpConverterPostProc, the method transform:
@Override
public ParseContext transform(ParseContext pctx) throws SemanticException {
  // 0. We check the conditions to apply this transformation;
  // if we do not meet them, we bail out
  final boolean cboEnabled = HiveConf.getBoolVar(pctx.getConf(), HiveConf.ConfVars.HIVE_CBO_ENABLED);
  final boolean returnPathEnabled = HiveConf.getBoolVar(pctx.getConf(), HiveConf.ConfVars.HIVE_CBO_RETPATH_HIVEOP);
  final boolean cboSucceeded = pctx.getContext().isCboSucceeded();
  if (!(cboEnabled && returnPathEnabled && cboSucceeded)) {
    return pctx;
  }
  // 1. Initialize aux data structures
  this.pctx = pctx;
  this.aliasToOpInfo = new HashMap<String, Operator<? extends OperatorDesc>>();
  // 2. Trigger transformation
  Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>();
  opRules.put(new RuleRegExp("R1", JoinOperator.getOperatorName() + "%"), new JoinAnnotate());
  opRules.put(new RuleRegExp("R2", TableScanOperator.getOperatorName() + "%"), new TableScanAnnotate());
  Dispatcher disp = new DefaultRuleDispatcher(null, opRules, null);
  GraphWalker ogw = new ForwardWalker(disp);
  List<Node> topNodes = new ArrayList<Node>();
  topNodes.addAll(pctx.getTopOps().values());
  ogw.startWalking(topNodes, null);
  return pctx;
}
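All five transforms share the same scaffolding: build an ordered rule map, wrap it in a DefaultRuleDispatcher, and walk the operator DAG starting from ParseContext.getTopOps(). A hypothetical helper capturing that recurring sequence (walkTopOps is illustrative, not Hive API):

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.Rule;
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;

// Hypothetical helper: rules -> dispatcher -> walker -> startWalking
static void walkTopOps(ParseContext pctx, Map<Rule, NodeProcessor> opRules,
    NodeProcessor defaultProc, NodeProcessorCtx procCtx) throws SemanticException {
  Dispatcher disp = new DefaultRuleDispatcher(defaultProc, opRules, procCtx);
  GraphWalker ogw = new DefaultGraphWalker(disp);
  List<Node> topNodes = new ArrayList<Node>(pctx.getTopOps().values());
  ogw.startWalking(topNodes, null);
}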