Use of org.apache.hadoop.hive.ql.lib.SemanticRule in project hive by apache.
The class SimpleFetchAggregation, method transform.
@Override
public ParseContext transform(ParseContext pctx) throws SemanticException {
  // Bail out early for plans this optimization cannot apply to: an existing
  // fetch task, non-queries, ANALYZE rewrites, CTAS, or multiple load works.
  if (pctx.getFetchTask() != null || !pctx.getQueryProperties().isQuery()
      || pctx.getQueryProperties().isAnalyzeRewrite() || pctx.getQueryProperties().isCTAS()
      || pctx.getLoadFileWork().size() > 1 || !pctx.getLoadTableWork().isEmpty()) {
    return pctx;
  }
  String GBY = GroupByOperator.getOperatorName() + "%";
  String RS = ReduceSinkOperator.getOperatorName() + "%";
  String SEL = SelectOperator.getOperatorName() + "%";
  String FS = FileSinkOperator.getOperatorName() + "%";
  // Both rules match a map-side GBY / reduce-side GBY pair ending in a
  // FileSink, with (R1) or without (R2) an intermediate Select.
  Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
  opRules.put(new RuleRegExp("R1", GBY + RS + GBY + SEL + FS), new SingleGBYProcessor(pctx));
  opRules.put(new RuleRegExp("R2", GBY + RS + GBY + FS), new SingleGBYProcessor(pctx));
  SemanticDispatcher disp = new DefaultRuleDispatcher(null, opRules, null);
  SemanticGraphWalker ogw = new DefaultGraphWalker(disp);
  ArrayList<Node> topNodes = new ArrayList<Node>();
  topNodes.addAll(pctx.getTopOps().values());
  ogw.startWalking(topNodes, null);
  return pctx;
}
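Both rules above map to SingleGBYProcessor, whose body is not shown in this excerpt. As a rough sketch of the contract such a processor fulfills (the class below is illustrative, not Hive's actual SingleGBYProcessor), a SemanticNodeProcessor receives the node that completed the match plus the operator stack the rule was evaluated against:

import java.util.Stack;

import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor;
import org.apache.hadoop.hive.ql.parse.SemanticException;

// Illustrative sketch only; the real SingleGBYProcessor rewrites the plan
// so the aggregation result can be served by a simple fetch task.
public class ExampleGBYProcessor implements SemanticNodeProcessor {
  @Override
  public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
      Object... nodeOutputs) throws SemanticException {
    // 'nd' is the last operator of the matched pattern (the FS in R1/R2);
    // 'stack' holds the full path from a root operator down to 'nd'.
    FileSinkOperator fs = (FileSinkOperator) nd;
    // A real processor would inspect the GBY/RS/GBY chain on the stack here.
    return null;
  }
}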
Use of org.apache.hadoop.hive.ql.lib.SemanticRule in project hive by apache.
The class SortedDynPartitionTimeGranularityOptimizer, method transform.
@Override
public ParseContext transform(ParseContext pCtx) throws SemanticException {
  // create a walker which walks the tree in a DFS manner while maintaining the
  // operator stack. The dispatcher generates the plan from the operator tree.
  Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
  String FS = FileSinkOperator.getOperatorName() + "%";
  opRules.put(new RuleRegExp("Sorted Dynamic Partition Time Granularity", FS), getSortDynPartProc(pCtx));
  SemanticDispatcher disp = new DefaultRuleDispatcher(null, opRules, null);
  SemanticGraphWalker ogw = new DefaultGraphWalker(disp);
  ArrayList<Node> topNodes = new ArrayList<Node>();
  topNodes.addAll(pCtx.getTopOps().values());
  ogw.startWalking(topNodes, null);
  return pCtx;
}
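For context on the "%" suffixes used in these rule strings: RuleRegExp evaluates its expression against the names of the operators on the walker's stack, each name terminated by "%", so a pattern of just "FS%" fires whenever the node being dispatched is a FileSink. A minimal sketch of how such patterns are assembled (the printed mnemonics are Hive's standard operator names; the rule is only constructed, not evaluated, here):

import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
import org.apache.hadoop.hive.ql.lib.RuleRegExp;

public class RulePatternDemo {
  public static void main(String[] args) {
    System.out.println(TableScanOperator.getOperatorName()); // "TS"
    System.out.println(FileSinkOperator.getOperatorName());  // "FS"
    // "FS%" matches any dispatched FileSinkOperator; a pattern like
    // "TS%.*FS%" would additionally require a TableScan above it on the stack.
    RuleRegExp onEveryFileSink =
        new RuleRegExp("demo", FileSinkOperator.getOperatorName() + "%");
  }
}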
Use of org.apache.hadoop.hive.ql.lib.SemanticRule in project hive by apache.
The class TablePropertyEnrichmentOptimizer, method transform.
@Override
public ParseContext transform(ParseContext pctx) throws SemanticException {
  LOG.info("TablePropertyEnrichmentOptimizer::transform().");
  Map<SemanticRule, SemanticNodeProcessor> opRules = Maps.newLinkedHashMap();
  opRules.put(new RuleRegExp("R1", TableScanOperator.getOperatorName() + "%"), new Processor());
  WalkerCtx context = new WalkerCtx(pctx.getConf());
  SemanticDispatcher disp = new DefaultRuleDispatcher(null, opRules, context);
  List<Node> topNodes = Lists.newArrayList();
  topNodes.addAll(pctx.getTopOps().values());
  SemanticGraphWalker walker = new PreOrderWalker(disp);
  walker.startWalking(topNodes, null);
  LOG.info("TablePropertyEnrichmentOptimizer::transform() complete!");
  return pctx;
}
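The WalkerCtx passed to the dispatcher here is how the Processor gets at the configuration when a rule fires. NodeProcessorCtx is a marker interface with no methods, so a context class is simply a holder for whatever shared state the processors need. A hedged sketch of the pattern (ExampleWalkerCtx is illustrative, not the optimizer's actual WalkerCtx):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;

// Illustrative context object: processors cast their NodeProcessorCtx
// argument back to this type to reach the shared state.
public class ExampleWalkerCtx implements NodeProcessorCtx {
  private final Configuration conf;

  public ExampleWalkerCtx(Configuration conf) {
    this.conf = conf;
  }

  public Configuration getConf() {
    return conf;
  }
}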
Use of org.apache.hadoop.hive.ql.lib.SemanticRule in project hive by apache.
The class UnionProcessor, method transform.
/**
 * Transform the query tree. For each union, store the fact whether both the
 * sub-queries are map-only.
 *
 * @param pCtx
 *          the current parse context
 */
public ParseContext transform(ParseContext pCtx) throws SemanticException {
  // create a walker which walks the tree in a BFS manner while maintaining
  // the operator stack.
  Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<SemanticRule, SemanticNodeProcessor>();
  opRules.put(new RuleRegExp("R1", ReduceSinkOperator.getOperatorName() + "%.*" + UnionOperator.getOperatorName() + "%"), UnionProcFactory.getMapRedUnion());
  opRules.put(new RuleRegExp("R2", UnionOperator.getOperatorName() + "%.*" + UnionOperator.getOperatorName() + "%"), UnionProcFactory.getUnknownUnion());
  opRules.put(new RuleRegExp("R3", TableScanOperator.getOperatorName() + "%.*" + UnionOperator.getOperatorName() + "%"), UnionProcFactory.getMapUnion());
  // The dispatcher fires the processor for the matching rule and passes the
  // context along.
  UnionProcContext uCtx = new UnionProcContext();
  uCtx.setParseContext(pCtx);
  SemanticDispatcher disp = new DefaultRuleDispatcher(UnionProcFactory.getNoUnion(), opRules, uCtx);
  LevelOrderWalker ogw = new LevelOrderWalker(disp);
  ogw.setNodeTypes(UnionOperator.class);
  // Create a list of topop nodes
  ArrayList<Node> topNodes = new ArrayList<Node>();
  topNodes.addAll(pCtx.getTopOps().values());
  ogw.startWalking(topNodes, null);
  pCtx.setUCtx(uCtx);
  // Walk the tree again to see if the union can be removed completely
  HiveConf conf = pCtx.getConf();
  opRules.clear();
  if (conf.getBoolVar(HiveConf.ConfVars.HIVE_OPTIMIZE_UNION_REMOVE)
      && !conf.getVar(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("spark")) {
    opRules.put(new RuleRegExp("R5", UnionOperator.getOperatorName() + "%" + ".*" + FileSinkOperator.getOperatorName() + "%"), UnionProcFactory.getUnionNoProcessFile());
    disp = new DefaultRuleDispatcher(UnionProcFactory.getNoUnion(), opRules, uCtx);
    ogw = new LevelOrderWalker(disp);
    ogw.setNodeTypes(FileSinkOperator.class);
    // Create a list of topop nodes
    topNodes.clear();
    topNodes.addAll(pCtx.getTopOps().values());
    ogw.startWalking(topNodes, null);
  }
  return pCtx;
}
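Unlike the transforms above, this dispatcher is given UnionProcFactory.getNoUnion() as a default processor, which fires for any walked node that matches no rule. The second walk is gated on HiveConf.ConfVars.HIVE_OPTIMIZE_UNION_REMOVE (hive.optimize.union.remove) and on the execution engine not being Spark. A minimal sketch of toggling that flag programmatically (the ConfVars entry is the one used above; the standalone main is illustrative):

import org.apache.hadoop.hive.conf.HiveConf;

public class UnionRemoveConfigDemo {
  public static void main(String[] args) {
    HiveConf conf = new HiveConf();
    // Enables the union-removal pass; UnionProcessor.transform() also
    // checks that the execution engine is not "spark" before running it.
    conf.setBoolVar(HiveConf.ConfVars.HIVE_OPTIMIZE_UNION_REMOVE, true);
    System.out.println(conf.getBoolVar(HiveConf.ConfVars.HIVE_OPTIMIZE_UNION_REMOVE));
  }
}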
Use of org.apache.hadoop.hive.ql.lib.SemanticRule in project hive by apache.
The class NullScanOptimizer, method resolve.
@Override
public PhysicalContext resolve(PhysicalContext pctx) throws SemanticException {
  // First pass: find table scans whose downstream filter is always false.
  Map<SemanticRule, SemanticNodeProcessor> opRules = new LinkedHashMap<>();
  opRules.put(new RuleRegExp("R1", TableScanOperator.getOperatorName() + "%.*" + FilterOperator.getOperatorName() + "%"), new WhereFalseProcessor());
  SemanticDispatcher disp = new NullScanTaskDispatcher(pctx, opRules);
  SemanticGraphWalker ogw = new DefaultGraphWalker(disp);
  List<Node> topNodes = new ArrayList<>(pctx.getRootTasks());
  ogw.startWalking(topNodes, null);
  opRules.clear();
  // Second pass: mark table scans (TSMarker) and detect LIMIT 0 (Limit0Processor).
  opRules.put(new RuleRegExp("R1", TableScanOperator.getOperatorName() + "%"), new TSMarker());
  opRules.put(new RuleRegExp("R2", LimitOperator.getOperatorName() + "%"), new Limit0Processor());
  disp = new NullScanTaskDispatcher(pctx, opRules);
  ogw = new DefaultGraphWalker(disp);
  topNodes = new ArrayList<>(pctx.getRootTasks());
  ogw.startWalking(topNodes, null);
  return pctx;
}
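The second pass pairs a TSMarker, which records the TableScanOperators it visits, with a Limit0Processor. As a hedged sketch of the latter's job (an illustrative class, not Hive's actual Limit0Processor, and assuming LimitDesc exposes the row limit via getLimit()):

import java.util.Stack;

import org.apache.hadoop.hive.ql.exec.LimitOperator;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor;
import org.apache.hadoop.hive.ql.parse.SemanticException;

// Illustrative sketch: detect LIMIT 0, which guarantees an empty result,
// so the table scans feeding it can be replaced with null scans.
public class ExampleLimit0Processor implements SemanticNodeProcessor {
  @Override
  public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
      Object... nodeOutputs) throws SemanticException {
    LimitOperator limit = (LimitOperator) nd;
    if (limit.getConf().getLimit() == 0) {
      // A real processor would flag the marked TableScanOperators here.
    }
    return null;
  }
}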