Use of org.apache.hadoop.hive.ql.lib.SubqueryExpressionWalker in project hive by apache.
From the class TypeCheckProcFactory, method genExprNode:
protected Map<ASTNode, T> genExprNode(ASTNode expr, TypeCheckCtx tcCtx) throws SemanticException {
// Create the walker, the rules dispatcher and the context.
// The walker traverses the expression tree in a DFS manner while maintaining
// the operator stack; the dispatcher generates the plan from the operator tree.
SetMultimap<Integer, SemanticNodeProcessor> astNodeToProcessor = HashMultimap.create();
astNodeToProcessor.put(HiveParser.TOK_NULL, getNullExprProcessor());
astNodeToProcessor.put(HiveParser.TOK_PARAMETER, getDynamicParameterProcessor());
astNodeToProcessor.put(HiveParser.Number, getNumExprProcessor());
astNodeToProcessor.put(HiveParser.IntegralLiteral, getNumExprProcessor());
astNodeToProcessor.put(HiveParser.NumberLiteral, getNumExprProcessor());
astNodeToProcessor.put(HiveParser.Identifier, getStrExprProcessor());
astNodeToProcessor.put(HiveParser.StringLiteral, getStrExprProcessor());
astNodeToProcessor.put(HiveParser.TOK_CHARSETLITERAL, getStrExprProcessor());
astNodeToProcessor.put(HiveParser.TOK_STRINGLITERALSEQUENCE, getStrExprProcessor());
astNodeToProcessor.put(HiveParser.KW_IF, getStrExprProcessor());
astNodeToProcessor.put(HiveParser.KW_CASE, getStrExprProcessor());
astNodeToProcessor.put(HiveParser.KW_WHEN, getStrExprProcessor());
astNodeToProcessor.put(HiveParser.KW_IN, getStrExprProcessor());
astNodeToProcessor.put(HiveParser.KW_ARRAY, getStrExprProcessor());
astNodeToProcessor.put(HiveParser.KW_MAP, getStrExprProcessor());
astNodeToProcessor.put(HiveParser.KW_STRUCT, getStrExprProcessor());
astNodeToProcessor.put(HiveParser.KW_EXISTS, getStrExprProcessor());
astNodeToProcessor.put(HiveParser.TOK_SUBQUERY_OP_NOTIN, getStrExprProcessor());
astNodeToProcessor.put(HiveParser.KW_TRUE, getBoolExprProcessor());
astNodeToProcessor.put(HiveParser.KW_FALSE, getBoolExprProcessor());
astNodeToProcessor.put(HiveParser.TOK_DATELITERAL, getDateTimeExprProcessor());
astNodeToProcessor.put(HiveParser.TOK_TIMESTAMPLITERAL, getDateTimeExprProcessor());
astNodeToProcessor.put(HiveParser.TOK_TIMESTAMPLOCALTZLITERAL, getDateTimeExprProcessor());
astNodeToProcessor.put(HiveParser.TOK_INTERVAL_YEAR_MONTH_LITERAL, getIntervalExprProcessor());
astNodeToProcessor.put(HiveParser.TOK_INTERVAL_DAY_TIME_LITERAL, getIntervalExprProcessor());
astNodeToProcessor.put(HiveParser.TOK_INTERVAL_YEAR_LITERAL, getIntervalExprProcessor());
astNodeToProcessor.put(HiveParser.TOK_INTERVAL_MONTH_LITERAL, getIntervalExprProcessor());
astNodeToProcessor.put(HiveParser.TOK_INTERVAL_DAY_LITERAL, getIntervalExprProcessor());
astNodeToProcessor.put(HiveParser.TOK_INTERVAL_HOUR_LITERAL, getIntervalExprProcessor());
astNodeToProcessor.put(HiveParser.TOK_INTERVAL_MINUTE_LITERAL, getIntervalExprProcessor());
astNodeToProcessor.put(HiveParser.TOK_INTERVAL_SECOND_LITERAL, getIntervalExprProcessor());
astNodeToProcessor.put(HiveParser.TOK_TABLE_OR_COL, getColumnExprProcessor());
astNodeToProcessor.put(HiveParser.TOK_SUBQUERY_EXPR, getSubQueryExprProcessor());
astNodeToProcessor.put(HiveParser.TOK_ALIAS, getValueAliasProcessor());
// The dispatcher fires the processor corresponding to the closest matching
// rule and passes the context along
SemanticDispatcher disp = new CostLessRuleDispatcher(getDefaultExprProcessor(), astNodeToProcessor, tcCtx);
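// The walker drives a depth-first traversal of the expression AST and invokes
// the dispatcher at each visited node.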
SemanticGraphWalker ogw = new SubqueryExpressionWalker(disp);
// Create a list of top nodes
ArrayList<Node> topNodes = Lists.<Node>newArrayList(expr);
HashMap<Node, Object> nodeOutputs = new LinkedHashMap<Node, Object>();
ogw.startWalking(topNodes, nodeOutputs);
return convert(nodeOutputs);
}
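The method above registers one processor per AST token type in a Guava SetMultimap, wires a default processor through a rule dispatcher, and lets the walker visit every node of the expression tree, collecting one output per node. The following is a minimal, self-contained sketch of that same dispatch-and-walk pattern, assuming only Guava on the classpath. It is not the Hive API: ToyNode, NodeProcessor, DispatchWalkSketch, TOK_NUMBER and TOK_PLUS are made-up names for illustration, and the child-first traversal shown here only approximates what Hive's graph walkers (including the subquery-aware one) do.

import com.google.common.collect.HashMultimap;
import com.google.common.collect.SetMultimap;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class DispatchWalkSketch {

  // Hypothetical AST node: an integer token type plus children.
  static final class ToyNode {
    final int type;
    final List<ToyNode> children = new ArrayList<>();
    ToyNode(int type) { this.type = type; }
  }

  // Hypothetical per-node processor, playing the role of a node processor
  // registered against a token type.
  interface NodeProcessor {
    Object process(ToyNode node, Map<ToyNode, Object> childOutputs);
  }

  // Stand-ins for parser token ids such as HiveParser.Number.
  static final int TOK_NUMBER = 1;
  static final int TOK_PLUS = 2;

  public static void main(String[] args) {
    // Register processors by token type, analogous to
    // astNodeToProcessor.put(HiveParser.<TOKEN>, <processor>).
    SetMultimap<Integer, NodeProcessor> processors = HashMultimap.create();
    processors.put(TOK_NUMBER, (node, outs) -> 42);               // pretend literal value
    processors.put(TOK_PLUS, (node, outs) -> node.children.stream()
        .mapToInt(c -> (Integer) outs.get(c)).sum());             // sum the child outputs

    NodeProcessor defaultProcessor = (node, outs) -> null;        // fallback, like a default expr processor

    // Build a tiny tree: (+ 42 42)
    ToyNode plus = new ToyNode(TOK_PLUS);
    plus.children.add(new ToyNode(TOK_NUMBER));
    plus.children.add(new ToyNode(TOK_NUMBER));

    Map<ToyNode, Object> nodeOutputs = new LinkedHashMap<>();
    walk(plus, processors, defaultProcessor, nodeOutputs);
    System.out.println(nodeOutputs.get(plus));                    // prints 84
  }

  // Child-first DFS: process children, then dispatch the node itself, so each
  // processor sees the outputs already computed for its children.
  static void walk(ToyNode node, SetMultimap<Integer, NodeProcessor> processors,
                   NodeProcessor defaultProcessor, Map<ToyNode, Object> outputs) {
    for (ToyNode child : node.children) {
      walk(child, processors, defaultProcessor, outputs);
    }
    NodeProcessor p = processors.get(node.type).stream().findFirst().orElse(defaultProcessor);
    outputs.put(node, p.process(node, outputs));
  }
}

In the real method, the per-node outputs accumulated in nodeOutputs are converted into the ASTNode-to-expression map that genExprNode returns.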