Use of org.apache.hadoop.hive.ql.lib.SemanticGraphWalker in project hive by apache.
The class TypeCheckProcFactory, method genExprNode.
protected Map<ASTNode, T> genExprNode(ASTNode expr, TypeCheckCtx tcCtx) throws SemanticException {
  // Create the walker, the rule dispatcher and the context. The walker
  // traverses the tree in DFS order while maintaining the operator stack;
  // the dispatcher generates the plan from the operator tree.
  SetMultimap<Integer, SemanticNodeProcessor> astNodeToProcessor = HashMultimap.create();
  astNodeToProcessor.put(HiveParser.TOK_NULL, getNullExprProcessor());
  astNodeToProcessor.put(HiveParser.TOK_PARAMETER, getDynamicParameterProcessor());
  astNodeToProcessor.put(HiveParser.Number, getNumExprProcessor());
  astNodeToProcessor.put(HiveParser.IntegralLiteral, getNumExprProcessor());
  astNodeToProcessor.put(HiveParser.NumberLiteral, getNumExprProcessor());
  astNodeToProcessor.put(HiveParser.Identifier, getStrExprProcessor());
  astNodeToProcessor.put(HiveParser.StringLiteral, getStrExprProcessor());
  astNodeToProcessor.put(HiveParser.TOK_CHARSETLITERAL, getStrExprProcessor());
  astNodeToProcessor.put(HiveParser.TOK_STRINGLITERALSEQUENCE, getStrExprProcessor());
  astNodeToProcessor.put(HiveParser.KW_IF, getStrExprProcessor());
  astNodeToProcessor.put(HiveParser.KW_CASE, getStrExprProcessor());
  astNodeToProcessor.put(HiveParser.KW_WHEN, getStrExprProcessor());
  astNodeToProcessor.put(HiveParser.KW_IN, getStrExprProcessor());
  astNodeToProcessor.put(HiveParser.KW_ARRAY, getStrExprProcessor());
  astNodeToProcessor.put(HiveParser.KW_MAP, getStrExprProcessor());
  astNodeToProcessor.put(HiveParser.KW_STRUCT, getStrExprProcessor());
  astNodeToProcessor.put(HiveParser.KW_EXISTS, getStrExprProcessor());
  astNodeToProcessor.put(HiveParser.TOK_SUBQUERY_OP_NOTIN, getStrExprProcessor());
  astNodeToProcessor.put(HiveParser.KW_TRUE, getBoolExprProcessor());
  astNodeToProcessor.put(HiveParser.KW_FALSE, getBoolExprProcessor());
  astNodeToProcessor.put(HiveParser.TOK_DATELITERAL, getDateTimeExprProcessor());
  astNodeToProcessor.put(HiveParser.TOK_TIMESTAMPLITERAL, getDateTimeExprProcessor());
  astNodeToProcessor.put(HiveParser.TOK_TIMESTAMPLOCALTZLITERAL, getDateTimeExprProcessor());
  astNodeToProcessor.put(HiveParser.TOK_INTERVAL_YEAR_MONTH_LITERAL, getIntervalExprProcessor());
  astNodeToProcessor.put(HiveParser.TOK_INTERVAL_DAY_TIME_LITERAL, getIntervalExprProcessor());
  astNodeToProcessor.put(HiveParser.TOK_INTERVAL_YEAR_LITERAL, getIntervalExprProcessor());
  astNodeToProcessor.put(HiveParser.TOK_INTERVAL_MONTH_LITERAL, getIntervalExprProcessor());
  astNodeToProcessor.put(HiveParser.TOK_INTERVAL_DAY_LITERAL, getIntervalExprProcessor());
  astNodeToProcessor.put(HiveParser.TOK_INTERVAL_HOUR_LITERAL, getIntervalExprProcessor());
  astNodeToProcessor.put(HiveParser.TOK_INTERVAL_MINUTE_LITERAL, getIntervalExprProcessor());
  astNodeToProcessor.put(HiveParser.TOK_INTERVAL_SECOND_LITERAL, getIntervalExprProcessor());
  astNodeToProcessor.put(HiveParser.TOK_TABLE_OR_COL, getColumnExprProcessor());
  astNodeToProcessor.put(HiveParser.TOK_SUBQUERY_EXPR, getSubQueryExprProcessor());
  astNodeToProcessor.put(HiveParser.TOK_ALIAS, getValueAliasProcessor());
  // The dispatcher fires the processor corresponding to the closest matching
  // rule and passes the context along.
  SemanticDispatcher disp = new CostLessRuleDispatcher(getDefaultExprProcessor(), astNodeToProcessor, tcCtx);
  SemanticGraphWalker ogw = new SubqueryExpressionWalker(disp);
  // Create a list of top nodes.
  ArrayList<Node> topNodes = Lists.<Node>newArrayList(expr);
  HashMap<Node, Object> nodeOutputs = new LinkedHashMap<Node, Object>();
  ogw.startWalking(topNodes, nodeOutputs);
  return convert(nodeOutputs);
}
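Every snippet on this page follows the same protocol: map node types to processors, wrap the mapping in a SemanticDispatcher, hand the dispatcher to a SemanticGraphWalker, and read per-node results out of the map passed to startWalking. A minimal sketch of that skeleton, assuming the same Hive imports as the snippets here (the helper name walkWithDefault is hypothetical):

// Hypothetical helper distilling the dispatcher/walker pattern shown above.
// With an empty rule map, every node falls through to the default processor.
private Map<Node, Object> walkWithDefault(Node root, SemanticNodeProcessor defaultProc)
    throws SemanticException {
  SemanticDispatcher disp = new DefaultRuleDispatcher(defaultProc,
      Collections.<SemanticRule, SemanticNodeProcessor>emptyMap(), null);
  // DefaultGraphWalker dispatches a node only after all of its children have
  // been dispatched, so a processor can rely on its children's outputs.
  SemanticGraphWalker walker = new DefaultGraphWalker(disp);
  HashMap<Node, Object> nodeOutputs = new LinkedHashMap<Node, Object>();
  walker.startWalking(Lists.<Node>newArrayList(root), nodeOutputs);
  return nodeOutputs;
}

genExprNode above opts for CostLessRuleDispatcher instead, which selects a processor directly by the AST token type held in the SetMultimap<Integer, SemanticNodeProcessor> rather than evaluating rule patterns against each node; the tests below pass an empty rule map, so their DefaultRuleDispatcher routes every node to the AccumuloRangeGenerator.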
Use of org.apache.hadoop.hive.ql.lib.SemanticGraphWalker in project hive by apache.
The class TestAccumuloRangeGenerator, method testRangeOverBooleanIndexedField.
@Test
public void testRangeOverBooleanIndexedField() throws Exception {
  // mgr == true
  ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.booleanTypeInfo, "mgr", null, false);
  ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.booleanTypeInfo, true);
  List<ExprNodeDesc> children = Lists.newArrayList();
  children.add(column);
  children.add(constant);
  ExprNodeDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.intTypeInfo, new GenericUDFOPEqual(), children);
  assertNotNull(node);
  AccumuloRangeGenerator rangeGenerator = new AccumuloRangeGenerator(conf, handler, rowIdMapping, "rid");
  rangeGenerator.setIndexScanner(TestAccumuloDefaultIndexScanner.buildMockHandler(10));
  SemanticDispatcher disp = new DefaultRuleDispatcher(rangeGenerator, Collections.<SemanticRule, SemanticNodeProcessor>emptyMap(), null);
  SemanticGraphWalker ogw = new DefaultGraphWalker(disp);
  ArrayList<Node> topNodes = new ArrayList<Node>();
  topNodes.add(node);
  HashMap<Node, Object> nodeOutput = new HashMap<Node, Object>();
  try {
    ogw.startWalking(topNodes, nodeOutput);
  } catch (SemanticException ex) {
    throw new RuntimeException(ex);
  }
  // The filter uses an index, which should match 2 rows.
  Object result = nodeOutput.get(node);
  if (result instanceof List) {
    List results = (List) result;
    Assert.assertEquals(2, results.size());
    Assert.assertTrue("does not contain row1", results.contains(new Range("row1")));
    Assert.assertTrue("does not contain row3", results.contains(new Range("row3")));
  } else {
    Assert.fail("Results not a list");
  }
}
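The mock index scanner installed via TestAccumuloDefaultIndexScanner.buildMockHandler(10) is what lets the equality on the indexed mgr column narrow to exact rows rather than a full scan; the generator answers with Accumulo Range objects, and the single-argument Range("row1") constructor denotes the range covering exactly that one row.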
Use of org.apache.hadoop.hive.ql.lib.SemanticGraphWalker in project hive by apache.
The class TestAccumuloRangeGenerator, method testPartialRangeConjunction.
@Test
public void testPartialRangeConjunction() throws Exception {
  // rowId >= 'f'
  ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
  ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "f");
  List<ExprNodeDesc> children = Lists.newArrayList();
  children.add(column);
  children.add(constant);
  ExprNodeDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqualOrGreaterThan(), children);
  assertNotNull(node);
  // anythingElse <= 'foo'
  ExprNodeDesc column2 = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "anythingElse", null, false);
  ExprNodeDesc constant2 = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "foo");
  List<ExprNodeDesc> children2 = Lists.newArrayList();
  children2.add(column2);
  children2.add(constant2);
  ExprNodeDesc node2 = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqualOrLessThan(), children2);
  assertNotNull(node2);
  // And UDF
  List<ExprNodeDesc> bothFilters = Lists.newArrayList();
  bothFilters.add(node);
  bothFilters.add(node2);
  ExprNodeGenericFuncDesc both = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPAnd(), bothFilters);
  // Should generate [f,+inf)
  List<Range> expectedRanges = Arrays.asList(new Range(new Key("f"), true, null, false));
  AccumuloRangeGenerator rangeGenerator = new AccumuloRangeGenerator(conf, handler, rowIdMapping, "rid");
  SemanticDispatcher disp = new DefaultRuleDispatcher(rangeGenerator, Collections.<SemanticRule, SemanticNodeProcessor>emptyMap(), null);
  SemanticGraphWalker ogw = new DefaultGraphWalker(disp);
  ArrayList<Node> topNodes = new ArrayList<Node>();
  topNodes.add(both);
  HashMap<Node, Object> nodeOutput = new HashMap<Node, Object>();
  try {
    ogw.startWalking(topNodes, nodeOutput);
  } catch (SemanticException ex) {
    throw new RuntimeException(ex);
  }
  Object result = nodeOutput.get(both);
  Assert.assertNotNull(result);
  Assert.assertTrue("Result from graph walk was not a List", result instanceof List);
  @SuppressWarnings("unchecked") List<Range> actualRanges = (List<Range>) result;
  Assert.assertEquals(expectedRanges, actualRanges);
}
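Range(new Key("f"), true, null, false) is how Accumulo spells [f, +inf): an inclusive start key and a null, i.e. unbounded, end key. The anythingElse <= 'foo' predicate does not touch the row id, so only the rid >= 'f' half of the conjunction contributes to the generated range.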
Use of org.apache.hadoop.hive.ql.lib.SemanticGraphWalker in project hive by apache.
The class TestAccumuloRangeGenerator, method testCastExpression.
@Test
public void testCastExpression() throws Exception {
  // 40 and 50
  ExprNodeDesc fourty = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 40);
  ExprNodeDesc fifty = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 50);
  // +
  GenericUDFOPPlus plus = new GenericUDFOPPlus();
  // 40 + 50
  ExprNodeGenericFuncDesc addition = new ExprNodeGenericFuncDesc(TypeInfoFactory.intTypeInfo, plus, Arrays.asList(fourty, fifty));
  // cast(.... as string)
  GenericUDFToString stringCast = new GenericUDFToString();
  // cast (40 + 50 as string)
  ExprNodeGenericFuncDesc cast = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, stringCast, "cast", Collections.<ExprNodeDesc>singletonList(addition));
  ExprNodeDesc key = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "key", null, false);
  ExprNodeGenericFuncDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqualOrGreaterThan(), Arrays.asList(key, cast));
  AccumuloRangeGenerator rangeGenerator = new AccumuloRangeGenerator(conf, handler, rowIdMapping, "key");
  SemanticDispatcher disp = new DefaultRuleDispatcher(rangeGenerator, Collections.<SemanticRule, SemanticNodeProcessor>emptyMap(), null);
  SemanticGraphWalker ogw = new DefaultGraphWalker(disp);
  ArrayList<Node> topNodes = new ArrayList<Node>();
  topNodes.add(node);
  HashMap<Node, Object> nodeOutput = new HashMap<Node, Object>();
  try {
    ogw.startWalking(topNodes, nodeOutput);
  } catch (SemanticException ex) {
    throw new RuntimeException(ex);
  }
  // Don't fail -- would be better to actually compute a range of [90,+inf)
  Object result = nodeOutput.get(node);
  Assert.assertNull(result);
}
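When one side of a comparison is hidden behind an expression the generator cannot evaluate, here a cast of an arithmetic expression, it yields null for that node instead of failing. The test only asserts the null, but presumably a null result means no ranges are pushed down and the predicate is applied as an ordinary filter; as the comment notes, constant-folding cast(40 + 50 as string) to "90" would allow the tighter answer [90, +inf).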
Use of org.apache.hadoop.hive.ql.lib.SemanticGraphWalker in project hive by apache.
The class TestAccumuloRangeGenerator, method testRangeDisjunction.
@Test
public void testRangeDisjunction() throws Exception {
  // rowId >= 'f'
  ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
  ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "f");
  List<ExprNodeDesc> children = Lists.newArrayList();
  children.add(column);
  children.add(constant);
  ExprNodeDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqualOrGreaterThan(), children);
  assertNotNull(node);
  // rowId <= 'm'
  ExprNodeDesc column2 = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
  ExprNodeDesc constant2 = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "m");
  List<ExprNodeDesc> children2 = Lists.newArrayList();
  children2.add(column2);
  children2.add(constant2);
  ExprNodeDesc node2 = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqualOrLessThan(), children2);
  assertNotNull(node2);
  // Or UDF
  List<ExprNodeDesc> bothFilters = Lists.newArrayList();
  bothFilters.add(node);
  bothFilters.add(node2);
  ExprNodeGenericFuncDesc both = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPOr(), bothFilters);
  // Should generate (-inf,+inf)
  List<Range> expectedRanges = Arrays.asList(new Range());
  AccumuloRangeGenerator rangeGenerator = new AccumuloRangeGenerator(conf, handler, rowIdMapping, "rid");
  SemanticDispatcher disp = new DefaultRuleDispatcher(rangeGenerator, Collections.<SemanticRule, SemanticNodeProcessor>emptyMap(), null);
  SemanticGraphWalker ogw = new DefaultGraphWalker(disp);
  ArrayList<Node> topNodes = new ArrayList<Node>();
  topNodes.add(both);
  HashMap<Node, Object> nodeOutput = new HashMap<Node, Object>();
  try {
    ogw.startWalking(topNodes, nodeOutput);
  } catch (SemanticException ex) {
    throw new RuntimeException(ex);
  }
  Object result = nodeOutput.get(both);
  Assert.assertNotNull(result);
  Assert.assertTrue("Result from graph walk was not a List", result instanceof List);
  @SuppressWarnings("unchecked") List<Range> actualRanges = (List<Range>) result;
  Assert.assertEquals(expectedRanges, actualRanges);
}
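Since rid >= 'f' OR rid <= 'm' is satisfied by every possible row id, the two half-open ranges collapse into the no-argument Range(), Accumulo's representation of the full (-inf, +inf) scan.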