Use of org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc in project hive by apache.
From the class TestAccumuloPredicateHandler, method testIgnoreIteratorPushdown:
@Test
public void testIgnoreIteratorPushdown() throws TooManyAccumuloColumnsException {
    // Override what's placed in the Configuration by setup()
    conf = new JobConf();
    List<String> columnNames = Arrays.asList("field1", "field2", "rid");
    List<TypeInfo> columnTypes = Arrays.<TypeInfo>asList(TypeInfoFactory.stringTypeInfo,
        TypeInfoFactory.intTypeInfo, TypeInfoFactory.stringTypeInfo);
    conf.set(serdeConstants.LIST_COLUMNS, Joiner.on(',').join(columnNames));
    conf.set(serdeConstants.LIST_COLUMN_TYPES, "string,int,string");
    String columnMappingStr = "cf:f1,cf:f2,:rowID";
    conf.set(AccumuloSerDeParameters.COLUMN_MAPPINGS, columnMappingStr);
    columnMapper = new ColumnMapper(columnMappingStr, ColumnEncoding.STRING.getName(),
        columnNames, columnTypes);
    // field1 <= 'aaa'
    ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "field1", null, false);
    ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "aaa");
    List<ExprNodeDesc> children = Lists.newArrayList();
    children.add(column);
    children.add(constant);
    ExprNodeDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
        new GenericUDFOPEqualOrLessThan(), children);
    assertNotNull(node);
    // field2 > 5
    ExprNodeDesc column2 = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "field2", null, false);
    ExprNodeDesc constant2 = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 5);
    List<ExprNodeDesc> children2 = Lists.newArrayList();
    children2.add(column2);
    children2.add(constant2);
    ExprNodeDesc node2 = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
        new GenericUDFOPGreaterThan(), children2);
    assertNotNull(node2);
    // field1 <= 'aaa' AND field2 > 5
    List<ExprNodeDesc> bothFilters = Lists.newArrayList();
    bothFilters.add(node);
    bothFilters.add(node2);
    ExprNodeGenericFuncDesc both = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
        new GenericUDFOPAnd(), bothFilters);
    String filterExpr = SerializationUtilities.serializeExpression(both);
    conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);
    // Pushdown disabled: no iterators should be generated for the filter
    conf.setBoolean(AccumuloSerDeParameters.ITERATOR_PUSHDOWN_KEY, false);
    try {
        List<IteratorSetting> iterators = handler.getIterators(conf, columnMapper);
        assertEquals(0, iterators.size());
    } catch (Exception e) {
        fail(StringUtils.stringifyException(e));
    }
}
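For contrast, here is a minimal sketch of the opposite setting. This is an assumption extrapolated from the flag's name, not a quoted Hive test: with ITERATOR_PUSHDOWN_KEY left at its default of true, the same serialized filter should let the handler turn each of the two predicates above into an IteratorSetting rather than returning an empty list.

// Hypothetical counterpart, assuming one iterator per pushed-down predicate
conf.setBoolean(AccumuloSerDeParameters.ITERATOR_PUSHDOWN_KEY, true);
List<IteratorSetting> iterators = handler.getIterators(conf, columnMapper);
assertEquals(2, iterators.size());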
Use of org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc in project hive by apache.
From the class TestAccumuloPredicateHandler, method testRangeEqual:
@Test
public void testRangeEqual() throws SerDeException {
    // rid = 'aaa'
    ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
    ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "aaa");
    List<ExprNodeDesc> children = Lists.newArrayList();
    children.add(column);
    children.add(constant);
    ExprNodeGenericFuncDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
        new GenericUDFOPEqual(), children);
    assertNotNull(node);
    String filterExpr = SerializationUtilities.serializeExpression(node);
    conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);
    // Equality on the rowID column should collapse to a single Range
    Collection<Range> ranges = handler.getRanges(conf, columnMapper);
    assertEquals(1, ranges.size());
    Range range = ranges.iterator().next();
    assertTrue(range.isStartKeyInclusive());
    assertFalse(range.isEndKeyInclusive());
    assertTrue(range.contains(new Key(new Text("aaa"))));
    assertTrue(range.afterEndKey(new Key(new Text("aab"))));
    assertTrue(range.beforeStartKey(new Key(new Text("aa"))));
}
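The three boundary assertions pin down the shape of the produced Range: inclusive at the start key, exclusive at the end key, covering exactly one row. That is the same shape as Accumulo's own single-row range, illustrated in the minimal sketch below (my construction from the assertions, not code from the test):

// A Range covering exactly row "aaa": it spans ["aaa", "aaa\0"), so the
// start key is inclusive, the end key is exclusive, "aab" falls after the
// end, and "aa" falls before the start.
Range singleRow = new Range(new Text("aaa"));
assertTrue(singleRow.contains(new Key(new Text("aaa"))));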
Use of org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc in project hive by apache.
From the class TestAccumuloPredicateHandler, method testBinaryRangeGeneration:
@Test
public void testBinaryRangeGeneration() throws Exception {
    List<String> columnNames = Arrays.asList("key", "column");
    List<TypeInfo> columnTypes = Arrays.<TypeInfo>asList(TypeInfoFactory.intTypeInfo,
        TypeInfoFactory.stringTypeInfo);
    conf.set(serdeConstants.LIST_COLUMNS, Joiner.on(',').join(columnNames));
    conf.set(serdeConstants.LIST_COLUMN_TYPES, "int,string");
    // The rowID column uses the binary (#b) encoding
    String columnMappingStr = ":rowID#b,cf:f1";
    conf.set(AccumuloSerDeParameters.COLUMN_MAPPINGS, columnMappingStr);
    columnMapper = new ColumnMapper(columnMappingStr, ColumnEncoding.STRING.getName(),
        columnNames, columnTypes);
    int intValue = 100;
    // Write the binary form of the integer value into a byte array
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    JavaIntObjectInspector intOI = (JavaIntObjectInspector) PrimitiveObjectInspectorFactory
        .getPrimitiveJavaObjectInspector(TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.INT_TYPE_NAME));
    LazyUtils.writePrimitive(baos, intValue, intOI);
    // 100 < key
    ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "key", null, false);
    ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, intValue);
    List<ExprNodeDesc> children = Lists.newArrayList();
    children.add(constant);
    children.add(column);
    ExprNodeGenericFuncDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
        new GenericUDFOPLessThan(), children);
    assertNotNull(node);
    String filterExpr = SerializationUtilities.serializeExpression(node);
    conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);
    // Should make (100, +inf)
    List<Range> ranges = handler.getRanges(conf, columnMapper);
    Assert.assertEquals(1, ranges.size());
    Assert.assertEquals(new Range(new Text(baos.toByteArray()), false, null, false), ranges.get(0));
}
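The #b suffix on :rowID selects binary encoding, so the boundary of the generated Range is the serialized bytes of the int, not the string "100". A small sketch of that assumption (namely, that LazyUtils.writePrimitive emits the int as four big-endian bytes, which is what the Range comparison above relies on):

// Assumption: 100 serializes to the big-endian bytes {0x00, 0x00, 0x00, 0x64}
ByteArrayOutputStream check = new ByteArrayOutputStream();
LazyUtils.writePrimitive(check, 100, intOI);
assertArrayEquals(new byte[] { 0, 0, 0, 100 }, check.toByteArray());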
Use of org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc in project hive by apache.
From the class TestAccumuloPredicateHandler, method rangeGreaterThanOrEqual:
@Test
public void rangeGreaterThanOrEqual() throws SerDeException {
    // rid >= 'aaa'
    ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
    ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "aaa");
    List<ExprNodeDesc> children = Lists.newArrayList();
    children.add(column);
    children.add(constant);
    ExprNodeGenericFuncDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
        new GenericUDFOPEqualOrGreaterThan(), children);
    assertNotNull(node);
    String filterExpr = SerializationUtilities.serializeExpression(node);
    conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);
    Collection<Range> ranges = handler.getRanges(conf, columnMapper);
    assertEquals(1, ranges.size());
    Range range = ranges.iterator().next();
    assertTrue(range.isStartKeyInclusive());
    assertFalse(range.isEndKeyInclusive());
    assertTrue(range.contains(new Key(new Text("aaa"))));
    assertFalse(range.afterEndKey(new Key(new Text("ccccc"))));
    assertTrue(range.contains(new Key(new Text("aab"))));
    assertTrue(range.beforeStartKey(new Key(new Text("aa"))));
}
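Read together, the assertions describe the half-open, upper-unbounded range ["aaa", +inf). As a hedged illustration (my reading of the assertions, not code from the test), the same shape can be built directly with the Range constructor already used in testBinaryRangeGeneration above:

// ["aaa", +inf): inclusive start row, null end row means unbounded above
Range expected = new Range(new Text("aaa"), true, null, false);
assertTrue(expected.isStartKeyInclusive());
assertTrue(expected.contains(new Key(new Text("zzzz"))));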
Use of org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc in project mongo-hadoop by mongodb.
From the class HiveMongoInputFormatTest, method testTranslateConjoinedQuery:
@Test
public void testTranslateConjoinedQuery() {
    // i < 50
    GenericUDFOPLessThan lt = new GenericUDFOPLessThan();
    ExprNodeDesc[] iLt50Children = { new ExprNodeColumnDesc(new SimpleMockColumnInfo("i")),
        new ExprNodeConstantDesc(50) };
    ExprNodeGenericFuncDesc iLt50 = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,
        lt, Arrays.asList(iLt50Children));
    // j > 20
    GenericUDFOPGreaterThan gt = new GenericUDFOPGreaterThan();
    ExprNodeDesc[] jGt20Children = { new ExprNodeColumnDesc(new SimpleMockColumnInfo("j")),
        new ExprNodeConstantDesc(20) };
    ExprNodeGenericFuncDesc jGt20 = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,
        gt, Arrays.asList(jGt20Children));
    // i < 50 AND j > 20
    ExprNodeDesc[] andExprChildren = { iLt50, jGt20 };
    ExprNodeGenericFuncDesc expr = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,
        new GenericUDFOPAnd(), Arrays.asList(andExprChildren));
    // {"$and": [{"i": {"$lt": 50}}, {"j": {"$gt": 20}}]}
    assertEquals(
        new BasicDBObjectBuilder()
            .push("mongo_i").add("$lt", 50).pop()
            .push("mongo_j").add("$gt", 20).pop()
            .get(),
        filterForExpr(expr));
}
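Note that the expected filter carries no explicit $and: since the two predicates constrain different fields, MongoDB's implicit conjunction of sibling keys suffices, and the expected object implies the Hive columns i and j map to the MongoDB fields mongo_i and mongo_j. A sketch of the same expected document built with plain BasicDBObjects instead of the builder:

// {"mongo_i": {"$lt": 50}, "mongo_j": {"$gt": 20}}, an implicit AND
DBObject expected = new BasicDBObject("mongo_i", new BasicDBObject("$lt", 50))
    .append("mongo_j", new BasicDBObject("$gt", 20));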