Use of org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc in project hive by apache.
From the class TestAccumuloPredicateHandler, method rangeLessThanOrEqual.
@Test
public void rangeLessThanOrEqual() throws SerDeException {
  // rid <= 'aaa'
  ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
  ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "aaa");
  List<ExprNodeDesc> children = Lists.newArrayList();
  children.add(column);
  children.add(constant);
  ExprNodeGenericFuncDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqualOrLessThan(), children);
  assertNotNull(node);
  String filterExpr = SerializationUtilities.serializeExpression(node);
  conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);
  Collection<Range> ranges = handler.getRanges(conf, columnMapper);
  assertEquals(1, ranges.size());
  Range range = ranges.iterator().next();
  assertTrue(range.isStartKeyInclusive());
  assertFalse(range.isEndKeyInclusive());
  assertTrue(range.contains(new Key(new Text("aaa"))));
  assertTrue(range.afterEndKey(new Key(new Text("ccccc"))));
  assertTrue(range.contains(new Key(new Text("aa"))));
  assertTrue(range.afterEndKey(new Key(new Text("aab"))));
  assertFalse(range.afterEndKey(new Key(new Text("aaa"))));
}
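Read together, the assertions above describe a range equivalent to (-inf, 'aaa\0') with an exclusive end key, the same '\0'-terminated end-key convention used in testRowRangeIntersection below. A minimal hand-built sketch (illustrative only, not the handler's literal output) that satisfies the same checks:

Range lessOrEqualAaa = new Range(null, true, new Key("aaa\0"), false);
assertTrue(lessOrEqualAaa.contains(new Key(new Text("aaa"))));     // the bound row 'aaa' itself is included
assertTrue(lessOrEqualAaa.afterEndKey(new Key(new Text("aab"))));  // anything past 'aaa\0' falls outside
assertFalse(lessOrEqualAaa.afterEndKey(new Key(new Text("aaa"))));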
Use of org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc in project hive by apache.
From the class TestAccumuloPredicateHandler, method testIteratorIgnoreRowIDFields.
@Test
public void testIteratorIgnoreRowIDFields() {
  // rid <= 'aaa'
  ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
  ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "aaa");
  List<ExprNodeDesc> children = Lists.newArrayList();
  children.add(column);
  children.add(constant);
  ExprNodeDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqualOrLessThan(), children);
  assertNotNull(node);
  // rid > 'bbb'
  ExprNodeDesc column2 = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
  ExprNodeDesc constant2 = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "bbb");
  List<ExprNodeDesc> children2 = Lists.newArrayList();
  children2.add(column2);
  children2.add(constant2);
  ExprNodeDesc node2 = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPGreaterThan(), children2);
  assertNotNull(node2);
  List<ExprNodeDesc> bothFilters = Lists.newArrayList();
  bothFilters.add(node);
  bothFilters.add(node2);
  ExprNodeGenericFuncDesc both = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPAnd(), bothFilters);
  String filterExpr = SerializationUtilities.serializeExpression(both);
  conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);
  try {
    // Both predicates target the rowId column, so no column-filtering iterators are expected.
    List<IteratorSetting> iterators = handler.getIterators(conf, columnMapper);
    assertEquals(0, iterators.size());
  } catch (SerDeException e) {
    fail(StringUtils.stringifyException(e));
  }
}
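The handler reads the predicate back out of the configuration, so a quick way to sanity-check this plumbing is to round-trip the serialized filter. A short sketch (not part of the original test), assuming the standard SerializationUtilities.deserializeExpression counterpart:

ExprNodeGenericFuncDesc roundTrip = SerializationUtilities.deserializeExpression(filterExpr);
assertTrue(roundTrip.getGenericUDF() instanceof GenericUDFOPAnd);  // top-level AND survives the round trip
assertEquals(2, roundTrip.getChildren().size());                   // both rowId predicates are still attached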
Use of org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc in project hive by apache.
From the class TestAccumuloPredicateHandler, method testRangeGreaterThan.
@Test
public void testRangeGreaterThan() throws SerDeException {
  // rid > 'aaa'
  ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
  ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "aaa");
  List<ExprNodeDesc> children = Lists.newArrayList();
  children.add(column);
  children.add(constant);
  ExprNodeGenericFuncDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPGreaterThan(), children);
  assertNotNull(node);
  String filterExpr = SerializationUtilities.serializeExpression(node);
  conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);
  Collection<Range> ranges = handler.getRanges(conf, columnMapper);
  assertEquals(1, ranges.size());
  Range range = ranges.iterator().next();
  assertTrue(range.isStartKeyInclusive());
  assertFalse(range.isEndKeyInclusive());
  assertFalse(range.contains(new Key(new Text("aaa"))));
  assertFalse(range.afterEndKey(new Key(new Text("ccccc"))));
  assertTrue(range.contains(new Key(new Text("aab"))));
  assertTrue(range.beforeStartKey(new Key(new Text("aa"))));
  assertTrue(range.beforeStartKey(new Key(new Text("aaa"))));
}
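The assertions here are consistent with a range that starts at the first key after row 'aaa' and runs to +infinity. A hand-built equivalent (an illustrative assumption, not necessarily the handler's exact object):

Range greaterThanAaa = new Range(new Key("aaa\0"), true, null, false);
assertFalse(greaterThanAaa.contains(new Key(new Text("aaa"))));  // strict inequality: 'aaa' itself is excluded
assertTrue(greaterThanAaa.contains(new Key(new Text("aab"))));
assertTrue(greaterThanAaa.beforeStartKey(new Key(new Text("aa"))));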
Use of org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc in project hive by apache.
From the class TestAccumuloPredicateHandler, method testRowRangeIntersection.
@Test
public void testRowRangeIntersection() throws SerDeException {
  // rowId >= 'f'
  ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
  ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "f");
  List<ExprNodeDesc> children = Lists.newArrayList();
  children.add(column);
  children.add(constant);
  ExprNodeDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqualOrGreaterThan(), children);
  assertNotNull(node);
  // rowId <= 'm'
  ExprNodeDesc column2 = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
  ExprNodeDesc constant2 = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "m");
  List<ExprNodeDesc> children2 = Lists.newArrayList();
  children2.add(column2);
  children2.add(constant2);
  ExprNodeDesc node2 = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqualOrLessThan(), children2);
  assertNotNull(node2);
  List<ExprNodeDesc> bothFilters = Lists.newArrayList();
  bothFilters.add(node);
  bothFilters.add(node2);
  ExprNodeGenericFuncDesc both = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPAnd(), bothFilters);
  String filterExpr = SerializationUtilities.serializeExpression(both);
  conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);
  // Should make ['f', 'm\0')
  List<Range> ranges = handler.getRanges(conf, columnMapper);
  assertEquals(1, ranges.size());
  assertEquals(new Range(new Key("f"), true, new Key("m\0"), false), ranges.get(0));
}
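The same intersection can be reproduced with Accumulo's Range.clip, which clips one range against another and fails if they do not overlap. The sketch below (not part of the original test) intersects the two single-predicate ranges by hand:

Range geF = new Range(new Key("f"), true, null, false);    // rowId >= 'f'
Range leM = new Range(null, true, new Key("m\0"), false);  // rowId <= 'm'
assertEquals(new Range(new Key("f"), true, new Key("m\0"), false), geF.clip(leM));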
Use of org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc in project hive by apache.
From the class TestAccumuloPredicateHandler, method testRowRangeGeneration.
@Test
public void testRowRangeGeneration() throws SerDeException {
  List<String> columnNames = Arrays.asList("key", "column");
  List<TypeInfo> columnTypes = Arrays.<TypeInfo>asList(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo);
  conf.set(serdeConstants.LIST_COLUMNS, Joiner.on(',').join(columnNames));
  conf.set(serdeConstants.LIST_COLUMN_TYPES, "string,string");
  String columnMappingStr = ":rowID,cf:f1";
  conf.set(AccumuloSerDeParameters.COLUMN_MAPPINGS, columnMappingStr);
  columnMapper = new ColumnMapper(columnMappingStr, ColumnEncoding.STRING.getName(), columnNames, columnTypes);
  // 100 < key
  ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "key", null, false);
  ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 100);
  List<ExprNodeDesc> children = Lists.newArrayList();
  children.add(constant);
  children.add(column);
  ExprNodeGenericFuncDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPLessThan(), children);
  assertNotNull(node);
  String filterExpr = SerializationUtilities.serializeExpression(node);
  conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);
  // Should make (100, +inf)
  List<Range> ranges = handler.getRanges(conf, columnMapper);
  Assert.assertEquals(1, ranges.size());
  Assert.assertEquals(new Range(new Text("100"), false, null, false), ranges.get(0));
}
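Because the rowId column is mapped with string encoding, the bound is the literal text "100" and comparisons are lexicographic. Two extra checks consistent with the expected Range, reusing the ranges variable from the method above (illustrative only, not part of the original test):

Range generated = ranges.get(0);
assertTrue(generated.beforeStartKey(new Key(new Text("100"))));  // the bound row '100' itself is excluded
assertTrue(generated.contains(new Key(new Text("101"))));        // lexicographically after '100'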