Use of org.apache.hadoop.hive.common.type.HiveChar in project hive by apache.
From the class TestParquetFilterPredicate, method testFilterCharColumnEquals:
@Test
public void testFilterCharColumnEquals() throws Exception {
  SearchArgument sarg = SearchArgumentFactory.newBuilder()
      .equals("a", PredicateLeaf.Type.STRING, new HiveChar("apple", 10).toString())
      .build();
  MessageType schema = MessageTypeParser.parseMessageType("message test {required binary a;}");
  Map<String, TypeInfo> columnTypes = new HashMap<>();
  columnTypes.put("a", TypeInfoFactory.getCharTypeInfo(10));
  FilterPredicate p = ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema, columnTypes);
  String expected = "eq(a, Binary{\"apple\"})";
  assertEquals(expected, p.toString());
}
Use of org.apache.hadoop.hive.common.type.HiveChar in project hive by apache.
From the class TestParquetFilterPredicate, method testFilterCharColumnWhiteSpacePostfix:
@Test
public void testFilterCharColumnWhiteSpacePostfix() throws Exception {
  SearchArgument sarg = SearchArgumentFactory.newBuilder()
      .startAnd()
      .lessThan("a", PredicateLeaf.Type.STRING, new HiveChar("apple ", 10).toString())
      .lessThanEquals("b", PredicateLeaf.Type.STRING, new HiveChar("pear ", 10).toString())
      .equals("c", PredicateLeaf.Type.STRING, new HiveChar("orange ", 10).toString())
      .nullSafeEquals("d", PredicateLeaf.Type.STRING, new HiveChar("pineapple ", 10).toString())
      .end().build();
  MessageType schema = MessageTypeParser.parseMessageType(
      "message test { required binary a; required binary b; required binary c; required binary d;}");
  Map<String, TypeInfo> columnTypes = new HashMap<>();
  columnTypes.put("a", TypeInfoFactory.getCharTypeInfo(10));
  columnTypes.put("b", TypeInfoFactory.getCharTypeInfo(10));
  columnTypes.put("c", TypeInfoFactory.getCharTypeInfo(10));
  columnTypes.put("d", TypeInfoFactory.getCharTypeInfo(10));
  FilterPredicate p = ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema, columnTypes);
  // Trailing spaces in the CHAR literals do not appear in the converted predicate.
  String expected = "and(and(and(lt(a, Binary{\"apple\"}), lteq(b, Binary{\"pear\"})), "
      + "eq(c, Binary{\"orange\"})), eq(d, Binary{\"pineapple\"}))";
  assertEquals(expected, p.toString());
}
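Why do the expected predicates contain "apple" rather than the full ten-character CHAR value? HiveChar pads its literal with spaces up to the declared length, and, as the assertions above show, ParquetFilterPredicateConverter drops that padding for CHAR-typed columns when it builds the Parquet predicate. Below is a minimal sketch of the padding side of that behavior, assuming HiveChar's toString() returns the space-padded form and getStrippedValue() removes the trailing blanks; the printed values are my expectation, not output copied from the project.

import org.apache.hadoop.hive.common.type.HiveChar;

public class HiveCharPaddingSketch {
  public static void main(String[] args) {
    // A CHAR(10) literal is stored padded with spaces to its declared length.
    HiveChar c = new HiveChar("apple ", 10);
    System.out.println("[" + c.toString() + "]");         // expected: [apple     ]
    System.out.println("[" + c.getStrippedValue() + "]"); // expected: [apple]
  }
}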
Use of org.apache.hadoop.hive.common.type.HiveChar in project hive by apache.
From the class TestParquetFilterPredicate, method testFilterCharColumnLessThan:
@Test
public void testFilterCharColumnLessThan() throws Exception {
  SearchArgument sarg = SearchArgumentFactory.newBuilder()
      .lessThan("a", PredicateLeaf.Type.STRING, new HiveChar("apple", 10).toString())
      .build();
  MessageType schema = MessageTypeParser.parseMessageType("message test {required binary a;}");
  Map<String, TypeInfo> columnTypes = new HashMap<>();
  columnTypes.put("a", TypeInfoFactory.getCharTypeInfo(10));
  FilterPredicate p = ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema, columnTypes);
  String expected = "lt(a, Binary{\"apple\"})";
  assertEquals(expected, p.toString());
}
Use of org.apache.hadoop.hive.common.type.HiveChar in project hive by apache.
From the class TestParquetFilterPredicate, method testFilterCharColumnNullSafeEquals:
@Test
public void testFilterCharColumnNullSafeEquals() throws Exception {
  SearchArgument sarg = SearchArgumentFactory.newBuilder()
      .nullSafeEquals("a", PredicateLeaf.Type.STRING, new HiveChar("apple", 10).toString())
      .build();
  MessageType schema = MessageTypeParser.parseMessageType("message test {required binary a;}");
  Map<String, TypeInfo> columnTypes = new HashMap<>();
  columnTypes.put("a", TypeInfoFactory.getCharTypeInfo(10));
  FilterPredicate p = ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema, columnTypes);
  String expected = "eq(a, Binary{\"apple\"})";
  assertEquals(expected, p.toString());
}
Use of org.apache.hadoop.hive.common.type.HiveChar in project hive by apache.
From the class TestParquetFilterPredicate, method testFilterFloatColumn:
@Test
public void testFilterFloatColumn() throws Exception {
  SearchArgument sarg = SearchArgumentFactory.newBuilder()
      .startAnd()
      .lessThan("x", PredicateLeaf.Type.LONG, 22L)
      .lessThan("x1", PredicateLeaf.Type.LONG, 22L)
      .lessThanEquals("y", PredicateLeaf.Type.STRING, new HiveChar("hi", 10).toString())
      .equals("z", PredicateLeaf.Type.FLOAT, Double.valueOf(0.22))
      .equals("z1", PredicateLeaf.Type.FLOAT, Double.valueOf(0.22))
      .end().build();
  MessageType schema = MessageTypeParser.parseMessageType(
      "message test { required int32 x; required int32 x1; required binary y; required float z; required float z1;}");
  Map<String, TypeInfo> columnTypes = new HashMap<>();
  columnTypes.put("x", TypeInfoFactory.getPrimitiveTypeInfo("int"));
  columnTypes.put("x1", TypeInfoFactory.getPrimitiveTypeInfo("int"));
  columnTypes.put("y", TypeInfoFactory.getCharTypeInfo(10));
  columnTypes.put("z", TypeInfoFactory.getPrimitiveTypeInfo("float"));
  columnTypes.put("z1", TypeInfoFactory.getPrimitiveTypeInfo("float"));
  FilterPredicate p = ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema, columnTypes);
  String expected = "and(and(and(and(lt(x, 22), lt(x1, 22)), lteq(y, Binary{\"hi\"})), eq(z, 0.22)), eq(z1, 0.22))";
  assertEquals(expected, p.toString());
}
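In a reader, a FilterPredicate produced this way is normally handed to Parquet so that row groups that cannot match are skipped. Below is a rough sketch of that hand-off, assuming the standard parquet-hadoop API (ParquetInputFormat.setFilterPredicate); the actual wiring inside Hive's Parquet record reader may differ.

import org.apache.hadoop.conf.Configuration;
import org.apache.parquet.filter2.predicate.FilterPredicate;
import org.apache.parquet.hadoop.ParquetInputFormat;

public class FilterPredicateUsageSketch {
  // Registers the converted predicate on the job configuration so that the
  // Parquet record readers created for the job can filter with it.
  static void applyFilter(Configuration conf, FilterPredicate p) {
    ParquetInputFormat.setFilterPredicate(conf, p);
  }
}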