Example 71 with FilterPredicate

Use of org.apache.parquet.filter2.predicate.FilterPredicate in project hive by apache.

From the class TestParquetFilterPredicate, method testFilterCharColumnBetween.

@Test
public void testFilterCharColumnBetween() throws Exception {
    SearchArgument sarg = SearchArgumentFactory.newBuilder()
        .between("a", PredicateLeaf.Type.STRING,
            new HiveChar("apple", 10).toString(), new HiveChar("pear", 10).toString())
        .build();
    MessageType schema = MessageTypeParser.parseMessageType("message test {required binary a;}");
    Map<String, TypeInfo> columnTypes = new HashMap<>();
    columnTypes.put("a", TypeInfoFactory.getCharTypeInfo(10));
    FilterPredicate p = ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema, columnTypes);
    String expected = "and(lteq(a, Binary{\"pear\"}), not(lt(a, Binary{\"apple\"})))";
    assertEquals(expected, p.toString());
}
Also used: HashMap (java.util.HashMap), HiveChar (org.apache.hadoop.hive.common.type.HiveChar), SearchArgument (org.apache.hadoop.hive.ql.io.sarg.SearchArgument), FilterPredicate (org.apache.parquet.filter2.predicate.FilterPredicate), TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo), MessageType (org.apache.parquet.schema.MessageType), Test (org.junit.Test)
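For comparison, the same predicate can be assembled by hand with Parquet's FilterApi; a minimal sketch (not part of the Hive test), reusing the column name "a" and the literal values from Example 71:

import org.apache.parquet.filter2.predicate.FilterApi;
import org.apache.parquet.filter2.predicate.FilterPredicate;
import org.apache.parquet.filter2.predicate.Operators.BinaryColumn;
import org.apache.parquet.io.api.Binary;

// Hand-built equivalent of the converted BETWEEN: a <= "pear" AND NOT (a < "apple")
public static FilterPredicate betweenAppleAndPear() {
    BinaryColumn a = FilterApi.binaryColumn("a");
    return FilterApi.and(
        FilterApi.ltEq(a, Binary.fromString("pear")),
        FilterApi.not(FilterApi.lt(a, Binary.fromString("apple"))));
}

Its toString() has the same and(lteq(...), not(lt(...))) shape as the expected string above.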

Example 72 with FilterPredicate

Use of org.apache.parquet.filter2.predicate.FilterPredicate in project hive by apache.

From the class TestParquetFilterPredicate, method testFilterStringColumnWithWhiteSpaces.

@Test
public void testFilterStringColumnWithWhiteSpaces() throws Exception {
    SearchArgument sarg = SearchArgumentFactory.newBuilder()
        .startAnd()
        .lessThan("a", PredicateLeaf.Type.STRING, new HiveVarchar(" apple  ", 10).toString())
        .lessThanEquals("b", PredicateLeaf.Type.STRING, new HiveVarchar(" pear", 10).toString())
        .equals("c", PredicateLeaf.Type.STRING, new HiveVarchar("orange ", 10).toString())
        .end()
        .build();
    MessageType schema = MessageTypeParser.parseMessageType("message test {" + " required binary a; required binary b;" + " required binary c;}");
    Map<String, TypeInfo> columnTypes = new HashMap<>();
    columnTypes.put("a", TypeInfoFactory.getPrimitiveTypeInfo("string"));
    columnTypes.put("b", TypeInfoFactory.getPrimitiveTypeInfo("string"));
    columnTypes.put("c", TypeInfoFactory.getPrimitiveTypeInfo("string"));
    FilterPredicate p = ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema, columnTypes);
    String expected = "and(and(" + "lt(a, Binary{\" apple  \"}), " + "lteq(b, Binary{\" pear\"})), " + "eq(c, Binary{\"orange \"}))";
    assertEquals(expected, p.toString());
}
Also used: HashMap (java.util.HashMap), SearchArgument (org.apache.hadoop.hive.ql.io.sarg.SearchArgument), HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar), FilterPredicate (org.apache.parquet.filter2.predicate.FilterPredicate), TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo), MessageType (org.apache.parquet.schema.MessageType), Test (org.junit.Test)
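Once converted, a FilterPredicate is typically pushed down into a reader so that non-matching row groups and records are skipped. A minimal sketch (not part of the test; the file path and the Avro-based reader are illustrative assumptions):

import org.apache.avro.generic.GenericRecord;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.avro.AvroParquetReader;
import org.apache.parquet.filter2.compat.FilterCompat;
import org.apache.parquet.filter2.predicate.FilterPredicate;
import org.apache.parquet.hadoop.ParquetReader;

public static void scanWithPredicate(FilterPredicate p) throws Exception {
    // Wrap the predicate with FilterCompat and hand it to the reader builder.
    try (ParquetReader<GenericRecord> reader = AvroParquetReader
            .<GenericRecord>builder(new Path("/tmp/strings.parquet"))  // hypothetical file
            .withFilter(FilterCompat.get(p))
            .build()) {
        GenericRecord record;
        while ((record = reader.read()) != null) {
            // Only records that satisfy the pushed-down predicate are returned here.
        }
    }
}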

Example 73 with FilterPredicate

Use of org.apache.parquet.filter2.predicate.FilterPredicate in project hive by apache.

From the class TestParquetFilterPredicate, method testFilterMoreComplexVarCharColumn.

@Test
public void testFilterMoreComplexVarCharColumn() throws Exception {
    // ((a=pear or a<=cherry) and (b=orange)) and (c=banana or d<cherry)
    SearchArgument sarg = SearchArgumentFactory.newBuilder()
        .startAnd()
            .startAnd()
                .startOr()
                    .equals("a", PredicateLeaf.Type.STRING, new HiveVarchar("pear", 10).toString())
                    .lessThanEquals("a", PredicateLeaf.Type.STRING, new HiveVarchar("cherry", 10).toString())
                .end()
                .equals("b", PredicateLeaf.Type.STRING, new HiveVarchar("orange", 10).toString())
            .end()
            .startOr()
                .equals("c", PredicateLeaf.Type.STRING, new HiveVarchar("banana", 10).toString())
                .lessThan("d", PredicateLeaf.Type.STRING, new HiveVarchar("cherry", 10).toString())
            .end()
        .end()
        .build();
    MessageType schema = MessageTypeParser.parseMessageType("message test {" + " required binary a; required binary b;" + " required binary c; required binary d;}");
    Map<String, TypeInfo> columnTypes = new HashMap<>();
    columnTypes.put("a", TypeInfoFactory.getVarcharTypeInfo(10));
    columnTypes.put("b", TypeInfoFactory.getVarcharTypeInfo(10));
    columnTypes.put("c", TypeInfoFactory.getVarcharTypeInfo(10));
    columnTypes.put("d", TypeInfoFactory.getVarcharTypeInfo(10));
    FilterPredicate p = ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema, columnTypes);
    String expected = "and(and(or(eq(a, Binary{\"pear\"}), lteq(a, Binary{\"cherry\"})), " + "eq(b, Binary{\"orange\"})), " + "or(eq(c, Binary{\"banana\"}), lt(d, Binary{\"cherry\"})))";
    assertEquals(expected, p.toString());
}
Also used: HashMap (java.util.HashMap), SearchArgument (org.apache.hadoop.hive.ql.io.sarg.SearchArgument), HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar), FilterPredicate (org.apache.parquet.filter2.predicate.FilterPredicate), TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo), MessageType (org.apache.parquet.schema.MessageType), Test (org.junit.Test)
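The nested and/or structure in the expected string can also be reproduced directly with FilterApi; a minimal sketch (not taken from the test), using the same column names and literals:

import org.apache.parquet.filter2.predicate.FilterApi;
import org.apache.parquet.filter2.predicate.FilterPredicate;
import org.apache.parquet.filter2.predicate.Operators.BinaryColumn;
import org.apache.parquet.io.api.Binary;

// ((a = pear OR a <= cherry) AND b = orange) AND (c = banana OR d < cherry)
public static FilterPredicate complexVarCharPredicate() {
    BinaryColumn a = FilterApi.binaryColumn("a");
    BinaryColumn b = FilterApi.binaryColumn("b");
    BinaryColumn c = FilterApi.binaryColumn("c");
    BinaryColumn d = FilterApi.binaryColumn("d");
    return FilterApi.and(
        FilterApi.and(
            FilterApi.or(
                FilterApi.eq(a, Binary.fromString("pear")),
                FilterApi.ltEq(a, Binary.fromString("cherry"))),
            FilterApi.eq(b, Binary.fromString("orange"))),
        FilterApi.or(
            FilterApi.eq(c, Binary.fromString("banana")),
            FilterApi.lt(d, Binary.fromString("cherry"))));
}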

Example 74 with FilterPredicate

Use of org.apache.parquet.filter2.predicate.FilterPredicate in project hive by apache.

From the class TestParquetFilterPredicate, method testFilterCharColumnEquals.

@Test
public void testFilterCharColumnEquals() throws Exception {
    SearchArgument sarg = SearchArgumentFactory.newBuilder().equals("a", PredicateLeaf.Type.STRING, new HiveChar("apple", 10).toString()).build();
    MessageType schema = MessageTypeParser.parseMessageType("message test {required binary a;}");
    Map<String, TypeInfo> columnTypes = new HashMap<>();
    columnTypes.put("a", TypeInfoFactory.getCharTypeInfo(10));
    FilterPredicate p = ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema, columnTypes);
    String expected = "eq(a, Binary{\"apple\"})";
    assertEquals(expected, p.toString());
}
Also used: HashMap (java.util.HashMap), HiveChar (org.apache.hadoop.hive.common.type.HiveChar), SearchArgument (org.apache.hadoop.hive.ql.io.sarg.SearchArgument), FilterPredicate (org.apache.parquet.filter2.predicate.FilterPredicate), TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo), MessageType (org.apache.parquet.schema.MessageType), Test (org.junit.Test)
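Before a predicate like this is applied to a file, Parquet validates it against the file schema. A minimal sketch of that step (not part of the Hive test; it assumes SchemaCompatibilityValidator.validate(predicate, schema) as the entry point and reuses the one-column schema from Example 74):

import org.apache.parquet.filter2.predicate.FilterPredicate;
import org.apache.parquet.filter2.predicate.SchemaCompatibilityValidator;
import org.apache.parquet.schema.MessageType;
import org.apache.parquet.schema.MessageTypeParser;

public static void validateAgainstSchema(FilterPredicate p) {
    MessageType schema = MessageTypeParser.parseMessageType("message test {required binary a;}");
    // Throws if the predicate references a column that is missing from the
    // schema or whose declared type is incompatible with the literal.
    SchemaCompatibilityValidator.validate(p, schema);
}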

Example 75 with FilterPredicate

Use of org.apache.parquet.filter2.predicate.FilterPredicate in project hive by apache.

From the class TestParquetFilterPredicate, method testFilterCharColumnWhiteSpacePostfix.

@Test
public void testFilterCharColumnWhiteSpacePostfix() throws Exception {
    SearchArgument sarg = SearchArgumentFactory.newBuilder()
        .startAnd()
        .lessThan("a", PredicateLeaf.Type.STRING, new HiveChar("apple  ", 10).toString())
        .lessThanEquals("b", PredicateLeaf.Type.STRING, new HiveChar("pear  ", 10).toString())
        .equals("c", PredicateLeaf.Type.STRING, new HiveChar("orange  ", 10).toString())
        .nullSafeEquals("d", PredicateLeaf.Type.STRING, new HiveChar("pineapple ", 10).toString())
        .end()
        .build();
    MessageType schema = MessageTypeParser.parseMessageType("message test {" + " required binary a; required binary b;" + " required binary c; required binary d;}");
    Map<String, TypeInfo> columnTypes = new HashMap<>();
    columnTypes.put("a", TypeInfoFactory.getCharTypeInfo(10));
    columnTypes.put("b", TypeInfoFactory.getCharTypeInfo(10));
    columnTypes.put("c", TypeInfoFactory.getCharTypeInfo(10));
    columnTypes.put("d", TypeInfoFactory.getCharTypeInfo(10));
    FilterPredicate p = ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema, columnTypes);
    String expected = "and(and(and(" + "lt(a, Binary{\"apple\"}), " + "lteq(b, Binary{\"pear\"})), " + "eq(c, Binary{\"orange\"})), " + "eq(d, Binary{\"pineapple\"}))";
    assertEquals(expected, p.toString());
}
Also used: HashMap (java.util.HashMap), HiveChar (org.apache.hadoop.hive.common.type.HiveChar), SearchArgument (org.apache.hadoop.hive.ql.io.sarg.SearchArgument), FilterPredicate (org.apache.parquet.filter2.predicate.FilterPredicate), TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo), MessageType (org.apache.parquet.schema.MessageType), Test (org.junit.Test)
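The point these CHAR-column tests assert is that trailing padding is stripped from the literals before they reach the Parquet predicate (e.g. "pineapple " becomes Binary{"pineapple"}). A small illustration of that expected behavior (a hypothetical helper, not the converter's actual code):

import org.apache.parquet.io.api.Binary;

// Mirrors the trimming the tests expect for CHAR columns: the space-padded
// Hive literal becomes a Parquet Binary literal with trailing spaces removed.
public static Binary charLiteralToBinary(String paddedLiteral) {
    String trimmed = paddedLiteral.replaceAll("\\s+$", "");
    return Binary.fromString(trimmed);
}

For example, charLiteralToBinary("orange  ") corresponds to eq(c, Binary{"orange"}) in the expected string above.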

Aggregations

FilterPredicate (org.apache.parquet.filter2.predicate.FilterPredicate): 76
Test (org.junit.Test): 50
HashMap (java.util.HashMap): 33
MessageType (org.apache.parquet.schema.MessageType): 33
TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo): 32
SearchArgument (org.apache.hadoop.hive.ql.io.sarg.SearchArgument): 25
HiveChar (org.apache.hadoop.hive.common.type.HiveChar): 12
BinaryColumn (org.apache.parquet.filter2.predicate.Operators.BinaryColumn): 8
ArrayList (java.util.ArrayList): 5
List (java.util.List): 5
Group (org.apache.parquet.example.data.Group): 5
Configuration (org.apache.hadoop.conf.Configuration): 4
HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar): 4
User (org.apache.parquet.filter2.recordlevel.PhoneBookWriter.User): 4
Predicate (java.util.function.Predicate): 3
Path (org.apache.hadoop.fs.Path): 3
Pair (uk.gov.gchq.gaffer.commonutil.pair.Pair): 3
TupleAdaptedPredicate (uk.gov.gchq.koryphe.tuple.predicate.TupleAdaptedPredicate): 3
HashSet (java.util.HashSet): 2
GenericRecord (org.apache.avro.generic.GenericRecord): 2