Usage example of org.apache.parquet.filter2.recordlevel.IncrementallyUpdatedFilterPredicate.And in the Apache Hive project: the testFilterBetween method of the TestParquetFilterPredicate class.
@Test
public void testFilterBetween() {
  // Single int32 column; the SARG side always uses LONG literals.
  MessageType schema = MessageTypeParser.parseMessageType("message test { required int32 bCol; }");
  Map<String, TypeInfo> columnTypes = new HashMap<>();
  columnTypes.put("bCol", TypeInfoFactory.getPrimitiveTypeInfo("int"));

  // Normal range: BETWEEN 1 AND 5 becomes lteq(upper) AND NOT lt(lower).
  SearchArgument sarg = SearchArgumentFactory.newBuilder().between("bCol", PredicateLeaf.Type.LONG, 1L, 5L).build();
  FilterPredicate predicate = ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema, columnTypes);
  assertEquals("and(lteq(bCol, 5), not(lt(bCol, 1)))", predicate.toString());

  // Inverted bounds (5..1): the converter keeps the bounds as given, producing an unsatisfiable range.
  sarg = SearchArgumentFactory.newBuilder().between("bCol", PredicateLeaf.Type.LONG, 5L, 1L).build();
  predicate = ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema, columnTypes);
  assertEquals("and(lteq(bCol, 1), not(lt(bCol, 5)))", predicate.toString());

  // Degenerate range (1..1): equivalent to equality on 1.
  sarg = SearchArgumentFactory.newBuilder().between("bCol", PredicateLeaf.Type.LONG, 1L, 1L).build();
  predicate = ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema, columnTypes);
  assertEquals("and(lteq(bCol, 1), not(lt(bCol, 1)))", predicate.toString());
}
Usage example of org.apache.parquet.filter2.recordlevel.IncrementallyUpdatedFilterPredicate.And in the Apache Hive project: the testFilterFloatColumn method of the TestParquetFilterPredicate class.
@Test
public void testFilterFloatColumn() throws Exception {
  // Conjunction mixing int, char and float leaves; float leaves carry Double literals.
  SearchArgument sarg = SearchArgumentFactory.newBuilder()
      .startAnd()
      .lessThan("x", PredicateLeaf.Type.LONG, 22L)
      .lessThan("x1", PredicateLeaf.Type.LONG, 22L)
      .lessThanEquals("y", PredicateLeaf.Type.STRING, new HiveChar("hi", 10).toString())
      .equals("z", PredicateLeaf.Type.FLOAT, Double.valueOf(0.22))
      .equals("z1", PredicateLeaf.Type.FLOAT, Double.valueOf(0.22))
      .end()
      .build();
  MessageType schema = MessageTypeParser.parseMessageType(
      "message test { required int32 x; required int32 x1; required binary y; required float z; required float z1;}");

  Map<String, TypeInfo> columnTypes = new HashMap<>();
  columnTypes.put("x", TypeInfoFactory.getPrimitiveTypeInfo("int"));
  columnTypes.put("x1", TypeInfoFactory.getPrimitiveTypeInfo("int"));
  columnTypes.put("y", TypeInfoFactory.getCharTypeInfo(10));
  columnTypes.put("z", TypeInfoFactory.getPrimitiveTypeInfo("float"));
  columnTypes.put("z1", TypeInfoFactory.getPrimitiveTypeInfo("float"));

  FilterPredicate predicate = ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema, columnTypes);
  // The AND tree is left-nested; char values are compared as Binary, float equals as 0.22.
  String expected =
      "and(and(and(and(lt(x, 22), lt(x1, 22)), lteq(y, Binary{\"hi\"})), eq(z, 0.22)), eq(z1, 0.22))";
  assertEquals(expected, predicate.toString());
}
Usage example of org.apache.parquet.filter2.recordlevel.IncrementallyUpdatedFilterPredicate.And in the Apache Hive project: the testFilterVarCharColumn method of the TestParquetFilterPredicate class.
@Test
public void testFilterVarCharColumn() throws Exception {
  // One leaf of each operator kind over varchar columns a..g.
  SearchArgument sarg = SearchArgumentFactory.newBuilder()
      .startAnd()
      .lessThan("a", PredicateLeaf.Type.STRING, new HiveVarchar("apple", 10).toString())
      .lessThanEquals("b", PredicateLeaf.Type.STRING, new HiveVarchar("pear", 10).toString())
      .equals("c", PredicateLeaf.Type.STRING, new HiveVarchar("orange", 10).toString())
      .nullSafeEquals("d", PredicateLeaf.Type.STRING, new HiveVarchar("pineapple", 9).toString())
      .in("e", PredicateLeaf.Type.STRING, new HiveVarchar("cherry", 10).toString(), new HiveVarchar("orange", 10).toString())
      .between("f", PredicateLeaf.Type.STRING, new HiveVarchar("apple", 10).toString(), new HiveVarchar("pear", 10).toString())
      .isNull("g", PredicateLeaf.Type.STRING)
      .end()
      .build();
  MessageType schema = MessageTypeParser.parseMessageType(
      "message test { required binary a; required binary b; required binary c; required binary d; required binary e; required binary f; required binary g;}");

  // Every column maps to varchar(10) on the Hive side.
  Map<String, TypeInfo> columnTypes = new HashMap<>();
  for (String column : new String[] {"a", "b", "c", "d", "e", "f", "g"}) {
    columnTypes.put(column, TypeInfoFactory.getVarcharTypeInfo(10));
  }

  FilterPredicate predicate = ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema, columnTypes);
  // IN expands to a chain of ORs, BETWEEN to lteq/not-lt, IS NULL to eq(null).
  String expected =
      "and(and(and(and(and(and(lt(a, Binary{\"apple\"}), lteq(b, Binary{\"pear\"})), "
          + "eq(c, Binary{\"orange\"})), eq(d, Binary{\"pineapple\"})), "
          + "or(eq(e, Binary{\"cherry\"}), eq(e, Binary{\"orange\"}))), "
          + "and(lteq(f, Binary{\"pear\"}), not(lt(f, Binary{\"apple\"})))), eq(g, null))";
  assertEquals(expected, predicate.toString());
}
Usage example of org.apache.parquet.filter2.recordlevel.IncrementallyUpdatedFilterPredicate.And in the Apache Hive project: the testFilterFloatColumns method of the TestParquetFilterPredicate class.
@Test
public void testFilterFloatColumns() {
  MessageType schema = MessageTypeParser.parseMessageType("message test { required float a; required int32 b; }");

  // NOT(OR(...)) over an is-null, a float between, and a long IN list.
  SearchArgument sarg = SearchArgumentFactory.newBuilder()
      .startNot()
      .startOr()
      .isNull("a", PredicateLeaf.Type.FLOAT)
      .between("a", PredicateLeaf.Type.FLOAT, 10.2, 20.3)
      .in("b", PredicateLeaf.Type.LONG, 1L, 2L, 3L)
      .end()
      .end()
      .build();

  Map<String, TypeInfo> columnTypes = new HashMap<>();
  columnTypes.put("a", TypeInfoFactory.getPrimitiveTypeInfo("float"));
  columnTypes.put("b", TypeInfoFactory.getPrimitiveTypeInfo("int"));

  FilterPredicate predicate = ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema, columnTypes);
  // De Morgan: NOT(OR(x, y, z)) becomes AND(NOT x, NOT y, NOT z).
  String expected =
      "and(and(not(eq(a, null)), not(and(lteq(a, 20.3), not(lt(a, 10.2))))), not(or(or(eq(b, 1), eq(b, 2)), eq(b, 3))))";
  assertEquals(expected, predicate.toString());
}
Usage example of org.apache.parquet.filter2.recordlevel.IncrementallyUpdatedFilterPredicate.And in the Presto project (prestodb): the getDomain method of the TupleDomainParquetPredicate class.
/**
 * Get a domain for the ranges defined by each pair of elements from {@code minimums} and {@code maximums}.
 * Both lists must have the same size; element {@code i} of each list describes one row group's
 * column statistics (min/max).
 *
 * @param column the Parquet column the statistics belong to (currently unused here, kept for API symmetry)
 * @param type the Presto type of the column
 * @param minimums per-row-group minimum values, as decoded Parquet statistics objects
 * @param maximums per-row-group maximum values, parallel to {@code minimums}
 * @param hasNullValue whether any row group may contain nulls for this column
 * @return a Domain covering the union of all per-row-group ranges, or {@code Domain.all}/an
 *         all-values domain when the statistics cannot be used safely
 */
private static Domain getDomain(ColumnDescriptor column, Type type, List<Object> minimums, List<Object> maximums, boolean hasNullValue) {
    checkArgument(minimums.size() == maximums.size(), "Expected minimums and maximums to have the same size");
    if (type.equals(BOOLEAN)) {
        // Booleans have only two values, so min/max collapse to "does any true/false exist".
        boolean hasTrueValues = minimums.stream().anyMatch(value -> (boolean) value) || maximums.stream().anyMatch(value -> (boolean) value);
        boolean hasFalseValues = minimums.stream().anyMatch(value -> !(boolean) value) || maximums.stream().anyMatch(value -> !(boolean) value);
        if (hasTrueValues && hasFalseValues) {
            return Domain.all(type);
        }
        if (hasTrueValues) {
            return Domain.create(ValueSet.of(type, true), hasNullValue);
        }
        if (hasFalseValues) {
            return Domain.create(ValueSet.of(type, false), hasNullValue);
        }
        // All nulls case is handled earlier
        throw new VerifyException("Impossible boolean statistics");
    }
    // Integer-like types and DATE share the same long-range handling (including overflow checks).
    if (type.equals(BIGINT) || type.equals(TINYINT) || type.equals(SMALLINT) || type.equals(INTEGER) || type.equals(DATE)) {
        return createLongRangeDomain(type, minimums, maximums, hasNullValue);
    }
    if (type.equals(REAL)) {
        List<Range> ranges = new ArrayList<>();
        for (int i = 0; i < minimums.size(); i++) {
            Float min = (Float) minimums.get(i);
            Float max = (Float) maximums.get(i);
            // NaN breaks ordering, so fall back to accepting all values.
            if (min.isNaN() || max.isNaN()) {
                return Domain.create(ValueSet.all(type), hasNullValue);
            }
            // REAL is represented as the float's raw int bits widened to long.
            // NOTE(review): raw-bit ordering is not monotonic for negative floats — verify that
            // upstream statistics guarantee non-negative values or that callers tolerate this.
            ranges.add(Range.range(type, (long) floatToRawIntBits(min), true, (long) floatToRawIntBits(max), true));
        }
        checkArgument(!ranges.isEmpty(), "cannot use empty ranges");
        return Domain.create(ValueSet.ofRanges(ranges), hasNullValue);
    }
    if (type.equals(DOUBLE)) {
        List<Range> ranges = new ArrayList<>();
        for (int i = 0; i < minimums.size(); i++) {
            Double min = (Double) minimums.get(i);
            Double max = (Double) maximums.get(i);
            // NaN breaks ordering, so fall back to accepting all values.
            if (min.isNaN() || max.isNaN()) {
                return Domain.create(ValueSet.all(type), hasNullValue);
            }
            ranges.add(Range.range(type, min, true, max, true));
        }
        checkArgument(!ranges.isEmpty(), "cannot use empty ranges");
        return Domain.create(ValueSet.ofRanges(ranges), hasNullValue);
    }
    if (isVarcharType(type)) {
        List<Range> ranges = new ArrayList<>();
        for (int i = 0; i < minimums.size(); i++) {
            // Wrap the Parquet Binary statistics bytes as Slices without copying.
            Slice min = Slices.wrappedBuffer(((Binary) minimums.get(i)).toByteBuffer());
            Slice max = Slices.wrappedBuffer(((Binary) maximums.get(i)).toByteBuffer());
            ranges.add(Range.range(type, min, true, max, true));
        }
        checkArgument(!ranges.isEmpty(), "cannot use empty ranges");
        return Domain.create(ValueSet.ofRanges(ranges), hasNullValue);
    }
    // Unsupported type: no pruning, accept all values (plus nulls if present).
    return Domain.create(ValueSet.all(type), hasNullValue);
}

/**
 * Build a Domain from per-row-group long min/max pairs. Used for all integer-like types and DATE.
 * Returns an all-values domain if any statistic overflows the target type's range, since the
 * statistics cannot be trusted in that case.
 */
private static Domain createLongRangeDomain(Type type, List<Object> minimums, List<Object> maximums, boolean hasNullValue) {
    List<Range> ranges = new ArrayList<>();
    for (int i = 0; i < minimums.size(); i++) {
        long min = asLong(minimums.get(i));
        long max = asLong(maximums.get(i));
        if (isStatisticsOverflow(type, min, max)) {
            return Domain.create(ValueSet.all(type), hasNullValue);
        }
        ranges.add(Range.range(type, min, true, max, true));
    }
    checkArgument(!ranges.isEmpty(), "cannot use empty ranges");
    return Domain.create(ValueSet.ofRanges(ranges), hasNullValue);
}
Aggregations