
Example 71 with HiveChar

Use of org.apache.hadoop.hive.common.type.HiveChar in project hive by apache.

From the class TestParquetFilterPredicate, method testFilterCharColumnEquals.

@Test
public void testFilterCharColumnEquals() throws Exception {
    // the HiveChar literal is padded to 10 characters; the converter strips that padding
    SearchArgument sarg = SearchArgumentFactory.newBuilder()
        .equals("a", PredicateLeaf.Type.STRING, new HiveChar("apple", 10).toString())
        .build();
    MessageType schema = MessageTypeParser.parseMessageType("message test {required binary a;}");
    Map<String, TypeInfo> columnTypes = new HashMap<>();
    columnTypes.put("a", TypeInfoFactory.getCharTypeInfo(10));
    FilterPredicate p = ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema, columnTypes);
    String expected = "eq(a, Binary{\"apple\"})";
    assertEquals(expected, p.toString());
}
Also used : HashMap(java.util.HashMap) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) SearchArgument(org.apache.hadoop.hive.ql.io.sarg.SearchArgument) FilterPredicate(org.apache.parquet.filter2.predicate.FilterPredicate) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) MessageType(org.apache.parquet.schema.MessageType) Test(org.junit.Test)
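
The equality test above leans on HiveChar's fixed-length semantics: the constructor pads the value with spaces up to the declared length, and the converter strips that padding before building the Parquet predicate. A minimal sketch of the padding behavior, assuming the toString()/getStrippedValue() semantics of current Hive releases:

import org.apache.hadoop.hive.common.type.HiveChar;

public class HiveCharPaddingDemo {
    public static void main(String[] args) {
        HiveChar c = new HiveChar("apple", 10);
        // toString() returns the space-padded, fixed-width form
        System.out.println("[" + c.toString() + "]");          // [apple     ]
        // getStrippedValue() drops the trailing pad spaces
        System.out.println("[" + c.getStrippedValue() + "]");  // [apple]
    }
}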

Example 72 with HiveChar

Use of org.apache.hadoop.hive.common.type.HiveChar in project hive by apache.

From the class TestParquetFilterPredicate, method testFilterCharColumnWhiteSpacePostfix.

@Test
public void testFilterCharColumnWhiteSpacePostfix() throws Exception {
    // every literal below carries explicit trailing whitespace inside the quoted value
    SearchArgument sarg = SearchArgumentFactory.newBuilder()
        .startAnd()
        .lessThan("a", PredicateLeaf.Type.STRING, new HiveChar("apple  ", 10).toString())
        .lessThanEquals("b", PredicateLeaf.Type.STRING, new HiveChar("pear  ", 10).toString())
        .equals("c", PredicateLeaf.Type.STRING, new HiveChar("orange  ", 10).toString())
        .nullSafeEquals("d", PredicateLeaf.Type.STRING, new HiveChar("pineapple ", 10).toString())
        .end()
        .build();
    MessageType schema = MessageTypeParser.parseMessageType("message test {" + " required binary a; required binary b;" + " required binary c; required binary d;}");
    Map<String, TypeInfo> columnTypes = new HashMap<>();
    columnTypes.put("a", TypeInfoFactory.getCharTypeInfo(10));
    columnTypes.put("b", TypeInfoFactory.getCharTypeInfo(10));
    columnTypes.put("c", TypeInfoFactory.getCharTypeInfo(10));
    columnTypes.put("d", TypeInfoFactory.getCharTypeInfo(10));
    FilterPredicate p = ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema, columnTypes);
    // the trailing whitespace is stripped from every CHAR literal in the converted predicate
    String expected = "and(and(and("
        + "lt(a, Binary{\"apple\"}), "
        + "lteq(b, Binary{\"pear\"})), "
        + "eq(c, Binary{\"orange\"})), "
        + "eq(d, Binary{\"pineapple\"}))";
    assertEquals(expected, p.toString());
}
Also used : HashMap(java.util.HashMap) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) SearchArgument(org.apache.hadoop.hive.ql.io.sarg.SearchArgument) FilterPredicate(org.apache.parquet.filter2.predicate.FilterPredicate) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) MessageType(org.apache.parquet.schema.MessageType) Test(org.junit.Test)
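
The expected string shows that every trailing space inside the quoted values disappears after conversion. A rough sketch of the trimming applied to CHAR-typed leaves; the helper name is hypothetical, and the real logic lives inside Hive's converter:

// Hypothetical helper mirroring the observed behavior: trailing
// whitespace is removed from CHAR literals before the Parquet
// predicate is created.
static String stripCharPadding(String paddedLiteral) {
    return paddedLiteral.replaceAll("\\s+$", "");
}
// stripCharPadding("orange  ") returns "orange"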

Example 73 with HiveChar

Use of org.apache.hadoop.hive.common.type.HiveChar in project hive by apache.

From the class TestParquetFilterPredicate, method testFilterCharColumnLessThan.

@Test
public void testFilterCharColumnLessThan() throws Exception {
    SearchArgument sarg = SearchArgumentFactory.newBuilder()
        .lessThan("a", PredicateLeaf.Type.STRING, new HiveChar("apple", 10).toString())
        .build();
    MessageType schema = MessageTypeParser.parseMessageType("message test {required binary a;}");
    Map<String, TypeInfo> columnTypes = new HashMap<>();
    columnTypes.put("a", TypeInfoFactory.getCharTypeInfo(10));
    FilterPredicate p = ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema, columnTypes);
    String expected = "lt(a, Binary{\"apple\"})";
    assertEquals(expected, p.toString());
}
Also used : HashMap(java.util.HashMap) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) SearchArgument(org.apache.hadoop.hive.ql.io.sarg.SearchArgument) FilterPredicate(org.apache.parquet.filter2.predicate.FilterPredicate) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) MessageType(org.apache.parquet.schema.MessageType) Test(org.junit.Test)
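
The converted predicate is not just printable; it can be attached to a Parquet reader so that row groups and records failing lt(a, Binary{"apple"}) are skipped during the scan. A minimal usage sketch; the GroupReadSupport and file path are assumptions, not part of the test:

import org.apache.hadoop.fs.Path;
import org.apache.parquet.example.data.Group;
import org.apache.parquet.filter2.compat.FilterCompat;
import org.apache.parquet.hadoop.ParquetReader;
import org.apache.parquet.hadoop.example.GroupReadSupport;

// Wrap the FilterPredicate p from the test and hand it to a reader.
ParquetReader<Group> reader = ParquetReader
    .builder(new GroupReadSupport(), new Path("/tmp/data.parquet"))
    .withFilter(FilterCompat.get(p))
    .build();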

Example 74 with HiveChar

Use of org.apache.hadoop.hive.common.type.HiveChar in project hive by apache.

From the class TestParquetFilterPredicate, method testFilterCharColumnNullSafeEquals.

@Test
public void testFilterCharColumnNullSafeEquals() throws Exception {
    SearchArgument sarg = SearchArgumentFactory.newBuilder()
        .nullSafeEquals("a", PredicateLeaf.Type.STRING, new HiveChar("apple", 10).toString())
        .build();
    MessageType schema = MessageTypeParser.parseMessageType("message test {required binary a;}");
    Map<String, TypeInfo> columnTypes = new HashMap<>();
    columnTypes.put("a", TypeInfoFactory.getCharTypeInfo(10));
    FilterPredicate p = ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema, columnTypes);
    String expected = "eq(a, Binary{\"apple\"})";
    assertEquals(expected, p.toString());
}
Also used : HashMap(java.util.HashMap) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) SearchArgument(org.apache.hadoop.hive.ql.io.sarg.SearchArgument) FilterPredicate(org.apache.parquet.filter2.predicate.FilterPredicate) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) MessageType(org.apache.parquet.schema.MessageType) Test(org.junit.Test)
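
Note that nullSafeEquals produces the same eq leaf as a plain equals here. The converted predicate matches what direct construction with Parquet's FilterApi yields, as this sketch shows:

import org.apache.parquet.filter2.predicate.FilterApi;
import org.apache.parquet.filter2.predicate.FilterPredicate;
import org.apache.parquet.io.api.Binary;

// Build the same leaf the converter produces, directly against Parquet.
FilterPredicate direct = FilterApi.eq(
    FilterApi.binaryColumn("a"),
    Binary.fromString("apple"));
// direct.toString() -> eq(a, Binary{"apple"})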

Example 75 with HiveChar

Use of org.apache.hadoop.hive.common.type.HiveChar in project hive by apache.

From the class TestParquetFilterPredicate, method testFilterFloatColumn.

@Test
public void testFilterFloatColumn() throws Exception {
    SearchArgument sarg = SearchArgumentFactory.newBuilder()
        .startAnd()
        .lessThan("x", PredicateLeaf.Type.LONG, 22L)
        .lessThan("x1", PredicateLeaf.Type.LONG, 22L)
        .lessThanEquals("y", PredicateLeaf.Type.STRING, new HiveChar("hi", 10).toString())
        .equals("z", PredicateLeaf.Type.FLOAT, Double.valueOf(0.22))
        .equals("z1", PredicateLeaf.Type.FLOAT, Double.valueOf(0.22))
        .end()
        .build();
    MessageType schema = MessageTypeParser.parseMessageType("message test {" + " required int32 x; required int32 x1;" + " required binary y; required float z; required float z1;}");
    Map<String, TypeInfo> columnTypes = new HashMap<>();
    columnTypes.put("x", TypeInfoFactory.getPrimitiveTypeInfo("int"));
    columnTypes.put("x1", TypeInfoFactory.getPrimitiveTypeInfo("int"));
    columnTypes.put("y", TypeInfoFactory.getCharTypeInfo(10));
    columnTypes.put("z", TypeInfoFactory.getPrimitiveTypeInfo("float"));
    columnTypes.put("z1", TypeInfoFactory.getPrimitiveTypeInfo("float"));
    FilterPredicate p = ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema, columnTypes);
    String expected = "and(and(and(and("
        + "lt(x, 22), lt(x1, 22)), "
        + "lteq(y, Binary{\"hi\"})), "
        + "eq(z, 0.22)), "
        + "eq(z1, 0.22))";
    assertEquals(expected, p.toString());
}
Also used : HashMap(java.util.HashMap) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) SearchArgument(org.apache.hadoop.hive.ql.io.sarg.SearchArgument) FilterPredicate(org.apache.parquet.filter2.predicate.FilterPredicate) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) MessageType(org.apache.parquet.schema.MessageType) Test(org.junit.Test)
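
The FLOAT leaves above are supplied as boxed Double values; in the SearchArgument API, PredicateLeaf.Type.FLOAT covers both float and double columns. A minimal single-leaf sketch of the same pattern:

// FLOAT literals go in as java.lang.Double, even when the underlying
// Parquet column is a 32-bit float, as with z and z1 above.
SearchArgument floatOnly = SearchArgumentFactory.newBuilder()
    .equals("z", PredicateLeaf.Type.FLOAT, Double.valueOf(0.22))
    .build();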

Aggregations

HiveChar (org.apache.hadoop.hive.common.type.HiveChar): 97 usages
HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar): 61 usages
Test (org.junit.Test): 38 usages
HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal): 29 usages
Text (org.apache.hadoop.io.Text): 26 usages
CharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo): 25 usages
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable): 24 usages
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 22 usages
HiveCharWritable (org.apache.hadoop.hive.serde2.io.HiveCharWritable): 21 usages
TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo): 21 usages
HashMap (java.util.HashMap): 20 usages
Timestamp (org.apache.hadoop.hive.common.type.Timestamp): 20 usages
VarcharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo): 20 usages
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 19 usages
LongWritable (org.apache.hadoop.io.LongWritable): 19 usages
ArrayList (java.util.ArrayList): 18 usages
Date (org.apache.hadoop.hive.common.type.Date): 18 usages
ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable): 17 usages
ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable): 17 usages
BytesWritable (org.apache.hadoop.io.BytesWritable): 17 usages