Use of org.apache.hadoop.hive.common.type.HiveVarchar in project hive by apache.
In class TestParquetFilterPredicate, the method testFilterStringColumnWithWhiteSpaces:
@Test
public void testFilterStringColumnWithWhiteSpaces() throws Exception {
  SearchArgument sarg = SearchArgumentFactory.newBuilder()
      .startAnd()
      .lessThan("a", PredicateLeaf.Type.STRING, new HiveVarchar(" apple ", 10).toString())
      .lessThanEquals("b", PredicateLeaf.Type.STRING, new HiveVarchar(" pear", 10).toString())
      .equals("c", PredicateLeaf.Type.STRING, new HiveVarchar("orange ", 10).toString())
      .end()
      .build();
  MessageType schema = MessageTypeParser.parseMessageType("message test {"
      + " required binary a; required binary b;"
      + " required binary c;}");
  Map<String, TypeInfo> columnTypes = new HashMap<>();
  columnTypes.put("a", TypeInfoFactory.getPrimitiveTypeInfo("string"));
  columnTypes.put("b", TypeInfoFactory.getPrimitiveTypeInfo("string"));
  columnTypes.put("c", TypeInfoFactory.getPrimitiveTypeInfo("string"));
  FilterPredicate p = ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema, columnTypes);
  // Leading and trailing whitespace in the literals must survive the conversion.
  String expected = "and(and("
      + "lt(a, Binary{\" apple \"}), "
      + "lteq(b, Binary{\" pear\"})), "
      + "eq(c, Binary{\"orange \"}))";
  assertEquals(expected, p.toString());
}
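The predicates above pass every literal through new HiveVarchar(value, maxLength).toString(). As a hedged side note (not part of the Hive test suite), a minimal sketch of that behavior is shown below; it assumes the usual HiveVarchar semantics of truncating the value to the declared maximum length, while toString() returns the stored value unchanged, whitespace included.

// Minimal sketch of HiveVarchar literal behavior (assumed semantics, not from the tests).
HiveVarchar fits = new HiveVarchar(" apple ", 10);
System.out.println("[" + fits + "]");       // [ apple ]  -- 7 characters fit within 10, whitespace kept
HiveVarchar truncated = new HiveVarchar("watermelons", 5);
System.out.println("[" + truncated + "]");  // [water]    -- value truncated to the maximum length of 5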
Use of org.apache.hadoop.hive.common.type.HiveVarchar in project hive by apache.
In class TestParquetFilterPredicate, the method testFilterMoreComplexVarCharColumn:
@Test
public void testFilterMoreComplexVarCharColumn() throws Exception {
  // ((a=pear or a<=cherry) and (b=orange)) and (c=banana or d<cherry)
  SearchArgument sarg = SearchArgumentFactory.newBuilder()
      .startAnd()
      .startAnd()
      .startOr()
      .equals("a", PredicateLeaf.Type.STRING, new HiveVarchar("pear", 10).toString())
      .lessThanEquals("a", PredicateLeaf.Type.STRING, new HiveVarchar("cherry", 10).toString())
      .end()
      .equals("b", PredicateLeaf.Type.STRING, new HiveVarchar("orange", 10).toString())
      .end()
      .startOr()
      .equals("c", PredicateLeaf.Type.STRING, new HiveVarchar("banana", 10).toString())
      .lessThan("d", PredicateLeaf.Type.STRING, new HiveVarchar("cherry", 10).toString())
      .end()
      .end().build();
  MessageType schema = MessageTypeParser.parseMessageType("message test {"
      + " required binary a; required binary b;"
      + " required binary c; required binary d;}");
  Map<String, TypeInfo> columnTypes = new HashMap<>();
  columnTypes.put("a", TypeInfoFactory.getVarcharTypeInfo(10));
  columnTypes.put("b", TypeInfoFactory.getVarcharTypeInfo(10));
  columnTypes.put("c", TypeInfoFactory.getVarcharTypeInfo(10));
  columnTypes.put("d", TypeInfoFactory.getVarcharTypeInfo(10));
  FilterPredicate p = ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema, columnTypes);
  String expected = "and(and(or(eq(a, Binary{\"pear\"}), lteq(a, Binary{\"cherry\"})), "
      + "eq(b, Binary{\"orange\"})), "
      + "or(eq(c, Binary{\"banana\"}), lt(d, Binary{\"cherry\"})))";
  assertEquals(expected, p.toString());
}
Use of org.apache.hadoop.hive.common.type.HiveVarchar in project hive by apache.
In class TestParquetFilterPredicate, the method testFilterVarCharColumnWithWhiteSpaces:
@Test
public void testFilterVarCharColumnWithWhiteSpaces() throws Exception {
  SearchArgument sarg = SearchArgumentFactory.newBuilder()
      .startAnd()
      .lessThan("a", PredicateLeaf.Type.STRING, new HiveVarchar(" apple ", 10).toString())
      .lessThanEquals("b", PredicateLeaf.Type.STRING, new HiveVarchar(" pear", 10).toString())
      .equals("c", PredicateLeaf.Type.STRING, new HiveVarchar("orange ", 10).toString())
      .end()
      .build();
  MessageType schema = MessageTypeParser.parseMessageType("message test {"
      + " required binary a; required binary b;"
      + " required binary c;}");
  Map<String, TypeInfo> columnTypes = new HashMap<>();
  columnTypes.put("a", TypeInfoFactory.getVarcharTypeInfo(10));
  columnTypes.put("b", TypeInfoFactory.getVarcharTypeInfo(10));
  columnTypes.put("c", TypeInfoFactory.getVarcharTypeInfo(10));
  FilterPredicate p = ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema, columnTypes);
  String expected = "and(and("
      + "lt(a, Binary{\" apple \"}), "
      + "lteq(b, Binary{\" pear\"})), "
      + "eq(c, Binary{\"orange \"}))";
  assertEquals(expected, p.toString());
}
Use of org.apache.hadoop.hive.common.type.HiveVarchar in project hive by apache.
In class TestParquetFilterPredicate, the method testFilterVarCharColumn:
@Test
public void testFilterVarCharColumn() throws Exception {
  SearchArgument sarg = SearchArgumentFactory.newBuilder()
      .startAnd()
      .lessThan("a", PredicateLeaf.Type.STRING, new HiveVarchar("apple", 10).toString())
      .lessThanEquals("b", PredicateLeaf.Type.STRING, new HiveVarchar("pear", 10).toString())
      .equals("c", PredicateLeaf.Type.STRING, new HiveVarchar("orange", 10).toString())
      .nullSafeEquals("d", PredicateLeaf.Type.STRING, new HiveVarchar("pineapple", 9).toString())
      .in("e", PredicateLeaf.Type.STRING,
          new HiveVarchar("cherry", 10).toString(), new HiveVarchar("orange", 10).toString())
      .between("f", PredicateLeaf.Type.STRING,
          new HiveVarchar("apple", 10).toString(), new HiveVarchar("pear", 10).toString())
      .isNull("g", PredicateLeaf.Type.STRING)
      .end()
      .build();
  MessageType schema = MessageTypeParser.parseMessageType("message test {"
      + " required binary a; required binary b;"
      + " required binary c; required binary d;"
      + " required binary e; required binary f;"
      + " required binary g;}");
  Map<String, TypeInfo> columnTypes = new HashMap<>();
  columnTypes.put("a", TypeInfoFactory.getVarcharTypeInfo(10));
  columnTypes.put("b", TypeInfoFactory.getVarcharTypeInfo(10));
  columnTypes.put("c", TypeInfoFactory.getVarcharTypeInfo(10));
  columnTypes.put("d", TypeInfoFactory.getVarcharTypeInfo(10));
  columnTypes.put("e", TypeInfoFactory.getVarcharTypeInfo(10));
  columnTypes.put("f", TypeInfoFactory.getVarcharTypeInfo(10));
  columnTypes.put("g", TypeInfoFactory.getVarcharTypeInfo(10));
  FilterPredicate p = ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema, columnTypes);
  // in(e, ...) expands to eq()s joined by or; between(f, ...) to and(lteq, not(lt)); isNull(g) to eq(g, null).
  String expected = "and(and(and(and(and(and("
      + "lt(a, Binary{\"apple\"}), "
      + "lteq(b, Binary{\"pear\"})), "
      + "eq(c, Binary{\"orange\"})), "
      + "eq(d, Binary{\"pineapple\"})), "
      + "or(eq(e, Binary{\"cherry\"}), eq(e, Binary{\"orange\"}))), "
      + "and(lteq(f, Binary{\"pear\"}), not(lt(f, Binary{\"apple\"})))), "
      + "eq(g, null))";
  assertEquals(expected, p.toString());
}
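The tests only compare the converted FilterPredicate against its string form. As a hedged sketch (not taken from the test, and assuming a plain Hadoop Configuration is available), the same object could be handed to parquet-hadoop for row-group filtering via ParquetInputFormat.setFilterPredicate:

// Hypothetical follow-up, not part of TestParquetFilterPredicate: push the converted
// predicate into the job configuration so Parquet can skip row groups whose
// column statistics cannot satisfy it.
Configuration conf = new Configuration();        // org.apache.hadoop.conf.Configuration
ParquetInputFormat.setFilterPredicate(conf, p);  // org.apache.parquet.hadoop.ParquetInputFormat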
Use of org.apache.hadoop.hive.common.type.HiveVarchar in project hive by apache.
In class TestGenericUDFOPNegative, the method testVarchar:
@Test
public void testVarchar() throws HiveException {
GenericUDFOPNegative udf = new GenericUDFOPNegative();
HiveVarchar vc = new HiveVarchar("32300.004747", 12);
HiveVarcharWritable input = new HiveVarcharWritable(vc);
VarcharTypeInfo inputTypeInfo = TypeInfoFactory.getVarcharTypeInfo(12);
ObjectInspector[] inputOIs = { PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo) };
DeferredObject[] args = { new DeferredJavaObject(input) };
PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo());
DoubleWritable res = (DoubleWritable) udf.evaluate(args);
Assert.assertEquals(-32300.004747, res.get(), EPSILON);
}
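For context, the varchar input above is wrapped in a HiveVarcharWritable before the UDF sees it. A minimal, hedged illustration of that round trip (not part of the test) might look like the following:

// Sketch of the HiveVarchar / HiveVarcharWritable round trip used in the test above.
HiveVarchar vc = new HiveVarchar("32300.004747", 12);
HiveVarcharWritable writable = new HiveVarcharWritable(vc);
// getHiveVarchar() returns a HiveVarchar holding the same value the writable was built from.
System.out.println(writable.getHiveVarchar().getValue());  // 32300.004747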