Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.getCharTypeInfo in project hive by apache.
From the class TestGenericUDFOPPositive, method testChar.
@Test
public void testChar() throws HiveException {
  GenericUDFOPPositive udf = new GenericUDFOPPositive();
  HiveChar vc = new HiveChar("32300.004747", 12);
  HiveCharWritable input = new HiveCharWritable(vc);
  CharTypeInfo inputTypeInfo = TypeInfoFactory.getCharTypeInfo(12);
  ObjectInspector[] inputOIs = {
      PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo)
  };
  DeferredObject[] args = { new DeferredJavaObject(input) };
  // Unary + on a char(12) operand resolves to double, as the assert below confirms.
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo());
  DoubleWritable res = (DoubleWritable) udf.evaluate(args);
  Assert.assertEquals(32300.004747, res.get(), EPSILON);
}
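Since the whole point of this call is to carry the declared length, here is a minimal standalone sketch (not from the Hive tests) of what getCharTypeInfo returns:

  CharTypeInfo ti = TypeInfoFactory.getCharTypeInfo(12);
  System.out.println(ti.getTypeName()); // "char(12)" -- the length is part of the type name
  System.out.println(ti.getLength());   // 12
  // TypeInfoFactory caches qualified types by type name, so equal lengths
  // should yield the same instance (an assumption about the factory's caching).
  Assert.assertSame(ti, TypeInfoFactory.getCharTypeInfo(12));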
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.getCharTypeInfo in project hive by apache.
From the class TestGenericUDFPrintf, method testCharFormat.
@Test
public void testCharFormat() throws HiveException {
  GenericUDFPrintf udf = new GenericUDFPrintf();
  ObjectInspector[] inputOIs = {
      // the format string is char(10), the argument is varchar(7)
      PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getCharTypeInfo(10)),
      PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getVarcharTypeInfo(7))
  };
  HiveCharWritable formatChar = new HiveCharWritable();
  formatChar.set("arg1=%s");
  HiveVarcharWritable argVarchar = new HiveVarcharWritable();
  argVarchar.set("world");
  DeferredObject[] args = { new DeferredJavaObject(formatChar), new DeferredJavaObject(argVarchar) };
  // printf returns a string regardless of the char/varchar input types
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  Assert.assertEquals(PrimitiveObjectInspectorFactory.writableStringObjectInspector, oi);
  Text res = (Text) udf.evaluate(args);
  Assert.assertEquals("arg1=world", res.toString());
}
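For reference, a sketch of the imports the two UDF tests above appear to rely on (inferred from the identifiers used; the actual test sources in the Hive repo may differ slightly, and the UDF classes themselves live in the same package as the tests):

  import org.apache.hadoop.hive.common.type.HiveChar;
  import org.apache.hadoop.hive.ql.metadata.HiveException;
  import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
  import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
  import org.apache.hadoop.hive.serde2.io.DoubleWritable;
  import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
  import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
  import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
  import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
  import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
  import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
  import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
  import org.apache.hadoop.io.Text;
  import org.junit.Assert;
  import org.junit.Test;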
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.getCharTypeInfo in project hive by apache.
From the class TestParquetFilterPredicate, method testFilterCharColumnGreaterThan.
@Test
public void testFilterCharColumnGreaterThan() throws Exception {
  // a > 'apple' expressed as NOT (a <= 'apple')
  SearchArgument sarg = SearchArgumentFactory.newBuilder()
      .startNot()
      .lessThanEquals("a", PredicateLeaf.Type.STRING, new HiveChar("apple", 10).toString())
      .end()
      .build();
  MessageType schema = MessageTypeParser.parseMessageType("message test {required binary a;}");
  Map<String, TypeInfo> columnTypes = new HashMap<>();
  columnTypes.put("a", TypeInfoFactory.getCharTypeInfo(10));
  FilterPredicate p = ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema, columnTypes);
  String expected = "not(lteq(a, Binary{\"apple\"}))";
  assertEquals(expected, p.toString());
}
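Worth noting: HiveChar pads its value with trailing spaces to the declared length, so the literal handed to the SearchArgument builder above is actually padded. A minimal sketch of that behavior:

  HiveChar hc = new HiveChar("apple", 10);
  String literal = hc.toString();
  // literal is "apple     " -- padded with spaces to length 10

Yet the expected predicate contains the unpadded "apple". That is what the char(10) entry in columnTypes buys: it appears to let the converter strip the padding before building the Parquet binary literal, which is why these tests register the column with TypeInfoFactory.getCharTypeInfo(10).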
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.getCharTypeInfo in project hive by apache.
From the class TestParquetFilterPredicate, method testFilterCharColumnBetween.
@Test
public void testFilterCharColumnBetween() throws Exception {
  SearchArgument sarg = SearchArgumentFactory.newBuilder()
      .between("a", PredicateLeaf.Type.STRING,
          new HiveChar("apple", 10).toString(), new HiveChar("pear", 10).toString())
      .build();
  MessageType schema = MessageTypeParser.parseMessageType("message test {required binary a;}");
  Map<String, TypeInfo> columnTypes = new HashMap<>();
  columnTypes.put("a", TypeInfoFactory.getCharTypeInfo(10));
  FilterPredicate p = ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema, columnTypes);
  // between(lo, hi) is rewritten as lo <= a <= hi
  String expected = "and(lteq(a, Binary{\"pear\"}), not(lt(a, Binary{\"apple\"})))";
  assertEquals(expected, p.toString());
}
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.getCharTypeInfo in project hive by apache.
From the class TestParquetFilterPredicate, method testFilterCharColumnEquals.
@Test
public void testFilterCharColumnEquals() throws Exception {
  SearchArgument sarg = SearchArgumentFactory.newBuilder()
      .equals("a", PredicateLeaf.Type.STRING, new HiveChar("apple", 10).toString())
      .build();
  MessageType schema = MessageTypeParser.parseMessageType("message test {required binary a;}");
  Map<String, TypeInfo> columnTypes = new HashMap<>();
  columnTypes.put("a", TypeInfoFactory.getCharTypeInfo(10));
  FilterPredicate p = ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema, columnTypes);
  String expected = "eq(a, Binary{\"apple\"})";
  assertEquals(expected, p.toString());
}
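Putting the pieces together, a standalone sketch (hypothetical literals, same APIs and expectations as the tests above) showing both effects at once: between(lo, hi) becomes lo <= a <= hi, and the char(10) literals lose their trailing padding in the resulting predicate:

  SearchArgument sarg = SearchArgumentFactory.newBuilder()
      .between("a", PredicateLeaf.Type.STRING,
          new HiveChar("fig", 10).toString(),   // "fig       "
          new HiveChar("plum", 10).toString())  // "plum      "
      .build();
  MessageType schema = MessageTypeParser.parseMessageType("message test {required binary a;}");
  Map<String, TypeInfo> columnTypes = new HashMap<>();
  columnTypes.put("a", TypeInfoFactory.getCharTypeInfo(10));
  FilterPredicate p = ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema, columnTypes);
  // By analogy with the tests above, p.toString() should be:
  // and(lteq(a, Binary{"plum"}), not(lt(a, Binary{"fig"})))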