Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.getDecimalTypeInfo in project hive by apache.
From class ExprNodeDescExprFactory, method adjustType:
private DecimalTypeInfo adjustType(HiveDecimal hd) {
  // Note: the normalize() call with rounding in HiveDecimal will currently reduce the
  // precision and scale of the value by throwing away trailing zeroes. This may or may
  // not be desirable for the literals; however, this used to be the default behavior
  // for explicit decimal literals (e.g. 1.0BD), so we keep this behavior for now.
  int prec = 1;
  int scale = 0;
  if (hd != null) {
    prec = hd.precision();
    scale = hd.scale();
  }
  DecimalTypeInfo typeInfo = TypeInfoFactory.getDecimalTypeInfo(prec, scale);
  return typeInfo;
}
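For context, a minimal standalone sketch of the same derivation (assuming only that hive-common and hive-serde are on the classpath; the exact printed type depends on the Hive version's normalization behavior):

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class DecimalLiteralTypeDemo {
  public static void main(String[] args) {
    // HiveDecimal normalizes on creation, dropping trailing zeroes:
    // "123.4500" becomes 123.45 with precision 5 and scale 2.
    HiveDecimal hd = HiveDecimal.create("123.4500");
    DecimalTypeInfo typeInfo = TypeInfoFactory.getDecimalTypeInfo(hd.precision(), hd.scale());
    System.out.println(typeInfo); // decimal(5,2)
  }
}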
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.getDecimalTypeInfo in project hive by apache.
From class TestParquetFilterPredicate, method testFilterComplexTypes:
/**
 * Check that the converted filter predicate is null when unsupported types are included.
 * @throws Exception
 */
@Test
public void testFilterComplexTypes() throws Exception {
  SearchArgument sarg = SearchArgumentFactory.newBuilder().startAnd()
      .lessThan("x", PredicateLeaf.Type.DATE, Date.valueOf("1970-1-11"))
      .lessThanEquals("y", PredicateLeaf.Type.STRING, new HiveChar("hi", 10).toString())
      .equals("z", PredicateLeaf.Type.DECIMAL, new HiveDecimalWritable("1.0"))
      .end().build();
  MessageType schema = MessageTypeParser.parseMessageType(
      "message test {" + " required int32 x; required binary y; required binary z;}");
  Map<String, TypeInfo> columnTypes = new HashMap<>();
  columnTypes.put("x", TypeInfoFactory.getPrimitiveTypeInfo("date"));
  columnTypes.put("y", TypeInfoFactory.getCharTypeInfo(10));
  columnTypes.put("z", TypeInfoFactory.getDecimalTypeInfo(4, 2));
  assertEquals(null, ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema, columnTypes));
  sarg = SearchArgumentFactory.newBuilder().startNot().startOr()
      .isNull("x", PredicateLeaf.Type.LONG)
      .between("y", PredicateLeaf.Type.DECIMAL, new HiveDecimalWritable("10"), new HiveDecimalWritable("20.0"))
      .in("z", PredicateLeaf.Type.LONG, 1L, 2L, 3L)
      .nullSafeEquals("a", PredicateLeaf.Type.STRING, new HiveVarchar("stinger", 100).toString())
      .end().end().build();
  schema = MessageTypeParser.parseMessageType(
      "message test {" + " optional int32 x; required binary y; required int32 z;" + " optional binary a;}");
  columnTypes = new HashMap<>();
  columnTypes.put("x", TypeInfoFactory.getPrimitiveTypeInfo("int"));
  columnTypes.put("y", TypeInfoFactory.getDecimalTypeInfo(4, 2));
  columnTypes.put("z", TypeInfoFactory.getPrimitiveTypeInfo("int"));
  columnTypes.put("a", TypeInfoFactory.getCharTypeInfo(100));
  assertEquals(null, ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema, columnTypes));
}
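For contrast, a hedged sketch of the positive case (not part of the original test): with only leaf types the Parquet converter supports, such as LONG against an int64 column, toFilterPredicate should return a non-null predicate. The column and message names here are made up for illustration.

SearchArgument supportedSarg = SearchArgumentFactory.newBuilder().startAnd()
    .lessThan("x", PredicateLeaf.Type.LONG, 10L)
    .end().build();
MessageType supportedSchema = MessageTypeParser.parseMessageType(
    "message test { required int64 x; }");
Map<String, TypeInfo> supportedTypes = new HashMap<>();
supportedTypes.put("x", TypeInfoFactory.getPrimitiveTypeInfo("bigint"));
// A LONG leaf on an int64 column is convertible, so the result is expected to be non-null.
assertNotNull(ParquetFilterPredicateConverter.toFilterPredicate(
    supportedSarg, supportedSchema, supportedTypes));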
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.getDecimalTypeInfo in project hive by apache.
From class TestParquetFilterPredicate, method testFilterComplexTypes2:
/**
 * Check that the converted filter predicate is null when unsupported types are included.
 * @throws Exception
 */
@Test
public void testFilterComplexTypes2() throws Exception {
  SearchArgument sarg = SearchArgumentFactory.newBuilder().startAnd()
      .lessThan("x", PredicateLeaf.Type.DATE, Date.valueOf("2005-3-12"))
      .lessThanEquals("y", PredicateLeaf.Type.STRING, new HiveChar("hi", 10).toString())
      .equals("z", PredicateLeaf.Type.DECIMAL, new HiveDecimalWritable("1.0"))
      .end().build();
  MessageType schema = MessageTypeParser.parseMessageType(
      "message test {" + " required int32 x; required binary y; required binary z;}");
  Map<String, TypeInfo> columnTypes = new HashMap<>();
  columnTypes.put("x", TypeInfoFactory.getPrimitiveTypeInfo("date"));
  columnTypes.put("y", TypeInfoFactory.getCharTypeInfo(10));
  columnTypes.put("z", TypeInfoFactory.getDecimalTypeInfo(4, 2));
  assertEquals(null, ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema, columnTypes));
  sarg = SearchArgumentFactory.newBuilder().startNot().startOr()
      .isNull("x", PredicateLeaf.Type.LONG)
      .between("y", PredicateLeaf.Type.DECIMAL, new HiveDecimalWritable("10"), new HiveDecimalWritable("20.0"))
      .in("z", PredicateLeaf.Type.LONG, 1L, 2L, 3L)
      .nullSafeEquals("a", PredicateLeaf.Type.STRING, new HiveVarchar("stinger", 100).toString())
      .end().end().build();
  schema = MessageTypeParser.parseMessageType(
      "message test {" + " optional int32 x; required binary y; required int32 z;" + " optional binary a;}");
  columnTypes = new HashMap<>();
  columnTypes.put("x", TypeInfoFactory.getPrimitiveTypeInfo("int"));
  columnTypes.put("y", TypeInfoFactory.getDecimalTypeInfo(4, 2));
  columnTypes.put("z", TypeInfoFactory.getPrimitiveTypeInfo("int"));
  columnTypes.put("a", TypeInfoFactory.getCharTypeInfo(100));
  assertEquals(null, ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema, columnTypes));
}
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.getDecimalTypeInfo in project hive by apache.
From class TestGenericUDFCeil, method testDecimal:
@Test
public void testDecimal() throws HiveException {
  GenericUDFCeil udf = new GenericUDFCeil();
  HiveDecimalWritable input = new HiveDecimalWritable(HiveDecimal.create("32300.004747"));
  DecimalTypeInfo inputTypeInfo = TypeInfoFactory.getDecimalTypeInfo(11, 6);
  ObjectInspector[] inputOIs = {
      PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo) };
  DeferredObject[] args = { new DeferredJavaObject(input) };
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(6, 0), oi.getTypeInfo());
  HiveDecimalWritable res = (HiveDecimalWritable) udf.evaluate(args);
  Assert.assertEquals(HiveDecimal.create("32301"), res.getHiveDecimal());
}
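The expected decimal(6,0) is not arbitrary: to the best of my understanding, Hive's ceil/floor on a decimal(p, s) input returns decimal(min(p - s + 1, 38), 0), where 38 is Hive's maximum decimal precision. A quick sketch of that arithmetic for this test:

// Input type above: decimal(11, 6).
int p = 11, s = 6;
int resultPrecision = Math.min(p - s + 1, HiveDecimal.MAX_PRECISION); // min(6, 38) = 6
DecimalTypeInfo expected = TypeInfoFactory.getDecimalTypeInfo(resultPrecision, 0); // decimal(6,0)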
Use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.getDecimalTypeInfo in project hive by apache.
From class TestGenericUDFAbs, method testHiveDecimal:
@Test
public void testHiveDecimal() throws HiveException {
  GenericUDFAbs udf = new GenericUDFAbs();
  int prec = 12;
  int scale = 9;
  ObjectInspector valueOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
      TypeInfoFactory.getDecimalTypeInfo(prec, scale));
  ObjectInspector[] arguments = { valueOI };
  PrimitiveObjectInspector outputOI = (PrimitiveObjectInspector) udf.initialize(arguments);
  // Make sure the result precision/scale matches the input precision/scale.
  assertEquals("result precision for abs()", prec, outputOI.precision());
  assertEquals("result scale for abs()", scale, outputOI.scale());
  DeferredObject valueObj = new DeferredJavaObject(new HiveDecimalWritable(HiveDecimal.create("107.123456789")));
  DeferredObject[] args = { valueObj };
  HiveDecimalWritable output = (HiveDecimalWritable) udf.evaluate(args);
  assertEquals("abs() test for HiveDecimal failed", 107.123456789, output.getHiveDecimal().doubleValue(), 1e-15);
  valueObj = new DeferredJavaObject(new HiveDecimalWritable(HiveDecimal.create("-107.123456789")));
  args[0] = valueObj;
  output = (HiveDecimalWritable) udf.evaluate(args);
  assertEquals("abs() test for HiveDecimal failed", 107.123456789, output.getHiveDecimal().doubleValue(), 1e-15);
  // Null input yields null output.
  args[0] = new DeferredJavaObject(null);
  output = (HiveDecimalWritable) udf.evaluate(args);
  assertEquals("abs(null)", null, output);
  // A value with more integer digits than decimal(12, 9) allows cannot be
  // enforced to the declared type, so it also evaluates to null.
  args[0] = new DeferredJavaObject(new HiveDecimalWritable(HiveDecimal.create("-1000.123456")));
  output = (HiveDecimalWritable) udf.evaluate(args);
  assertEquals("abs() of too large decimal value", null, output);
}
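The last assertion hinges on precision/scale enforcement: decimal(12, 9) leaves only 12 - 9 = 3 integer digits, and -1000.123456 needs four. A hedged sketch of the underlying check (HiveDecimal.enforcePrecisionScale returns null when the value cannot fit):

HiveDecimal tooLarge = HiveDecimal.create("-1000.123456");
// Four integer digits do not fit in the three allowed by decimal(12, 9).
System.out.println(HiveDecimal.enforcePrecisionScale(tooLarge, 12, 9)); // null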