
Example 1 with TypeInfoFactory.getDecimalTypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.getDecimalTypeInfo in project hive by apache.
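TypeInfoFactory.getDecimalTypeInfo(precision, scale) returns the cached DecimalTypeInfo describing a Hive decimal(precision, scale) type; the examples below use it to type literals, Parquet filter columns, and UDF inputs and outputs. As a minimal self-contained sketch of the call itself (the wrapper class is hypothetical, added only so the snippet runs):

import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class GetDecimalTypeInfoSketch {
    public static void main(String[] args) {
        // Fetch (or create and cache) the type info for decimal(10,2).
        DecimalTypeInfo di = TypeInfoFactory.getDecimalTypeInfo(10, 2);
        System.out.println(di.getTypeName());  // decimal(10,2)
        System.out.println(di.precision());    // 10
        System.out.println(di.scale());        // 2
    }
}

TypeInfoFactory caches these objects, so repeated calls with the same precision and scale return the same instance.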

the class ExprNodeDescExprFactory method adjustType.

private DecimalTypeInfo adjustType(HiveDecimal hd) {
    // Note: the normalize() call with rounding in HiveDecimal will currently reduce the
    // precision and scale of the value by throwing away trailing zeroes. This may or may
    // not be desirable for the literals; however, this used to be the default behavior
    // for explicit decimal literals (e.g. 1.0BD), so we keep this behavior for now.
    int prec = 1;
    int scale = 0;
    if (hd != null) {
        prec = hd.precision();
        scale = hd.scale();
    }
    DecimalTypeInfo typeInfo = TypeInfoFactory.getDecimalTypeInfo(prec, scale);
    return typeInfo;
}
Also used : DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo)
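The normalization the comment describes is easy to see in isolation: HiveDecimal.create trims trailing zeroes, so the literal 1.0 comes out as decimal(1,0) rather than decimal(2,1). A minimal sketch of what adjustType computes (not part of the Hive source; the class wrapper is hypothetical):

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class AdjustTypeSketch {
    public static void main(String[] args) {
        // Normalization drops the trailing zero: precision 1, scale 0.
        HiveDecimal hd = HiveDecimal.create("1.0");
        DecimalTypeInfo ti = TypeInfoFactory.getDecimalTypeInfo(hd.precision(), hd.scale());
        System.out.println(ti.getTypeName()); // decimal(1,0), not decimal(2,1)
        // A null HiveDecimal falls back to the same default, decimal(1,0).
    }
}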

Example 2 with TypeInfoFactory.getDecimalTypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.getDecimalTypeInfo in project hive by apache.

the class TestParquetFilterPredicate method testFilterComplexTypes.

/**
 * Check that the converted filter predicate is null if unsupported types are included.
 * @throws Exception
 */
@Test
public void testFilterComplexTypes() throws Exception {
    SearchArgument sarg = SearchArgumentFactory.newBuilder()
            .startAnd()
            .lessThan("x", PredicateLeaf.Type.DATE, Date.valueOf("1970-1-11"))
            .lessThanEquals("y", PredicateLeaf.Type.STRING, new HiveChar("hi", 10).toString())
            .equals("z", PredicateLeaf.Type.DECIMAL, new HiveDecimalWritable("1.0"))
            .end()
            .build();
    MessageType schema = MessageTypeParser.parseMessageType("message test {" + " required int32 x; required binary y; required binary z;}");
    Map<String, TypeInfo> columnTypes = new HashMap<>();
    columnTypes.put("x", TypeInfoFactory.getPrimitiveTypeInfo("date"));
    columnTypes.put("y", TypeInfoFactory.getCharTypeInfo(10));
    columnTypes.put("z", TypeInfoFactory.getDecimalTypeInfo(4, 2));
    assertEquals(null, ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema, columnTypes));
    sarg = SearchArgumentFactory.newBuilder()
            .startNot()
            .startOr()
            .isNull("x", PredicateLeaf.Type.LONG)
            .between("y", PredicateLeaf.Type.DECIMAL, new HiveDecimalWritable("10"), new HiveDecimalWritable("20.0"))
            .in("z", PredicateLeaf.Type.LONG, 1L, 2L, 3L)
            .nullSafeEquals("a", PredicateLeaf.Type.STRING, new HiveVarchar("stinger", 100).toString())
            .end()
            .end()
            .build();
    schema = MessageTypeParser.parseMessageType("message test {" + " optional int32 x; required binary y; required int32 z;" + " optional binary a;}");
    columnTypes = new HashMap<>();
    columnTypes.put("x", TypeInfoFactory.getPrimitiveTypeInfo("int"));
    columnTypes.put("y", TypeInfoFactory.getDecimalTypeInfo(4, 2));
    columnTypes.put("z", TypeInfoFactory.getPrimitiveTypeInfo("int"));
    columnTypes.put("z", TypeInfoFactory.getCharTypeInfo(100));
    assertEquals(null, ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema, columnTypes));
}
Also used : HashMap(java.util.HashMap) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) SearchArgument(org.apache.hadoop.hive.ql.io.sarg.SearchArgument) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) MessageType(org.apache.parquet.schema.MessageType) Test(org.junit.Test)
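For contrast, here is a hedged sketch (not one of the project's tests, reusing the imports listed above plus org.junit.Assert.assertNotNull) of the supported-type path: when every leaf uses a supported type, such as a LONG leaf over an int64 column, the converter is expected to return a non-null predicate. The non-null outcome is the assumption being illustrated:

SearchArgument ok = SearchArgumentFactory.newBuilder()
        .startAnd()
        .lessThan("x", PredicateLeaf.Type.LONG, 10L)
        .end()
        .build();
MessageType okSchema = MessageTypeParser.parseMessageType("message test { required int64 x; }");
Map<String, TypeInfo> okTypes = new HashMap<>();
okTypes.put("x", TypeInfoFactory.getPrimitiveTypeInfo("bigint"));
// All leaf types are supported here, so conversion should succeed.
assertNotNull(ParquetFilterPredicateConverter.toFilterPredicate(ok, okSchema, okTypes));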

Example 3 with TypeInfoFactory.getDecimalTypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.getDecimalTypeInfo in project hive by apache.

the class TestParquetFilterPredicate method testFilterComplexTypes2.

/**
 * Check that the converted filter predicate is null if unsupported types are included.
 * @throws Exception
 */
@Test
public void testFilterComplexTypes2() throws Exception {
    SearchArgument sarg = SearchArgumentFactory.newBuilder()
            .startAnd()
            .lessThan("x", PredicateLeaf.Type.DATE, Date.valueOf("2005-3-12"))
            .lessThanEquals("y", PredicateLeaf.Type.STRING, new HiveChar("hi", 10).toString())
            .equals("z", PredicateLeaf.Type.DECIMAL, new HiveDecimalWritable("1.0"))
            .end()
            .build();
    MessageType schema = MessageTypeParser.parseMessageType("message test {" + " required int32 x; required binary y; required binary z;}");
    Map<String, TypeInfo> columnTypes = new HashMap<>();
    columnTypes.put("x", TypeInfoFactory.getPrimitiveTypeInfo("date"));
    columnTypes.put("y", TypeInfoFactory.getCharTypeInfo(10));
    columnTypes.put("z", TypeInfoFactory.getDecimalTypeInfo(4, 2));
    assertEquals(null, ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema, columnTypes));
    sarg = SearchArgumentFactory.newBuilder()
            .startNot()
            .startOr()
            .isNull("x", PredicateLeaf.Type.LONG)
            .between("y", PredicateLeaf.Type.DECIMAL, new HiveDecimalWritable("10"), new HiveDecimalWritable("20.0"))
            .in("z", PredicateLeaf.Type.LONG, 1L, 2L, 3L)
            .nullSafeEquals("a", PredicateLeaf.Type.STRING, new HiveVarchar("stinger", 100).toString())
            .end()
            .end()
            .build();
    schema = MessageTypeParser.parseMessageType("message test {" + " optional int32 x; required binary y; required int32 z;" + " optional binary a;}");
    columnTypes = new HashMap<>();
    columnTypes.put("x", TypeInfoFactory.getPrimitiveTypeInfo("int"));
    columnTypes.put("y", TypeInfoFactory.getDecimalTypeInfo(4, 2));
    columnTypes.put("z", TypeInfoFactory.getPrimitiveTypeInfo("int"));
    columnTypes.put("a", TypeInfoFactory.getCharTypeInfo(100));
    assertEquals(null, ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema, columnTypes));
}
Also used : HashMap(java.util.HashMap) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) SearchArgument(org.apache.hadoop.hive.ql.io.sarg.SearchArgument) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) MessageType(org.apache.parquet.schema.MessageType) Test(org.junit.Test)

Example 4 with TypeInfoFactory.getDecimalTypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.getDecimalTypeInfo in project hive by apache.

the class TestGenericUDFCeil method testDecimal.

@Test
public void testDecimal() throws HiveException {
    GenericUDFCeil udf = new GenericUDFCeil();
    HiveDecimalWritable input = new HiveDecimalWritable(HiveDecimal.create("32300.004747"));
    DecimalTypeInfo inputTypeInfo = TypeInfoFactory.getDecimalTypeInfo(11, 6);
    ObjectInspector[] inputOIs = { PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo) };
    DeferredObject[] args = { new DeferredJavaObject(input) };
    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
    Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(6, 0), oi.getTypeInfo());
    HiveDecimalWritable res = (HiveDecimalWritable) udf.evaluate(args);
    Assert.assertEquals(HiveDecimal.create("32301"), res.getHiveDecimal());
}
Also used : DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) DeferredJavaObject(org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) DeferredObject(org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject) Test(org.junit.Test)
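The expected decimal(6,0) follows from how ceil types its decimal result: the fractional digits disappear and one integer digit is added for a possible carry, i.e. decimal(p - s + 1, 0). A short sketch of that arithmetic, inferred from the assertion above rather than quoted from the UDF:

// decimal(11,6) leaves 11 - 6 = 5 integer digits; ceil may carry into a 6th.
DecimalTypeInfo in = TypeInfoFactory.getDecimalTypeInfo(11, 6);
DecimalTypeInfo out = TypeInfoFactory.getDecimalTypeInfo(in.precision() - in.scale() + 1, 0);
// out.getTypeName() is "decimal(6,0)", matching oi.getTypeInfo() in the test.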

Example 5 with TypeInfoFactory.getDecimalTypeInfo

use of org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.getDecimalTypeInfo in project hive by apache.

the class TestGenericUDFAbs method testHiveDecimal.

@Test
public void testHiveDecimal() throws HiveException {
    GenericUDFAbs udf = new GenericUDFAbs();
    int prec = 12;
    int scale = 9;
    ObjectInspector valueOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(prec, scale));
    ObjectInspector[] arguments = { valueOI };
    PrimitiveObjectInspector outputOI = (PrimitiveObjectInspector) udf.initialize(arguments);
    // Make sure result precision/scale matches the input prec/scale
    assertEquals("result precision for abs()", prec, outputOI.precision());
    assertEquals("result scale for abs()", scale, outputOI.scale());
    DeferredObject valueObj = new DeferredJavaObject(new HiveDecimalWritable(HiveDecimal.create("107.123456789")));
    DeferredObject[] args = { valueObj };
    HiveDecimalWritable output = (HiveDecimalWritable) udf.evaluate(args);
    assertEquals("abs() test for HiveDecimal failed ", 107.123456789, output.getHiveDecimal().doubleValue(), 1e-15);
    valueObj = new DeferredJavaObject(new HiveDecimalWritable(HiveDecimal.create("-107.123456789")));
    args[0] = valueObj;
    output = (HiveDecimalWritable) udf.evaluate(args);
    assertEquals("abs() test for HiveDecimal failed ", 107.123456789, output.getHiveDecimal().doubleValue(), 1e-15);
    // null input
    args[0] = new DeferredJavaObject(null);
    output = (HiveDecimalWritable) udf.evaluate(args);
    assertEquals("abs(null)", null, output);
    // -1000.123456 needs 4 integer digits, but decimal(12,9) allows only 12 - 9 = 3, so the result is null
    args[0] = new DeferredJavaObject(new HiveDecimalWritable(HiveDecimal.create("-1000.123456")));
    output = (HiveDecimalWritable) udf.evaluate(args);
    assertEquals("abs() of too large decimal value", null, output);
}
Also used : PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) DeferredJavaObject(org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject) GenericUDFAbs(org.apache.hadoop.hive.ql.udf.generic.GenericUDFAbs) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) DeferredObject(org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject) Test(org.junit.Test)
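Why the last case yields null: decimal(12,9) leaves only 12 - 9 = 3 digits before the decimal point, and -1000.123456 needs 4. A hedged sketch using HiveDecimal.enforcePrecisionScale, one way to check whether a value fits a declared decimal type:

// The value cannot be represented as decimal(12,9), so enforcement returns null,
// which is why abs() above produces a null output.
HiveDecimal v = HiveDecimal.create("-1000.123456"); // 4 integer digits, 6 fractional
HiveDecimal fitted = HiveDecimal.enforcePrecisionScale(v, 12, 9);
System.out.println(fitted); // null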

Aggregations

Test (org.junit.Test) : 40
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) : 38
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) : 38
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) : 27
DeferredJavaObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject) : 25
DeferredObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject) : 25
DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) : 11
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable) : 7
LongWritable (org.apache.hadoop.io.LongWritable) : 5
BaseScalarUdfTest (org.apache.hadoop.hive.ql.testutil.BaseScalarUdfTest) : 3
IntWritable (org.apache.hadoop.io.IntWritable) : 3
HashMap (java.util.HashMap) : 2
HiveChar (org.apache.hadoop.hive.common.type.HiveChar) : 2
HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar) : 2
SearchArgument (org.apache.hadoop.hive.ql.io.sarg.SearchArgument) : 2
TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) : 2
MessageType (org.apache.parquet.schema.MessageType) : 2
UDFArgumentException (org.apache.hadoop.hive.ql.exec.UDFArgumentException) : 1
UDFArgumentTypeException (org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException) : 1
GenericUDFAbs (org.apache.hadoop.hive.ql.udf.generic.GenericUDFAbs) : 1