Use of org.apache.hadoop.hive.serde2.io.HiveDecimalWritable in project hive by apache.
From the class TestGenericUDFOPDivide, method testDecimalDivideDecimal2.
@Test
public void testDecimalDivideDecimal2() throws HiveException {
  GenericUDFOPDivide udf = new GenericUDFOPDivide();
  HiveDecimalWritable left = new HiveDecimalWritable(HiveDecimal.create("5"));
  HiveDecimalWritable right = new HiveDecimalWritable(HiveDecimal.create("25"));
  ObjectInspector[] inputOIs = {
      PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(1, 0)),
      PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(2, 0))
  };
  DeferredObject[] args = { new DeferredJavaObject(left), new DeferredJavaObject(right) };
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  // decimal(1,0) / decimal(2,0) produces decimal(7,6)
  Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(7, 6), oi.getTypeInfo());
  HiveDecimalWritable res = (HiveDecimalWritable) udf.evaluate(args);
  Assert.assertEquals(HiveDecimal.create("0.2"), res.getHiveDecimal());
}
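The decimal(7,6) assertion above is consistent with the division rule commonly described for Hive decimals: the scale is max(6, s1 + p2 + 1) and the precision is (p1 - s1) + s2 + scale. The following standalone sketch is not part of the Hive test; the helper name and the rule as written here are assumptions used only to illustrate how decimal(1,0) / decimal(2,0) can land on decimal(7,6).

// Hypothetical helper, not in the Hive codebase: illustrates the assumed
// precision/scale rule for decimal division that matches the assertion above.
public class DecimalDivideTypeSketch {
  // Assumed rule: scale = max(6, s1 + p2 + 1); precision = (p1 - s1) + s2 + scale
  static int[] divideResultType(int p1, int s1, int p2, int s2) {
    int scale = Math.max(6, s1 + p2 + 1);
    int precision = (p1 - s1) + s2 + scale;
    return new int[] { precision, scale };
  }

  public static void main(String[] args) {
    int[] t = divideResultType(1, 0, 2, 0);
    System.out.println("decimal(" + t[0] + "," + t[1] + ")"); // prints decimal(7,6)
  }
}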
Use of org.apache.hadoop.hive.serde2.io.HiveDecimalWritable in project hive by apache.
From the class TestGenericUDFOPMinus, method testDouleMinusDecimal.
@Test
public void testDouleMinusDecimal() throws HiveException {
  GenericUDFOPMinus udf = new GenericUDFOPMinus();
  DoubleWritable left = new DoubleWritable(74.52);
  HiveDecimalWritable right = new HiveDecimalWritable(HiveDecimal.create("234.97"));
  ObjectInspector[] inputOIs = {
      PrimitiveObjectInspectorFactory.writableDoubleObjectInspector,
      PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(5, 2))
  };
  DeferredObject[] args = { new DeferredJavaObject(left), new DeferredJavaObject(right) };
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  // DOUBLE combined with DECIMAL promotes the result to DOUBLE
  Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo());
  DoubleWritable res = (DoubleWritable) udf.evaluate(args);
  Assert.assertEquals(new Double(-160.45), new Double(res.get()));
}
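Since one operand is a double, the decimal operand is converted to a double before the subtraction, which is why the result inspector is doubleTypeInfo rather than a decimal type. A minimal sketch of that promotion, separate from the Hive test (the class name here is made up for illustration):

// Illustrative only: the decimal operand converted to double, then plain
// floating-point subtraction, mirroring the promoted DOUBLE result above.
import org.apache.hadoop.hive.common.type.HiveDecimal;

public class DoubleMinusDecimalSketch {
  public static void main(String[] args) {
    double left = 74.52;
    double right = HiveDecimal.create("234.97").doubleValue(); // decimal promoted to double
    System.out.println(left - right); // approximately -160.45
  }
}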
Use of org.apache.hadoop.hive.serde2.io.HiveDecimalWritable in project hive by apache.
From the class TestGenericUDFOPMinus, method testDecimalMinusDecimal.
@Test
public void testDecimalMinusDecimal() throws HiveException {
  GenericUDFOPMinus udf = new GenericUDFOPMinus();
  HiveDecimalWritable left = new HiveDecimalWritable(HiveDecimal.create("14.5"));
  HiveDecimalWritable right = new HiveDecimalWritable(HiveDecimal.create("234.97"));
  ObjectInspector[] inputOIs = {
      PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(3, 1)),
      PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(5, 2))
  };
  DeferredObject[] args = { new DeferredJavaObject(left), new DeferredJavaObject(right) };
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  // decimal(3,1) - decimal(5,2) produces decimal(6,2)
  Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(6, 2), oi.getTypeInfo());
  HiveDecimalWritable res = (HiveDecimalWritable) udf.evaluate(args);
  Assert.assertEquals(HiveDecimal.create("-220.47"), res.getHiveDecimal());
}
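The decimal(6,2) result type matches the rule usually quoted for decimal addition and subtraction: the scale is max(s1, s2) and the precision is max(p1 - s1, p2 - s2) + 1 + scale. The sketch below is not from the Hive sources; the helper and the rule as stated are assumptions meant only to show how decimal(3,1) minus decimal(5,2) arrives at decimal(6,2).

// Hypothetical helper, not in the Hive codebase: illustrates the assumed
// precision/scale rule for decimal subtraction behind the assertion above.
public class DecimalMinusTypeSketch {
  // Assumed rule: scale = max(s1, s2); precision = max(p1 - s1, p2 - s2) + 1 + scale
  static int[] minusResultType(int p1, int s1, int p2, int s2) {
    int scale = Math.max(s1, s2);
    int precision = Math.max(p1 - s1, p2 - s2) + 1 + scale;
    return new int[] { precision, scale };
  }

  public static void main(String[] args) {
    int[] t = minusResultType(3, 1, 5, 2);
    System.out.println("decimal(" + t[0] + "," + t[1] + ")"); // prints decimal(6,2)
  }
}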
Use of org.apache.hadoop.hive.serde2.io.HiveDecimalWritable in project hive by apache.
From the class TestGenericUDFRound, method getExpectedResult.
@Override
public InspectableObject[] getExpectedResult() {
  DataBuilder db = new DataBuilder();
  db.setColumnNames("_col1", "_col2", "_col3", "_col4", "_col5", "_col6", "_col7", "_col8");
  db.setColumnTypes(
      PrimitiveObjectInspectorFactory.javaStringObjectInspector,
      PrimitiveObjectInspectorFactory.writableIntObjectInspector,
      PrimitiveObjectInspectorFactory.writableDoubleObjectInspector,
      PrimitiveObjectInspectorFactory.writableFloatObjectInspector,
      PrimitiveObjectInspectorFactory.writableByteObjectInspector,
      PrimitiveObjectInspectorFactory.writableShortObjectInspector,
      PrimitiveObjectInspectorFactory.writableLongObjectInspector,
      PrimitiveObjectInspectorFactory.writableHiveDecimalObjectInspector);
  // Expected output rows, one column per tested primitive type
  db.addRow(null, new IntWritable(170), new DoubleWritable(1.1), new FloatWritable(32f),
      new ByteWritable((byte) 0), new ShortWritable((short) 1234), new LongWritable(123500L),
      new HiveDecimalWritable(HiveDecimal.create("983.724")));
  db.addRow(new DoubleWritable(-200), null, null, new FloatWritable(0f),
      new ByteWritable((byte) 100), new ShortWritable((short) 551), new LongWritable(900L),
      new HiveDecimalWritable(HiveDecimal.create("983723.005")));
  db.addRow(new DoubleWritable(500), new IntWritable(22345), new DoubleWritable(-23.000), new FloatWritable(-3f),
      new ByteWritable((byte) 100), new ShortWritable((short) 2321), new LongWritable(9200L),
      new HiveDecimalWritable(HiveDecimal.create("-932032.7")));
  return db.createRows();
}
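The decimal column in each expected row is built the same way throughout this page: a HiveDecimal parsed from a string, wrapped in a HiveDecimalWritable. A minimal, self-contained sketch of that pattern (the class name is invented for illustration; it only uses the constructors and accessors already shown in the snippets above):

// Illustrative only: constructing and reading back a decimal cell the way the
// expected rows above do.
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

public class DecimalCellSketch {
  public static void main(String[] args) {
    HiveDecimalWritable cell = new HiveDecimalWritable(HiveDecimal.create("983.724"));
    System.out.println(cell.getHiveDecimal()); // 983.724
  }
}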
Use of org.apache.hadoop.hive.serde2.io.HiveDecimalWritable in project hive by apache.
From the class TestGenericUDFOPNegative, method testDecimal.
@Test
public void testDecimal() throws HiveException {
  GenericUDFOPNegative udf = new GenericUDFOPNegative();
  HiveDecimalWritable input = new HiveDecimalWritable(HiveDecimal.create("32300.004747"));
  DecimalTypeInfo inputTypeInfo = TypeInfoFactory.getDecimalTypeInfo(11, 6);
  ObjectInspector[] inputOIs = {
      PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo)
  };
  DeferredObject[] args = { new DeferredJavaObject(input) };
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  // Unary minus preserves the input type decimal(11,6)
  Assert.assertEquals(inputTypeInfo, oi.getTypeInfo());
  HiveDecimalWritable res = (HiveDecimalWritable) udf.evaluate(args);
  Assert.assertEquals(HiveDecimal.create("-32300.004747"), res.getHiveDecimal());
}
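Negating a decimal never needs extra digits, which is why the output inspector keeps the input's decimal(11,6) type. A minimal sketch, separate from the test, assuming HiveDecimal exposes a negate() method (the class name here is made up for illustration):

// Illustrative only: negating a HiveDecimal value directly; assumes negate()
// is available on HiveDecimal.
import org.apache.hadoop.hive.common.type.HiveDecimal;

public class DecimalNegateSketch {
  public static void main(String[] args) {
    HiveDecimal d = HiveDecimal.create("32300.004747");
    System.out.println(d.negate()); // -32300.004747
  }
}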