use of org.apache.hadoop.hive.serde2.io.DoubleWritable in project hive by apache.
the class TestUDFMath method testAcos.
@Test
public void testAcos() throws HiveException {
  UDFAcos udf = new UDFAcos();
  input = createDecimal("0.716");
  DoubleWritable res = udf.evaluate(input);
  Assert.assertEquals(0.7727408115633954, res.get(), 0.000001);
}
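The snippet relies on an input field and a createDecimal helper that TestUDFMath defines elsewhere and which are not shown above. A minimal sketch of what they could look like, assuming the helper simply parses the decimal string and wraps it in the DoubleWritable that UDFAcos.evaluate accepts:

// Assumed field and helper, reconstructed for illustration only; not shown in the snippet above.
private DoubleWritable input;

private DoubleWritable createDecimal(String value) {
  // Parse the string through HiveDecimal and hand the UDF a DoubleWritable.
  return new DoubleWritable(HiveDecimal.create(value).doubleValue());
}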
use of org.apache.hadoop.hive.serde2.io.DoubleWritable in project hive by apache.
the class TestUDFMath method testLog.
@Test
public void testLog() throws HiveException {
  UDFLog udf = new UDFLog();
  input = createDecimal("7.38905609893065");
  DoubleWritable res = udf.evaluate(input);
  Assert.assertEquals(2.0, res.get(), 0.000001);
  res = udf.evaluate(createDecimal("3.0"), createDecimal("9.0"));
  Assert.assertEquals(2.0, res.get(), 0.000001);
}
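The two assertions exercise both forms of UDFLog: the single-argument call is the natural logarithm, and 7.38905609893065 is e squared, so the result is 2.0; the two-argument call takes the base first, so evaluate(3.0, 9.0) computes log base 3 of 9, which is also 2.0.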
use of org.apache.hadoop.hive.serde2.io.DoubleWritable in project hive by apache.
the class WritableHiveCharObjectInspector method getPrimitiveWritableObject.
@Override
public HiveCharWritable getPrimitiveWritableObject(Object o) {
  // If the input is some other primitive writable, convert it via its string form and
  // output a new writable that enforces this inspector's char length.
  if (o == null) {
    return null;
  }
  if ((o instanceof Text) || (o instanceof TimestampWritableV2) || (o instanceof HiveDecimalWritable)
      || (o instanceof DoubleWritable) || (o instanceof FloatWritable) || (o instanceof LongWritable)
      || (o instanceof IntWritable) || (o instanceof BooleanWritable)) {
    String str = o.toString();
    HiveCharWritable hcw = new HiveCharWritable();
    hcw.set(str, ((CharTypeInfo) typeInfo).getLength());
    return hcw;
  }
  HiveCharWritable writable = (HiveCharWritable) o;
  if (doesWritableMatchTypeParams(writable)) {
    return writable;
  }
  return getWritableWithParams(writable);
}
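A minimal usage sketch of the conversion path above, assuming a WritableHiveCharObjectInspector constructed from a CharTypeInfo and a char(5) length chosen purely for illustration; a DoubleWritable falls into the instanceof branch and comes back as a HiveCharWritable enforced to that length:

// Sketch only; the constructor call and the length 5 are assumptions, not taken from the snippet above.
CharTypeInfo charType = new CharTypeInfo(5);
WritableHiveCharObjectInspector oi = new WritableHiveCharObjectInspector(charType);

// The DoubleWritable is converted via toString(), then set(str, 5) enforces the char length.
HiveCharWritable hcw = oi.getPrimitiveWritableObject(new DoubleWritable(3.14159));
// hcw now holds the string form of the double, constrained to char(5).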
use of org.apache.hadoop.hive.serde2.io.DoubleWritable in project hive by apache.
the class TestGenericUDAFPercentileDisc method checkPercentile.
private void checkPercentile(Long[] items, double percentile, double expected) throws Exception {
  PercentileContLongEvaluator eval = new GenericUDAFPercentileDisc.PercentileDiscLongEvaluator();
  PercentileAgg agg = new PercentileContLongEvaluator().new PercentileAgg();
  agg.percentiles = new ArrayList<DoubleWritable>();
  agg.percentiles.add(new DoubleWritable(percentile));
  agg.isAscending = true;
  for (int i = 0; i < items.length; i++) {
    eval.increment(agg, new LongWritable(items[i]), 1);
  }
  DoubleWritable result = (DoubleWritable) eval.terminate(agg);
  Assert.assertEquals(expected, result.get(), 0.01);
  eval.close();
}
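The helper feeds each long item into the aggregation buffer with a count of 1, then asserts on the single discrete percentile that terminate produces. An illustrative call, with the input and expected value chosen here purely as an example and not taken from the test class:

// Example invocation (values assumed): for {1, 2, 3, 4} the 0.5 discrete percentile is 2,
// the first item whose cumulative distribution reaches the requested percentile.
checkPercentile(new Long[] { 1L, 2L, 3L, 4L }, 0.5, 2.0);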
use of org.apache.hadoop.hive.serde2.io.DoubleWritable in project hive by apache.
the class TestGenericUDFNullif method testConversionInSameGroup.
@Test
public void testConversionInSameGroup() throws HiveException {
  GenericUDFNullif udf = new GenericUDFNullif();
  ObjectInspector[] inputOIs = { PrimitiveObjectInspectorFactory.writableDoubleObjectInspector,
      PrimitiveObjectInspectorFactory.writableByteObjectInspector };
  DeferredObject[] args = { new DeferredJavaObject(new DoubleWritable(4.0)),
      new DeferredJavaObject(new ByteWritable((byte) 4)) };
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
}
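The snippet stops right after initialize, so args is left unused. A hedged sketch of how such a test would typically continue, assuming NULLIF resolves the double/byte pair to a double result and returns NULL when the converted values compare equal; this continuation is not part of the snippet above:

// Hypothetical continuation, not taken from the original test body.
Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo());
// 4.0 (double) and (byte) 4 convert to the same double value, so NULLIF is expected to yield NULL.
Assert.assertNull(udf.evaluate(args));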