Use of org.apache.hadoop.io.DoubleWritable in project cdap by caskdata.
In the class AggregationFunctionsTest, the method standardDeviationGenerateAggregationTest:
@Test
public void standardDeviationGenerateAggregationTest() throws Exception {
    DataQualityWritable val1 = new DataQualityWritable();
    val1.set(new DoubleWritable(2.0));
    DataQualityWritable val2 = new DataQualityWritable();
    val2.set(new DoubleWritable(5.0));
    DataQualityWritable val3 = new DataQualityWritable();
    val3.set(new DoubleWritable(10.0));
    DataQualityWritable val4 = new DataQualityWritable();
    val4.set(new DoubleWritable(52.0));
    StandardDeviation standardDeviation = new StandardDeviation();
    standardDeviation.add(val1);
    standardDeviation.add(val2);
    standardDeviation.add(val3);
    standardDeviation.add(val4);
    byte[] output = standardDeviation.aggregate();
    Assert.assertEquals(20.265426, Bytes.toDouble(output), 0.001);
}
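The asserted value is the population standard deviation of {2.0, 5.0, 10.0, 52.0}: the mean is 69 / 4 = 17.25, the summed squared deviations are 1642.75, the variance is 1642.75 / 4 = 410.6875, and sqrt(410.6875) ≈ 20.265426. A minimal sketch that reproduces the number in plain Java (illustrative only, not the CDAP StandardDeviation implementation):

double[] values = {2.0, 5.0, 10.0, 52.0};
double mean = 0;
for (double v : values) {
    mean += v / values.length; // 17.25
}
double sumSquaredDeviations = 0;
for (double v : values) {
    sumSquaredDeviations += (v - mean) * (v - mean); // 1642.75 in total
}
// Dividing by N (population variance), not N - 1, matches the asserted value
double stdDev = Math.sqrt(sumSquaredDeviations / values.length); // ~20.265426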
Use of org.apache.hadoop.io.DoubleWritable in project presto by prestodb.
In the class OrcTester, the method decodeRecordReaderValue:
private static Object decodeRecordReaderValue(Type type, Object actualValue) {
    if (actualValue instanceof OrcLazyObject) {
        try {
            actualValue = ((OrcLazyObject) actualValue).materialize();
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    }
    if (actualValue instanceof BooleanWritable) {
        actualValue = ((BooleanWritable) actualValue).get();
    } else if (actualValue instanceof ByteWritable) {
        actualValue = ((ByteWritable) actualValue).get();
    } else if (actualValue instanceof BytesWritable) {
        actualValue = new SqlVarbinary(((BytesWritable) actualValue).copyBytes());
    } else if (actualValue instanceof DateWritable) {
        actualValue = new SqlDate(((DateWritable) actualValue).getDays());
    } else if (actualValue instanceof DoubleWritable) {
        actualValue = ((DoubleWritable) actualValue).get();
    } else if (actualValue instanceof FloatWritable) {
        actualValue = ((FloatWritable) actualValue).get();
    } else if (actualValue instanceof IntWritable) {
        actualValue = ((IntWritable) actualValue).get();
    } else if (actualValue instanceof HiveCharWritable) {
        actualValue = ((HiveCharWritable) actualValue).getPaddedValue().toString();
    } else if (actualValue instanceof LongWritable) {
        actualValue = ((LongWritable) actualValue).get();
    } else if (actualValue instanceof ShortWritable) {
        actualValue = ((ShortWritable) actualValue).get();
    } else if (actualValue instanceof HiveDecimalWritable) {
        DecimalType decimalType = (DecimalType) type;
        HiveDecimalWritable writable = (HiveDecimalWritable) actualValue;
        // writable messes with the scale so rescale the values to the Presto type
        BigInteger rescaledValue = rescale(writable.getHiveDecimal().unscaledValue(), writable.getScale(), decimalType.getScale());
        actualValue = new SqlDecimal(rescaledValue, decimalType.getPrecision(), decimalType.getScale());
    } else if (actualValue instanceof Text) {
        actualValue = actualValue.toString();
    } else if (actualValue instanceof TimestampWritable) {
        TimestampWritable timestamp = (TimestampWritable) actualValue;
        actualValue = sqlTimestampOf((timestamp.getSeconds() * 1000) + (timestamp.getNanos() / 1000000L), SESSION);
    } else if (actualValue instanceof OrcStruct) {
        List<Object> fields = new ArrayList<>();
        OrcStruct structObject = (OrcStruct) actualValue;
        for (int fieldId = 0; fieldId < structObject.getNumFields(); fieldId++) {
            fields.add(OrcUtil.getFieldValue(structObject, fieldId));
        }
        actualValue = decodeRecordReaderStruct(type, fields);
    } else if (actualValue instanceof com.facebook.hive.orc.OrcStruct) {
        List<Object> fields = new ArrayList<>();
        com.facebook.hive.orc.OrcStruct structObject = (com.facebook.hive.orc.OrcStruct) actualValue;
        for (int fieldId = 0; fieldId < structObject.getNumFields(); fieldId++) {
            fields.add(structObject.getFieldValue(fieldId));
        }
        actualValue = decodeRecordReaderStruct(type, fields);
    } else if (actualValue instanceof List) {
        actualValue = decodeRecordReaderList(type, ((List<?>) actualValue));
    } else if (actualValue instanceof Map) {
        actualValue = decodeRecordReaderMap(type, (Map<?, ?>) actualValue);
    }
    return actualValue;
}
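The HiveDecimalWritable branch is the subtle one: the writable may normalize the scale of the stored decimal, so the unscaled value is rescaled to the scale of the Presto DecimalType before being wrapped in a SqlDecimal. A simplified sketch of what such a rescale helper does, assuming the usual unscaled-value representation (this is an illustration, not necessarily Presto's exact implementation, which may reject downscaling outright):

static BigInteger rescale(BigInteger unscaledValue, int fromScale, int toScale) {
    if (toScale >= fromScale) {
        // Raising the scale multiplies the unscaled value by a power of ten, e.g.
        // 123.45 (unscaled 12345, scale 2) becomes 123.4500 (unscaled 1234500, scale 4)
        return unscaledValue.multiply(BigInteger.TEN.pow(toScale - fromScale));
    }
    // Lowering the scale divides, truncating digits beyond the new scale
    return unscaledValue.divide(BigInteger.TEN.pow(fromScale - toScale));
}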
Use of org.apache.hadoop.io.DoubleWritable in project druid by druid-io.
In the class OrcStructConverterTest, the method testConvertRootFieldWithNonNullDoubleReturningOriginalValue:
@Test
public void testConvertRootFieldWithNonNullDoubleReturningOriginalValue() {
    final OrcStructConverter converter = new OrcStructConverter(false);
    assertConversion(converter, TypeDescription.createDouble(), 1.0d, new DoubleWritable(1.0d));
}
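DoubleWritable is a thin Writable box around a primitive double, which is why the converter is expected to hand back the original value unchanged. The round trip through the Writable contract can be shown with the Hadoop API alone (snippet form; place inside a method declared to throw IOException):

DoubleWritable in = new DoubleWritable(1.0d);
ByteArrayOutputStream buffer = new ByteArrayOutputStream();
in.write(new DataOutputStream(buffer)); // serializes the 8-byte double
DoubleWritable out = new DoubleWritable();
out.readFields(new DataInputStream(new ByteArrayInputStream(buffer.toByteArray())));
// out.get() == 1.0d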
Use of org.apache.hadoop.io.DoubleWritable in project hive by apache.
In the class TestETypeConverter, the method testGetIntConverterForDouble:
@Test
public void testGetIntConverterForDouble() throws Exception {
    PrimitiveType primitiveType = Types.optional(PrimitiveTypeName.INT32).named("value");
    Writable writable = getWritableFromPrimitiveConverter(createHiveTypeInfo("double"), primitiveType, 22225);
    DoubleWritable doubleWritable = (DoubleWritable) writable;
    assertEquals((double) 22225, (double) doubleWritable.get(), 0);
}
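Here the Parquet column is physically an INT32, but the Hive column type is double, so the converter is expected to widen each int into a DoubleWritable. A minimal sketch of that widening step as a Parquet PrimitiveConverter (the class name is hypothetical; getWritableFromPrimitiveConverter is a helper defined in the test class):

// Hypothetical converter: widens Parquet INT32 values to Hive doubles
class IntToDoubleConverter extends org.apache.parquet.io.api.PrimitiveConverter {
    private DoubleWritable current;

    @Override
    public void addInt(int value) {
        // int-to-double widening is exact for values this small, so 22225 -> 22225.0
        current = new DoubleWritable(value);
    }

    DoubleWritable getCurrent() {
        return current;
    }
}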
Use of org.apache.hadoop.io.DoubleWritable in project hive by apache.
In the class TestETypeConverter, the method testGetDecimalConverterDoubleHiveType:
@Test
public void testGetDecimalConverterDoubleHiveType() throws Exception {
    PrimitiveType primitiveType = Types.optional(PrimitiveTypeName.BINARY).as(LogicalTypeAnnotation.decimalType(2, 7)).named("value");
    Writable writable = getWritableFromPrimitiveConverter(createHiveTypeInfo("double"), primitiveType, 2200);
    DoubleWritable doubleWritable = (DoubleWritable) writable;
    assertEquals(22, (int) doubleWritable.get());
}
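With LogicalTypeAnnotation.decimalType(2, 7) the column carries scale 2 and precision 7, so the raw unscaled value 2200 denotes 22.00; converted to the Hive double type that is 22.0, and the (int) cast in the assertion keeps the integral part 22. The same arithmetic in plain Java:

BigDecimal decoded = new BigDecimal(BigInteger.valueOf(2200), 2); // 22.00
double asDouble = decoded.doubleValue(); // 22.0
// (int) asDouble == 22, matching the assertion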