Search in sources :

Example 6 with DoubleWritable

use of org.apache.hadoop.io.DoubleWritable in project cdap by caskdata.

The following example is from the class AggregationFunctionsTest, method standardDeviationGenerateAggregationTest.

@Test
public void standardDeviationGenerateAggregationTest() throws Exception {
    // Feed a fixed sample set into the StandardDeviation aggregator and verify
    // the serialized result against a precomputed expected value.
    double[] samples = {2.0, 5.0, 10.0, 52.0};
    StandardDeviation standardDeviation = new StandardDeviation();
    for (double sample : samples) {
        DataQualityWritable value = new DataQualityWritable();
        value.set(new DoubleWritable(sample));
        standardDeviation.add(value);
    }
    // aggregate() serializes the computed standard deviation as a byte[] double.
    byte[] output = standardDeviation.aggregate();
    Assert.assertEquals(20.265426, Bytes.toDouble(output), 0.001);
}
Also used : DoubleWritable(org.apache.hadoop.io.DoubleWritable) StandardDeviation(co.cask.cdap.dq.functions.StandardDeviation) DataQualityWritable(co.cask.cdap.dq.DataQualityWritable) Test(org.junit.Test)

Example 7 with DoubleWritable

use of org.apache.hadoop.io.DoubleWritable in project presto by prestodb.

The following example is from the class OrcTester, method decodeRecordReaderValue.

/**
 * Unwraps a value produced by an ORC record reader into the plain Java / Presto SQL
 * object used for test comparisons. Each known Hadoop/Hive {@code Writable} wrapper is
 * mapped to its boxed primitive or {@code SqlXxx} equivalent; structs, lists, and maps
 * are decoded recursively; any unrecognized value is returned unchanged.
 *
 * NOTE(review): branch order matters — e.g. the two distinct OrcStruct classes below
 * must be tested separately; do not reorder or merge branches casually.
 *
 * @param type the Presto type the value is expected to decode to (consulted for
 *             decimals, structs, lists, and maps)
 * @param actualValue the raw value from the record reader; may be a lazy ORC object
 * @return the decoded comparison object
 */
private static Object decodeRecordReaderValue(Type type, Object actualValue) {
    // Lazy ORC values must be materialized before inspection; wrap IOException
    // unchecked since this is test-only code with no recovery path.
    if (actualValue instanceof OrcLazyObject) {
        try {
            actualValue = ((OrcLazyObject) actualValue).materialize();
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    }
    if (actualValue instanceof BooleanWritable) {
        actualValue = ((BooleanWritable) actualValue).get();
    } else if (actualValue instanceof ByteWritable) {
        actualValue = ((ByteWritable) actualValue).get();
    } else if (actualValue instanceof BytesWritable) {
        // copyBytes() trims to the valid length; BytesWritable's backing array may be larger.
        actualValue = new SqlVarbinary(((BytesWritable) actualValue).copyBytes());
    } else if (actualValue instanceof DateWritable) {
        actualValue = new SqlDate(((DateWritable) actualValue).getDays());
    } else if (actualValue instanceof DoubleWritable) {
        actualValue = ((DoubleWritable) actualValue).get();
    } else if (actualValue instanceof FloatWritable) {
        actualValue = ((FloatWritable) actualValue).get();
    } else if (actualValue instanceof IntWritable) {
        actualValue = ((IntWritable) actualValue).get();
    } else if (actualValue instanceof HiveCharWritable) {
        // getPaddedValue() keeps the CHAR(n) trailing-space padding for comparison.
        actualValue = ((HiveCharWritable) actualValue).getPaddedValue().toString();
    } else if (actualValue instanceof LongWritable) {
        actualValue = ((LongWritable) actualValue).get();
    } else if (actualValue instanceof ShortWritable) {
        actualValue = ((ShortWritable) actualValue).get();
    } else if (actualValue instanceof HiveDecimalWritable) {
        DecimalType decimalType = (DecimalType) type;
        HiveDecimalWritable writable = (HiveDecimalWritable) actualValue;
        // writable messes with the scale so rescale the values to the Presto type
        BigInteger rescaledValue = rescale(writable.getHiveDecimal().unscaledValue(), writable.getScale(), decimalType.getScale());
        actualValue = new SqlDecimal(rescaledValue, decimalType.getPrecision(), decimalType.getScale());
    } else if (actualValue instanceof Text) {
        // No cast needed: Text.toString() returns the underlying UTF-8 string.
        actualValue = actualValue.toString();
    } else if (actualValue instanceof TimestampWritable) {
        TimestampWritable timestamp = (TimestampWritable) actualValue;
        // Combine whole seconds (as millis) with the sub-second part of the nanos field.
        actualValue = sqlTimestampOf((timestamp.getSeconds() * 1000) + (timestamp.getNanos() / 1000000L), SESSION);
    } else if (actualValue instanceof OrcStruct) {
        // org.apache.hadoop.hive.ql.io.orc.OrcStruct: fields accessed via OrcUtil.
        List<Object> fields = new ArrayList<>();
        OrcStruct structObject = (OrcStruct) actualValue;
        for (int fieldId = 0; fieldId < structObject.getNumFields(); fieldId++) {
            fields.add(OrcUtil.getFieldValue(structObject, fieldId));
        }
        actualValue = decodeRecordReaderStruct(type, fields);
    } else if (actualValue instanceof com.facebook.hive.orc.OrcStruct) {
        // The DWRF (facebook) OrcStruct exposes fields directly via getFieldValue.
        List<Object> fields = new ArrayList<>();
        com.facebook.hive.orc.OrcStruct structObject = (com.facebook.hive.orc.OrcStruct) actualValue;
        for (int fieldId = 0; fieldId < structObject.getNumFields(); fieldId++) {
            fields.add(structObject.getFieldValue(fieldId));
        }
        actualValue = decodeRecordReaderStruct(type, fields);
    } else if (actualValue instanceof List) {
        actualValue = decodeRecordReaderList(type, ((List<?>) actualValue));
    } else if (actualValue instanceof Map) {
        actualValue = decodeRecordReaderMap(type, (Map<?, ?>) actualValue);
    }
    return actualValue;
}
Also used : SqlVarbinary(com.facebook.presto.common.type.SqlVarbinary) Lists.newArrayList(com.google.common.collect.Lists.newArrayList) ArrayList(java.util.ArrayList) TimestampWritable(org.apache.hadoop.hive.serde2.io.TimestampWritable) UncheckedIOException(java.io.UncheckedIOException) DoubleWritable(org.apache.hadoop.io.DoubleWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) OrcLazyObject(com.facebook.hive.orc.lazy.OrcLazyObject) OrcStruct(org.apache.hadoop.hive.ql.io.orc.OrcStruct) Arrays.asList(java.util.Arrays.asList) ImmutableList.toImmutableList(com.google.common.collect.ImmutableList.toImmutableList) Lists.newArrayList(com.google.common.collect.Lists.newArrayList) ArrayList(java.util.ArrayList) List(java.util.List) ImmutableList(com.google.common.collect.ImmutableList) Collectors.toList(java.util.stream.Collectors.toList) LongWritable(org.apache.hadoop.io.LongWritable) ByteWritable(org.apache.hadoop.io.ByteWritable) IntWritable(org.apache.hadoop.io.IntWritable) DateWritable(org.apache.hadoop.hive.serde2.io.DateWritable) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) HiveCharWritable(org.apache.hadoop.hive.serde2.io.HiveCharWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) SqlDecimal(com.facebook.presto.common.type.SqlDecimal) Text(org.apache.hadoop.io.Text) UncheckedIOException(java.io.UncheckedIOException) IOException(java.io.IOException) FloatWritable(org.apache.hadoop.io.FloatWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) SqlDate(com.facebook.presto.common.type.SqlDate) DecimalType(com.facebook.presto.common.type.DecimalType) BigInteger(java.math.BigInteger) OrcLazyObject(com.facebook.hive.orc.lazy.OrcLazyObject) Map(java.util.Map) ImmutableMap.toImmutableMap(com.google.common.collect.ImmutableMap.toImmutableMap) ImmutableMap(com.google.common.collect.ImmutableMap) HashMap(java.util.HashMap)

Example 8 with DoubleWritable

use of org.apache.hadoop.io.DoubleWritable in project druid by druid-io.

The following example is from the class OrcStructConverterTest, method testConvertRootFieldWithNonNullDoubleReturningOriginalValue.

@Test
public void testConvertRootFieldWithNonNullDoubleReturningOriginalValue() {
    // A non-null DOUBLE root field should convert back to its original primitive value.
    final DoubleWritable writable = new DoubleWritable(1.0d);
    final OrcStructConverter converter = new OrcStructConverter(false);
    assertConversion(converter, TypeDescription.createDouble(), 1.0d, writable);
}
Also used : DoubleWritable(org.apache.hadoop.io.DoubleWritable) Test(org.junit.Test)

Example 9 with DoubleWritable

use of org.apache.hadoop.io.DoubleWritable in project hive by apache.

The following example is from the class TestETypeConverter, method testGetIntConverterForDouble.

@Test
public void testGetIntConverterForDouble() throws Exception {
    // Reading a Parquet INT32 column as Hive type "double" should produce a
    // DoubleWritable carrying the same numeric value.
    final PrimitiveType int32Type = Types.optional(PrimitiveTypeName.INT32).named("value");
    final Writable result = getWritableFromPrimitiveConverter(createHiveTypeInfo("double"), int32Type, 22225);
    assertEquals(22225d, ((DoubleWritable) result).get(), 0);
}
Also used : Writable(org.apache.hadoop.io.Writable) DoubleWritable(org.apache.hadoop.io.DoubleWritable) LongWritable(org.apache.hadoop.io.LongWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) IntWritable(org.apache.hadoop.io.IntWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) FloatWritable(org.apache.hadoop.io.FloatWritable) PrimitiveType(org.apache.parquet.schema.PrimitiveType) DoubleWritable(org.apache.hadoop.io.DoubleWritable) Test(org.junit.Test)

Example 10 with DoubleWritable

use of org.apache.hadoop.io.DoubleWritable in project hive by apache.

The following example is from the class TestETypeConverter, method testGetDecimalConverterDoubleHiveType.

@Test
public void testGetDecimalConverterDoubleHiveType() throws Exception {
    // A BINARY decimal(scale=2, precision=7) column read as Hive "double":
    // raw unscaled 2200 at scale 2 represents 22.00, so the double truncates to 22.
    final PrimitiveType decimalType =
        Types.optional(PrimitiveTypeName.BINARY).as(LogicalTypeAnnotation.decimalType(2, 7)).named("value");
    final Writable result = getWritableFromPrimitiveConverter(createHiveTypeInfo("double"), decimalType, 2200);
    final DoubleWritable decoded = (DoubleWritable) result;
    assertEquals(22, (int) decoded.get());
}
Also used : Writable(org.apache.hadoop.io.Writable) DoubleWritable(org.apache.hadoop.io.DoubleWritable) LongWritable(org.apache.hadoop.io.LongWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) IntWritable(org.apache.hadoop.io.IntWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) FloatWritable(org.apache.hadoop.io.FloatWritable) PrimitiveType(org.apache.parquet.schema.PrimitiveType) DoubleWritable(org.apache.hadoop.io.DoubleWritable) Test(org.junit.Test)

Aggregations

DoubleWritable (org.apache.hadoop.io.DoubleWritable)38 IntWritable (org.apache.hadoop.io.IntWritable)20 Test (org.junit.Test)19 Path (org.apache.hadoop.fs.Path)12 BooleanWritable (org.apache.hadoop.io.BooleanWritable)12 LongWritable (org.apache.hadoop.io.LongWritable)12 BytesWritable (org.apache.hadoop.io.BytesWritable)11 FloatWritable (org.apache.hadoop.io.FloatWritable)11 FileSystem (org.apache.hadoop.fs.FileSystem)10 Writable (org.apache.hadoop.io.Writable)10 Configuration (org.apache.hadoop.conf.Configuration)9 Text (org.apache.hadoop.io.Text)8 HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable)7 ByteWritable (org.apache.hadoop.io.ByteWritable)7 NullWritable (org.apache.hadoop.io.NullWritable)6 ArrayList (java.util.ArrayList)5 Map (java.util.Map)5 SequenceFile (org.apache.hadoop.io.SequenceFile)5 IOException (java.io.IOException)4 HashMap (java.util.HashMap)4