
Example 86 with HiveDecimalWritable

Use of org.apache.hadoop.hive.serde2.io.HiveDecimalWritable in project hive by apache, in the class TestHiveDecimal, method doTestDecimalsWithPrecisionScale.

public void doTestDecimalsWithPrecisionScale(String[] decStrings, int precision, int scale) {
    HiveDecimalV1 oldSum = HiveDecimalV1.create(0);
    HiveDecimalWritable sum = new HiveDecimalWritable(0);
    for (int i = 0; i < decStrings.length; i++) {
        String string = decStrings[i];
        HiveDecimalV1 oldDec = HiveDecimalV1.create(string);
        HiveDecimalV1 resultOldDec;
        if (oldDec == null) {
            resultOldDec = null;
        } else {
            resultOldDec = HiveDecimalV1.enforcePrecisionScale(oldDec, precision, scale);
        }
        HiveDecimal dec = HiveDecimal.create(string);
        if (oldDec == null) {
            Assert.assertTrue(dec == null);
            continue;
        }
        HiveDecimal resultDec = HiveDecimal.enforcePrecisionScale(dec, precision, scale);
        if (resultOldDec == null) {
            Assert.assertTrue(resultDec == null);
            continue;
        }
        Assert.assertEquals(resultOldDec.toString(), resultDec.toString());
        Assert.assertEquals(resultOldDec.toFormatString(scale), resultDec.toFormatString(scale));
        oldSum = oldSum.add(resultOldDec);
        sum.mutateAdd(resultDec);
    }
    Assert.assertEquals(oldSum.toString(), sum.toString());
}
Also used : HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable)
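A minimal standalone sketch of the same enforcePrecisionScale/mutateAdd pattern, outside the test harness. The class name, decimal strings, precision, and scale below are illustrative assumptions, not values from the Hive test suite; only the HiveDecimal and HiveDecimalWritable calls come from the example above.

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

public class EnforcePrecisionScaleSketch {
    public static void main(String[] args) {
        // Hypothetical inputs; any decimal strings work here.
        String[] decStrings = { "123.456", "0.999", "78.9" };
        int precision = 5;
        int scale = 2;
        // Accumulate the clipped values in a mutable writable to avoid
        // allocating a new HiveDecimal per addition.
        HiveDecimalWritable sum = new HiveDecimalWritable(0);
        for (String s : decStrings) {
            HiveDecimal dec = HiveDecimal.create(s);
            if (dec == null) {
                continue; // unparseable input
            }
            // Clip to the target precision/scale; null means the value does not fit.
            HiveDecimal clipped = HiveDecimal.enforcePrecisionScale(dec, precision, scale);
            if (clipped == null) {
                continue;
            }
            sum.mutateAdd(clipped);
        }
        System.out.println("sum = " + sum.toString());
    }
}

The mutateAdd call is the point of the writable API: the running total is updated in place instead of producing a fresh immutable HiveDecimal per element, which is what the test above compares against the old HiveDecimalV1 behavior.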

Example 87 with HiveDecimalWritable

Use of org.apache.hadoop.hive.serde2.io.HiveDecimalWritable in project hive by apache, in the class TestHiveDecimal, method doTestDecimal64.

private void doTestDecimal64(Random r, BigDecimal inputBigDecimal, int precision, int scale) {
    BigDecimal bigDecimal = inputBigDecimal;
    if (!bigDecimal.equals(BigDecimal.ZERO)) {
        while (true) {
            bigDecimal = bigDecimal.remainder(BigDecimal.valueOf(10).pow(precision - scale));
            bigDecimal = bigDecimal.setScale(scale, BigDecimal.ROUND_DOWN);
            if (!bigDecimal.unscaledValue().equals(BigInteger.ZERO)) {
                break;
            }
            bigDecimal = randHiveBigDecimalNormalRange(r, standardAlphabet);
        }
    }
    HiveDecimal dec = HiveDecimal.create(bigDecimal);
    assertTrue(dec != null);
    dec.validate();
    HiveDecimalWritable decWritable = new HiveDecimalWritable(dec);
    final long decimal64Long = decWritable.serialize64(scale);
    assertTrue(decimal64Long <= HiveDecimalWritable.getDecimal64AbsMax(precision));
    HiveDecimalWritable resultWritable = new HiveDecimalWritable(0);
    resultWritable.deserialize64(decimal64Long, scale);
    assertEquals(dec, resultWritable.getHiveDecimal());
}
Also used : HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) BigDecimal(java.math.BigDecimal)
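As a rough usage sketch of the decimal64 round trip shown above: pack a small decimal into a scaled long with serialize64 and read it back with deserialize64. The class name, input value, precision, and scale are assumptions for illustration.

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

public class Decimal64RoundTripSketch {
    public static void main(String[] args) {
        // Hypothetical precision/scale; the value must fit the decimal64 range.
        int precision = 10;
        int scale = 2;
        HiveDecimalWritable decWritable = new HiveDecimalWritable(HiveDecimal.create("12345.67"));
        // Pack the value into a long, scaled to the given number of fractional digits.
        long decimal64Long = decWritable.serialize64(scale);
        // The packed value should stay within the decimal64 bound for this precision.
        if (decimal64Long > HiveDecimalWritable.getDecimal64AbsMax(precision)) {
            throw new IllegalStateException("value exceeds decimal64 range");
        }
        // Unpack into a fresh writable at the same scale.
        HiveDecimalWritable resultWritable = new HiveDecimalWritable(0);
        resultWritable.deserialize64(decimal64Long, scale);
        System.out.println(decimal64Long + " -> " + resultWritable.getHiveDecimal());
    }
}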

Example 88 with HiveDecimalWritable

Use of org.apache.hadoop.hive.serde2.io.HiveDecimalWritable in project hive by apache, in the class TestHiveDecimal, method doTestWriteReadFields.

private void doTestWriteReadFields(Random r, BigDecimal bigDecimal) throws IOException {
    HiveDecimalV1 oldDec = HiveDecimalV1.create(bigDecimal);
    if (oldDec != null && isTenPowerBug(oldDec.toString())) {
        return;
    }
    HiveDecimal dec = HiveDecimal.create(bigDecimal);
    if (oldDec == null) {
        assertTrue(dec == null);
        return;
    }
    assertTrue(dec != null);
    dec.validate();
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(baos);
    HiveDecimalWritable decimalWritableOut = new HiveDecimalWritable(dec);
    decimalWritableOut.write(out);
    byte[] valueBytes = baos.toByteArray();
    ByteArrayInputStream bais = new ByteArrayInputStream(valueBytes);
    DataInputStream in = new DataInputStream(bais);
    HiveDecimalWritable decimalWritableIn = new HiveDecimalWritable();
    decimalWritableIn.readFields(in);
    Assert.assertEquals(dec, decimalWritableIn.getHiveDecimal());
}
Also used : ByteArrayInputStream(java.io.ByteArrayInputStream) DataOutputStream(java.io.DataOutputStream) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) ByteArrayOutputStream(java.io.ByteArrayOutputStream) DataInputStream(java.io.DataInputStream)
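A self-contained sketch of the same Writable round trip: serialize a HiveDecimalWritable to a byte array with write() and restore it with readFields(). The class name and the sample value are assumptions; the stream plumbing mirrors the test above.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

public class WritableRoundTripSketch {
    public static void main(String[] args) throws IOException {
        // Arbitrary sample value.
        HiveDecimalWritable out = new HiveDecimalWritable(HiveDecimal.create("3.14159"));
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        out.write(new DataOutputStream(baos));
        // Deserialize from the produced bytes into an empty writable.
        HiveDecimalWritable in = new HiveDecimalWritable();
        in.readFields(new DataInputStream(new ByteArrayInputStream(baos.toByteArray())));
        System.out.println(in.getHiveDecimal()); // prints 3.14159
    }
}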

Example 89 with HiveDecimalWritable

Use of org.apache.hadoop.hive.serde2.io.HiveDecimalWritable in project hive by apache, in the class TimestampUtils, method decimalToTimestamp.

/**
 * Take a HiveDecimal and return the timestamp representation, where the fraction part is the
 * nanoseconds and the integer part is the number of seconds.
 * @param dec the decimal value to convert
 * @return the corresponding Timestamp, or null if the value cannot be represented
 */
public static Timestamp decimalToTimestamp(HiveDecimal dec) {
    HiveDecimalWritable nanosWritable = new HiveDecimalWritable(dec);
    // Clip off seconds portion.
    nanosWritable.mutateFractionPortion();
    // Bring nanoseconds into integer portion.
    nanosWritable.mutateScaleByPowerOfTen(9);
    if (!nanosWritable.isSet() || !nanosWritable.isInt()) {
        return null;
    }
    int nanos = nanosWritable.intValue();
    if (nanos < 0) {
        nanos += 1000000000;
    }
    nanosWritable.setFromLong(nanos);
    HiveDecimalWritable nanoInstant = new HiveDecimalWritable(dec);
    nanoInstant.mutateScaleByPowerOfTen(9);
    nanoInstant.mutateSubtract(nanosWritable);
    // Back to seconds.
    nanoInstant.mutateScaleByPowerOfTen(-9);
    if (!nanoInstant.isSet() || !nanoInstant.isLong()) {
        return null;
    }
    long seconds = nanoInstant.longValue();
    Timestamp t = new Timestamp(seconds * 1000);
    t.setNanos(nanos);
    return t;
}
Also used : HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) Timestamp(java.sql.Timestamp)
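The fraction-to-nanoseconds step inside decimalToTimestamp can be shown on its own with HiveDecimalWritable's mutating API. The class name and the 1.5-second input below are arbitrary assumptions; the calls themselves are the ones used in the method above.

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

public class FractionToNanosSketch {
    public static void main(String[] args) {
        // Hypothetical value: 1.5 seconds.
        HiveDecimal dec = HiveDecimal.create("1.5");
        HiveDecimalWritable nanosWritable = new HiveDecimalWritable(dec);
        // Keep only the fraction (0.5), then scale it up to whole nanoseconds.
        nanosWritable.mutateFractionPortion();
        nanosWritable.mutateScaleByPowerOfTen(9);
        if (nanosWritable.isSet() && nanosWritable.isInt()) {
            System.out.println("nanos = " + nanosWritable.intValue()); // 500000000
        }
    }
}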

Example 90 with HiveDecimalWritable

Use of org.apache.hadoop.hive.serde2.io.HiveDecimalWritable in project hive by apache, in the class DecimalColumnVector, method ensureSize.

@Override
public void ensureSize(int size, boolean preserveData) {
    super.ensureSize(size, preserveData);
    // We assume the existing vector is always valid.
    if (size <= vector.length)
        return;
    HiveDecimalWritable[] oldArray = vector;
    vector = new HiveDecimalWritable[size];
    int initPos = 0;
    if (preserveData) {
        // we copy all of the values to avoid creating more objects
        // TODO: it might be cheaper to always preserve data or reset existing objects
        initPos = oldArray.length;
        System.arraycopy(oldArray, 0, vector, 0, oldArray.length);
    }
    for (int i = initPos; i < vector.length; ++i) {
        // Initially zero.
        vector[i] = new HiveDecimalWritable(0);
    }
}
Also used : HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable)
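The growth logic above can be sketched in isolation: copy existing HiveDecimalWritable entries into a larger array and zero-fill the tail. This is a simplified, hypothetical helper for illustration, not the DecimalColumnVector implementation itself.

import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

public class GrowDecimalArraySketch {
    // Grow a HiveDecimalWritable[] to at least newSize, optionally preserving old values.
    static HiveDecimalWritable[] grow(HiveDecimalWritable[] oldArray, int newSize, boolean preserveData) {
        if (newSize <= oldArray.length) {
            return oldArray; // already large enough
        }
        HiveDecimalWritable[] vector = new HiveDecimalWritable[newSize];
        int initPos = 0;
        if (preserveData) {
            // Reuse the existing objects rather than allocating new ones.
            System.arraycopy(oldArray, 0, vector, 0, oldArray.length);
            initPos = oldArray.length;
        }
        for (int i = initPos; i < vector.length; ++i) {
            vector[i] = new HiveDecimalWritable(0); // initially zero
        }
        return vector;
    }

    public static void main(String[] args) {
        HiveDecimalWritable[] small = { new HiveDecimalWritable(7), new HiveDecimalWritable(42) };
        HiveDecimalWritable[] bigger = grow(small, 4, true);
        System.out.println(bigger[1] + " / " + bigger[3]); // 42 / 0
    }
}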

Aggregations

HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) 185
Test (org.junit.Test) 42
LongWritable (org.apache.hadoop.io.LongWritable) 39
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable) 36
HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal) 35
IntWritable (org.apache.hadoop.io.IntWritable) 35
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) 34
DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) 31
ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable) 30
Text (org.apache.hadoop.io.Text) 30
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) 28
BytesWritable (org.apache.hadoop.io.BytesWritable) 28
FloatWritable (org.apache.hadoop.io.FloatWritable) 28
HiveChar (org.apache.hadoop.hive.common.type.HiveChar) 27
ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable) 27
BooleanWritable (org.apache.hadoop.io.BooleanWritable) 27
HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar) 26
DeferredJavaObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject) 26
DeferredObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject) 26
DecimalColumnVector (org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector) 25