Use of org.apache.hadoop.hive.serde2.io.HiveDecimalWritable in project hive by apache.
From the class TestHiveDecimal, method doTestDecimalsWithPrecisionScale.
/**
 * Checks that the new HiveDecimal implementation agrees with the legacy
 * HiveDecimalV1 when enforcing a precision/scale on each input string:
 * both must reject the same inputs (null results), produce identical
 * string/format-string renderings, and yield the same running sum
 * (legacy immutable add vs. new in-place mutateAdd).
 *
 * @param decStrings decimal literals to test, possibly unparseable
 * @param precision  precision to enforce
 * @param scale      scale to enforce
 */
public void doTestDecimalsWithPrecisionScale(String[] decStrings, int precision, int scale) {
  HiveDecimalV1 legacySum = HiveDecimalV1.create(0);
  HiveDecimalWritable newSum = new HiveDecimalWritable(0);
  for (String decString : decStrings) {
    HiveDecimalV1 legacyDec = HiveDecimalV1.create(decString);
    HiveDecimalV1 legacyEnforced =
        (legacyDec == null) ? null : HiveDecimalV1.enforcePrecisionScale(legacyDec, precision, scale);
    HiveDecimal newDec = HiveDecimal.create(decString);
    if (legacyDec == null) {
      // Legacy parser rejected the input; the new one must reject it too.
      Assert.assertTrue(newDec == null);
      continue;
    }
    HiveDecimal newEnforced = HiveDecimal.enforcePrecisionScale(newDec, precision, scale);
    if (legacyEnforced == null) {
      // Legacy enforcement overflowed; the new enforcement must agree.
      Assert.assertTrue(newEnforced == null);
      continue;
    }
    Assert.assertEquals(legacyEnforced.toString(), newEnforced.toString());
    Assert.assertEquals(legacyEnforced.toFormatString(scale), newEnforced.toFormatString(scale));
    legacySum = legacySum.add(legacyEnforced);
    newSum.mutateAdd(newEnforced);
  }
  // The accumulated sums must match across implementations.
  Assert.assertEquals(legacySum.toString(), newSum.toString());
}
Use of org.apache.hadoop.hive.serde2.io.HiveDecimalWritable in project hive by apache.
From the class TestHiveDecimal, method doTestDecimal64.
/**
 * Round-trips a decimal value through the decimal64 (scaled-long)
 * representation: serialize with {@code serialize64}, check it fits in the
 * absolute-max range for the precision, deserialize, and require equality.
 *
 * @param inputBigDecimal candidate value; reduced (or re-randomized) until it
 *                        fits (precision, scale) with a non-zero unscaled value
 */
private void doTestDecimal64(Random r, BigDecimal inputBigDecimal, int precision, int scale) {
  BigDecimal candidate = inputBigDecimal;
  if (!candidate.equals(BigDecimal.ZERO)) {
    // Shrink the value into the (precision, scale) range; if truncation
    // collapses it to zero, draw a fresh random decimal and try again.
    while (true) {
      candidate = candidate.remainder(BigDecimal.valueOf(10).pow(precision - scale));
      candidate = candidate.setScale(scale, BigDecimal.ROUND_DOWN);
      if (!candidate.unscaledValue().equals(BigInteger.ZERO)) {
        break;
      }
      candidate = randHiveBigDecimalNormalRange(r, standardAlphabet);
    }
  }
  HiveDecimal dec = HiveDecimal.create(candidate);
  assertTrue(dec != null);
  dec.validate();
  HiveDecimalWritable decWritable = new HiveDecimalWritable(dec);
  // Serialize to the compact 64-bit form and verify it is within bounds.
  final long decimal64Long = decWritable.serialize64(scale);
  assertTrue(decimal64Long <= HiveDecimalWritable.getDecimal64AbsMax(precision));
  // Deserialize into a fresh writable and require an exact round trip.
  HiveDecimalWritable resultWritable = new HiveDecimalWritable(0);
  resultWritable.deserialize64(decimal64Long, scale);
  assertEquals(dec, resultWritable.getHiveDecimal());
}
Use of org.apache.hadoop.hive.serde2.io.HiveDecimalWritable in project hive by apache.
From the class TestHiveDecimal, method doTestWriteReadFields.
/**
 * Round-trips a decimal through the Writable wire format
 * ({@code write}/{@code readFields}) and requires the value to survive
 * unchanged. Inputs that trigger the known legacy ten-power bug are skipped.
 *
 * @throws IOException if the in-memory stream operations fail (not expected)
 */
private void doTestWriteReadFields(Random r, BigDecimal bigDecimal) throws IOException {
  HiveDecimalV1 legacyDec = HiveDecimalV1.create(bigDecimal);
  if (legacyDec != null && isTenPowerBug(legacyDec.toString())) {
    // Known legacy defect for powers of ten; not meaningful to compare.
    return;
  }
  HiveDecimal dec = HiveDecimal.create(bigDecimal);
  if (legacyDec == null) {
    // Legacy rejected the value; the new implementation must reject it too.
    assertTrue(dec == null);
    return;
  }
  assertTrue(dec != null);
  dec.validate();
  // Serialize through the Writable interface into an in-memory buffer.
  ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
  new HiveDecimalWritable(dec).write(new DataOutputStream(byteStream));
  // Read the bytes back and require an exact round trip.
  DataInputStream in =
      new DataInputStream(new ByteArrayInputStream(byteStream.toByteArray()));
  HiveDecimalWritable readBack = new HiveDecimalWritable();
  readBack.readFields(in);
  Assert.assertEquals(dec, readBack.getHiveDecimal());
}
Use of org.apache.hadoop.hive.serde2.io.HiveDecimalWritable in project hive by apache.
From the class TimestampUtils, method decimalToTimestamp.
/**
 * Converts a HiveDecimal to its Timestamp representation, treating the
 * integer part as whole seconds and the fraction part as nanoseconds.
 *
 * @param dec decimal seconds (fraction = nanoseconds)
 * @return the equivalent Timestamp, or null when the value cannot be
 *         represented (nanoseconds overflow int, or seconds overflow long)
 */
public static Timestamp decimalToTimestamp(HiveDecimal dec) {
  // Isolate the fractional part and shift it up into whole nanoseconds.
  HiveDecimalWritable fraction = new HiveDecimalWritable(dec);
  fraction.mutateFractionPortion();
  fraction.mutateScaleByPowerOfTen(9);
  if (!fraction.isSet() || !fraction.isInt()) {
    return null;
  }
  int nanos = fraction.intValue();
  if (nanos < 0) {
    // For negative values, normalize nanos into [0, 1e9) as
    // Timestamp.setNanos requires a non-negative argument.
    nanos += 1000000000;
  }
  fraction.setFromLong(nanos);
  // Compute whole seconds as (dec * 10^9 - nanos) / 10^9 in decimal
  // arithmetic, avoiding any rounding of the original value.
  HiveDecimalWritable wholeSeconds = new HiveDecimalWritable(dec);
  wholeSeconds.mutateScaleByPowerOfTen(9);
  wholeSeconds.mutateSubtract(fraction);
  wholeSeconds.mutateScaleByPowerOfTen(-9);
  if (!wholeSeconds.isSet() || !wholeSeconds.isLong()) {
    return null;
  }
  Timestamp result = new Timestamp(wholeSeconds.longValue() * 1000);
  result.setNanos(nanos);
  return result;
}
Use of org.apache.hadoop.hive.serde2.io.HiveDecimalWritable in project hive by apache.
From the class DecimalColumnVector, method ensureSize.
/**
 * Grows the backing decimal array to hold at least {@code size} entries.
 * When {@code preserveData} is set, existing writables are carried over;
 * every newly exposed slot is filled with a zero-valued writable so the
 * vector never contains null entries.
 */
@Override
public void ensureSize(int size, boolean preserveData) {
  super.ensureSize(size, preserveData);
  // We assume the existing vector is always valid; nothing to do when it
  // is already large enough.
  if (size <= vector.length) {
    return;
  }
  HiveDecimalWritable[] previous = vector;
  vector = new HiveDecimalWritable[size];
  int firstNewIndex = 0;
  if (preserveData) {
    // Reuse the existing writable objects instead of allocating new ones.
    // TODO: it might be cheaper to always preserve data or reset existing objects
    System.arraycopy(previous, 0, vector, 0, previous.length);
    firstNewIndex = previous.length;
  }
  // Each slot needs its own writable (sharing one instance would alias).
  while (firstNewIndex < vector.length) {
    vector[firstNewIndex++] = new HiveDecimalWritable(0);
  }
}
Aggregations