Usage of org.apache.hadoop.hive.common.type.HiveDecimal in the Apache Hive project.
From class GenericUDFOPMultiply, method evaluate:
@Override
protected HiveDecimalWritable evaluate(HiveDecimal left, HiveDecimal right) {
  // Compute the product; multiply may yield null (e.g. when the result
  // cannot be represented), in which case we propagate null to the caller.
  final HiveDecimal product = left.multiply(right);
  if (product != null) {
    // Reuse the shared writable to avoid per-row allocation.
    decimalWritable.set(product);
    return decimalWritable;
  }
  return null;
}
Usage of org.apache.hadoop.hive.common.type.HiveDecimal in the Apache Hive project.
From class TestVectorFilterExpressions, method testFilterDecimalScalarGreaterThanColumn:
/**
* Spot check scalar > col for decimal.
*/
/**
 * Spot check scalar > col for decimal.
 */
@Test
public void testFilterDecimalScalarGreaterThanColumn() {
  VectorizedRowBatch batch = getVectorizedRowBatch1DecimalCol();
  HiveDecimal zero = HiveDecimal.create("0");
  VectorExpression filter = new FilterDecimalScalarGreaterDecimalColumn(zero, 0);
  filter.evaluate(batch);
  // Exactly one row should survive the filter, and it is row index 1.
  assertTrue(batch.selectedInUse);
  assertEquals(1, batch.size);
  assertEquals(1, batch.selected[0]);
}
Usage of org.apache.hadoop.hive.common.type.HiveDecimal in the Apache Hive project.
From class TestVectorFilterExpressions, method testFilterDecimalColEqualDecimalScalar:
/**
* This tests the template for Decimal Column-Scalar comparison filters,
* called FilterDecimalColumnCompareScalar.txt. Only equal is tested for
* multiple cases because the logic is the same for <, >, <=, >=, == and !=.
*/
/**
 * This tests the template for Decimal Column-Scalar comparison filters,
 * called FilterDecimalColumnCompareScalar.txt. Only equal is tested for
 * multiple cases because the logic is the same for <, >, <=, >=, == and !=.
 */
@Test
public void testFilterDecimalColEqualDecimalScalar() {
  VectorizedRowBatch batch = getVectorizedRowBatch1DecimalCol();
  HiveDecimal constant = HiveDecimal.create("-3.30");
  VectorExpression filter = new FilterDecimalColEqualDecimalScalar(0, constant);

  // Base case: exactly one row (index 1) equals the scalar.
  filter.evaluate(batch);
  assertTrue(batch.selectedInUse);
  assertEquals(1, batch.selected[0]);
  assertEquals(1, batch.size);

  // Null at the matching row: no rows may be selected.
  batch = getVectorizedRowBatch1DecimalCol();
  batch.cols[0].noNulls = false;
  batch.cols[0].isNull[1] = true;
  filter.evaluate(batch);
  assertEquals(0, batch.size);

  // Repeating non-null value that does not match: no rows selected.
  batch = getVectorizedRowBatch1DecimalCol();
  batch.cols[0].isRepeating = true;
  filter.evaluate(batch);
  assertEquals(0, batch.size);

  // Repeating null value: no rows selected.
  batch = getVectorizedRowBatch1DecimalCol();
  batch.cols[0].isRepeating = true;
  batch.cols[0].noNulls = false;
  batch.cols[0].isNull[0] = true;
  filter.evaluate(batch);
  assertEquals(0, batch.size);
}
Usage of org.apache.hadoop.hive.common.type.HiveDecimal in the Apache Hive project.
From class TestVectorFilterExpressions, method testFilterDecimalScalarEqualDecimalColumn:
/**
* This tests the template for Decimal Scalar-Column comparison filters,
* called FilterDecimalScalarCompareColumn.txt. Only equal is tested for multiple
* cases because the logic is the same for <, >, <=, >=, == and !=.
*/
/**
 * This tests the template for Decimal Scalar-Column comparison filters,
 * called FilterDecimalScalarCompareColumn.txt. Only equal is tested for multiple
 * cases because the logic is the same for <, >, <=, >=, == and !=.
 */
@Test
public void testFilterDecimalScalarEqualDecimalColumn() {
  VectorizedRowBatch batch = getVectorizedRowBatch1DecimalCol();
  HiveDecimal constant = HiveDecimal.create("-3.30");
  VectorExpression filter = new FilterDecimalScalarEqualDecimalColumn(constant, 0);

  // Base case: exactly one row (index 1) equals the scalar.
  filter.evaluate(batch);
  assertTrue(batch.selectedInUse);
  assertEquals(1, batch.selected[0]);
  assertEquals(1, batch.size);

  // Null at the matching row: no rows may be selected.
  batch = getVectorizedRowBatch1DecimalCol();
  batch.cols[0].noNulls = false;
  batch.cols[0].isNull[1] = true;
  filter.evaluate(batch);
  assertEquals(0, batch.size);

  // Repeating non-null value that does not match: no rows selected.
  batch = getVectorizedRowBatch1DecimalCol();
  batch.cols[0].isRepeating = true;
  filter.evaluate(batch);
  assertEquals(0, batch.size);

  // Repeating null value: no rows selected.
  batch = getVectorizedRowBatch1DecimalCol();
  batch.cols[0].isRepeating = true;
  batch.cols[0].noNulls = false;
  batch.cols[0].isNull[0] = true;
  filter.evaluate(batch);
  assertEquals(0, batch.size);
}
Usage of org.apache.hadoop.hive.common.type.HiveDecimal in the Apache Hive project.
From class TestVectorTypeCasts, method testCastTimestampToDecimal:
@Test
public void testCastTimestampToDecimal() {
  // The input timestamps are stored as long values
  // measured in nanoseconds from the epoch.
  HiveDecimal[] hiveDecimalValues = new HiveDecimal[500];
  VectorizedRowBatch b = getBatchTimestampDecimal(hiveDecimalValues);
  VectorExpression expr = new CastTimestampToDecimal(0, 1);
  expr.evaluate(b);
  DecimalColumnVector r = (DecimalColumnVector) b.cols[1];
  for (int i = 0; i < hiveDecimalValues.length; i++) {
    // assertEquals reports both values on failure; the old
    // if (!equals) assertTrue(false) pattern gave no diagnostics.
    assertEquals(hiveDecimalValues[i], r.vector[i].getHiveDecimal());
  }
  // Try again with a value that won't fit in 5 digits, to make
  // sure that NULL is produced.
  b.cols[1] = r = new DecimalColumnVector(hiveDecimalValues.length, 5, 2);
  expr.evaluate(b);
  for (int i = 0; i < hiveDecimalValues.length; i++) {
    HiveDecimal expectedHiveDecimal = hiveDecimalValues[i];
    if (HiveDecimal.enforcePrecisionScale(expectedHiveDecimal, 5, 2) == null) {
      // Value does not fit in (5, 2): the cast must produce NULL.
      assertTrue(r.isNull[i]);
    } else {
      assertTrue(!r.isNull[i]);
      assertEquals(expectedHiveDecimal, r.vector[i].getHiveDecimal());
    }
  }
}
Aggregations