Example 56 with HiveDecimal

Use of org.apache.hadoop.hive.common.type.HiveDecimal in the Apache Hive project.

From the class TestVectorArithmeticExpressions, method testDecimalColDivideDecimalScalar.

/* Test decimal column to decimal scalar division. This is used to cover all the
   * cases used in the source code template ColumnDivideScalarDecimal.txt.
   * The template is used for division and modulo.
   */
@Test
public void testDecimalColDivideDecimalScalar() {
    VectorizedRowBatch b = getVectorizedRowBatch3DecimalCols();
    HiveDecimal d = HiveDecimal.create("2.00");
    VectorExpression expr = new DecimalColDivideDecimalScalar(0, d, 2);
    // test without nulls
    expr.evaluate(b);
    DecimalColumnVector r = (DecimalColumnVector) b.cols[2];
    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("0.6")));
    assertTrue(r.vector[1].getHiveDecimal().equals(HiveDecimal.create("-1.65")));
    assertTrue(r.vector[2].getHiveDecimal().equals(HiveDecimal.create("0")));
    // test null propagation
    b = getVectorizedRowBatch3DecimalCols();
    DecimalColumnVector in = (DecimalColumnVector) b.cols[0];
    r = (DecimalColumnVector) b.cols[2];
    in.noNulls = false;
    in.isNull[0] = true;
    expr.evaluate(b);
    assertTrue(!r.noNulls);
    assertTrue(r.isNull[0]);
    // test repeating case, no nulls
    b = getVectorizedRowBatch3DecimalCols();
    in = (DecimalColumnVector) b.cols[0];
    in.isRepeating = true;
    expr.evaluate(b);
    r = (DecimalColumnVector) b.cols[2];
    assertTrue(r.isRepeating);
    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("0.6")));
    // test repeating case for null value
    b = getVectorizedRowBatch3DecimalCols();
    in = (DecimalColumnVector) b.cols[0];
    in.isRepeating = true;
    in.isNull[0] = true;
    in.noNulls = false;
    expr.evaluate(b);
    r = (DecimalColumnVector) b.cols[2];
    assertTrue(r.isRepeating);
    assertTrue(!r.noNulls);
    assertTrue(r.isNull[0]);
    // test that zero-divide produces null for all output values
    b = getVectorizedRowBatch3DecimalCols();
    in = (DecimalColumnVector) b.cols[0];
    expr = new DecimalColDivideDecimalScalar(0, HiveDecimal.create("0"), 2);
    expr.evaluate(b);
    r = (DecimalColumnVector) b.cols[2];
    assertFalse(r.noNulls);
    assertTrue(r.isNull[0]);
    assertTrue(r.isRepeating);
}
Also used: VectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch), TestVectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.TestVectorizedRowBatch), DecimalColumnVector (org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector), DecimalColDivideDecimalScalar (org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DecimalColDivideDecimalScalar), HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal), Test (org.junit.Test)
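
The helper getVectorizedRowBatch3DecimalCols() is referenced throughout these tests but not shown in this excerpt. A minimal sketch of what it might look like follows, reconstructed from the assertions above (1.20 / 2.00 = 0.6, -3.30 / 2.00 = -1.65, 0 / 2.00 = 0); the decimal(18, 2) type, the contents of the second column, and the batch size are assumptions, not taken from the original test.

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;

// Hypothetical reconstruction of the helper used by the tests in this section.
// Column 0 holds the inputs 1.20, -3.30 and 0 implied by the assertions;
// column 2 is the output column written by the expressions under test.
private VectorizedRowBatch getVectorizedRowBatch3DecimalCols() {
    VectorizedRowBatch b = new VectorizedRowBatch(3);    // 3 decimal columns
    b.cols[0] = new DecimalColumnVector(18, 2);
    b.cols[1] = new DecimalColumnVector(18, 2);
    b.cols[2] = new DecimalColumnVector(18, 2);
    DecimalColumnVector in = (DecimalColumnVector) b.cols[0];
    in.vector[0].set(HiveDecimal.create("1.20"));
    in.vector[1].set(HiveDecimal.create("-3.30"));
    in.vector[2].set(HiveDecimal.create("0"));
    b.size = 3;                                          // three rows in the batch
    return b;
}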

Example 57 with HiveDecimal

Use of org.apache.hadoop.hive.common.type.HiveDecimal in the Apache Hive project.

From the class TestVectorArithmeticExpressions, method testDecimalScalarSubtractDecimalColumn.

/* Spot check correctness of decimal scalar subtract decimal column. The case for
   * addition checks all the cases for the template, so don't do that redundantly here.
   */
@Test
public void testDecimalScalarSubtractDecimalColumn() {
    VectorizedRowBatch b = getVectorizedRowBatch3DecimalCols();
    HiveDecimal d = HiveDecimal.create(1);
    VectorExpression expr = new DecimalScalarSubtractDecimalColumn(d, 0, 2);
    // test without nulls
    expr.evaluate(b);
    DecimalColumnVector r = (DecimalColumnVector) b.cols[2];
    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("-0.20")));
    assertTrue(r.vector[1].getHiveDecimal().equals(HiveDecimal.create("4.30")));
    assertTrue(r.vector[2].getHiveDecimal().equals(HiveDecimal.create("1")));
    // test that overflow produces null
    b = getVectorizedRowBatch3DecimalCols();
    DecimalColumnVector in = (DecimalColumnVector) b.cols[0];
    // set to min possible value
    in.vector[0].set(HiveDecimal.create("-9999999999999999.99"));
    expr.evaluate(b);
    r = (DecimalColumnVector) b.cols[2];
    assertFalse(r.noNulls);
    assertTrue(r.isNull[0]);
}
Also used: VectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch), TestVectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.TestVectorizedRowBatch), DecimalColumnVector (org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector), HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal), DecimalScalarSubtractDecimalColumn (org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DecimalScalarSubtractDecimalColumn), Test (org.junit.Test)
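
The overflow case at the end of this test depends on the result exceeding the precision and scale of the output decimal type. A rough standalone illustration of that semantics, assuming an output type of decimal(18, 2), uses the static HiveDecimal.enforcePrecisionScale method, which returns null when a value no longer fits; the vectorized expression surfaces that as a null output entry.

import org.apache.hadoop.hive.common.type.HiveDecimal;

// Illustrative only: 1 - (-9999999999999999.99) needs 19 significant digits,
// so enforcing decimal(18, 2) on the result yields null, matching the
// isNull[0] assertion on the output column above.
HiveDecimal scalar = HiveDecimal.create(1);
HiveDecimal min = HiveDecimal.create("-9999999999999999.99");
HiveDecimal raw = scalar.subtract(min);                            // 10000000000000000.99
HiveDecimal enforced = HiveDecimal.enforcePrecisionScale(raw, 18, 2);
assert enforced == null;                                           // overflow maps to a null entry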

Example 58 with HiveDecimal

Use of org.apache.hadoop.hive.common.type.HiveDecimal in the Apache Hive project.

From the class TestVectorArithmeticExpressions, method testDecimalScalarAddDecimalColumn.

/* Test decimal scalar to decimal column addition. This is used to cover all the
   * cases used in the source code template ScalarArithmeticColumnDecimal.txt.
   */
@Test
public void testDecimalScalarAddDecimalColumn() {
    VectorizedRowBatch b = getVectorizedRowBatch3DecimalCols();
    HiveDecimal d = HiveDecimal.create(1);
    VectorExpression expr = new DecimalScalarAddDecimalColumn(d, 0, 2);
    // test without nulls
    expr.evaluate(b);
    DecimalColumnVector r = (DecimalColumnVector) b.cols[2];
    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("2.20")));
    assertTrue(r.vector[1].getHiveDecimal().equals(HiveDecimal.create("-2.30")));
    assertTrue(r.vector[2].getHiveDecimal().equals(HiveDecimal.create("1")));
    // test null propagation
    b = getVectorizedRowBatch3DecimalCols();
    DecimalColumnVector in = (DecimalColumnVector) b.cols[0];
    r = (DecimalColumnVector) b.cols[2];
    in.noNulls = false;
    in.isNull[0] = true;
    expr.evaluate(b);
    assertTrue(!r.noNulls);
    assertTrue(r.isNull[0]);
    // test repeating case, no nulls
    b = getVectorizedRowBatch3DecimalCols();
    in = (DecimalColumnVector) b.cols[0];
    in.isRepeating = true;
    expr.evaluate(b);
    r = (DecimalColumnVector) b.cols[2];
    assertTrue(r.isRepeating);
    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("2.20")));
    // test repeating case for null value
    b = getVectorizedRowBatch3DecimalCols();
    in = (DecimalColumnVector) b.cols[0];
    in.isRepeating = true;
    in.isNull[0] = true;
    in.noNulls = false;
    expr.evaluate(b);
    r = (DecimalColumnVector) b.cols[2];
    assertTrue(r.isRepeating);
    assertTrue(!r.noNulls);
    assertTrue(r.isNull[0]);
    // test that overflow produces null
    b = getVectorizedRowBatch3DecimalCols();
    in = (DecimalColumnVector) b.cols[0];
    // set to max possible value
    in.vector[0].set(HiveDecimal.create("9999999999999999.99"));
    expr.evaluate(b);
    r = (DecimalColumnVector) b.cols[2];
    assertFalse(r.noNulls);
    assertTrue(r.isNull[0]);
}
Also used: DecimalScalarAddDecimalColumn (org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DecimalScalarAddDecimalColumn), VectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch), TestVectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.TestVectorizedRowBatch), DecimalColumnVector (org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector), HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal), Test (org.junit.Test)
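
The repeating-input branches asserted here and in the other tests follow a common pattern in the generated scalar/column templates: when the input column is flagged isRepeating, only element 0 is computed and the output column is flagged isRepeating as well. A hedged sketch of that control flow is below; the helper name is made up, and the real generated code in ScalarArithmeticColumnDecimal.txt differs in detail (for example, it also handles overflow).

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;

// Sketch of the repeating-input fast path the isRepeating assertions rely on.
static void addScalarToRepeatingColumn(HiveDecimal scalar,
        DecimalColumnVector in, DecimalColumnVector out) {
    out.isRepeating = true;
    if (in.noNulls || !in.isNull[0]) {
        // Compute scalar + in[0] once; every logical row shares this value.
        out.vector[0].set(scalar.add(in.vector[0].getHiveDecimal()));
    } else {
        // A repeating null input yields a repeating null output.
        out.noNulls = false;
        out.isNull[0] = true;
    }
}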

Example 59 with HiveDecimal

Use of org.apache.hadoop.hive.common.type.HiveDecimal in the Apache Hive project.

From the class TestVectorArithmeticExpressions, method testDecimalScalarMultiplyDecimalColumn.

/* Spot check correctness of decimal scalar multiply decimal column. The case for
   * addition checks all the cases for the template, so don't do that redundantly here.
   */
@Test
public void testDecimalScalarMultiplyDecimalColumn() {
    VectorizedRowBatch b = getVectorizedRowBatch3DecimalCols();
    HiveDecimal d = HiveDecimal.create(2);
    VectorExpression expr = new DecimalScalarMultiplyDecimalColumn(d, 0, 2);
    // test without nulls
    expr.evaluate(b);
    DecimalColumnVector r = (DecimalColumnVector) b.cols[2];
    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("2.40")));
    assertTrue(r.vector[1].getHiveDecimal().equals(HiveDecimal.create("-6.60")));
    assertTrue(r.vector[2].getHiveDecimal().equals(HiveDecimal.create("0")));
    // test that overflow produces null
    b = getVectorizedRowBatch3DecimalCols();
    DecimalColumnVector in = (DecimalColumnVector) b.cols[0];
    // set to max possible value
    in.vector[0].set(HiveDecimal.create("9999999999999999.99"));
    expr.evaluate(b);
    r = (DecimalColumnVector) b.cols[2];
    assertFalse(r.noNulls);
    assertTrue(r.isNull[0]);
}
Also used: VectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch), TestVectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.TestVectorizedRowBatch), DecimalColumnVector (org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector), DecimalScalarMultiplyDecimalColumn (org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DecimalScalarMultiplyDecimalColumn), HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal), Test (org.junit.Test)
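
As a quick standalone cross-check, the expected products can be reproduced with plain HiveDecimal arithmetic, assuming the input column holds 1.20, -3.30 and 0 as inferred from the assertions (compareTo is used for a value comparison that ignores scale).

import org.apache.hadoop.hive.common.type.HiveDecimal;

// Reproduces the expected values asserted above, outside the vectorized path.
HiveDecimal two = HiveDecimal.create(2);
assert two.multiply(HiveDecimal.create("1.20")).compareTo(HiveDecimal.create("2.40")) == 0;
assert two.multiply(HiveDecimal.create("-3.30")).compareTo(HiveDecimal.create("-6.60")) == 0;
assert two.multiply(HiveDecimal.create("0")).compareTo(HiveDecimal.create("0")) == 0;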

Example 60 with HiveDecimal

Use of org.apache.hadoop.hive.common.type.HiveDecimal in the Apache Hive project.

From the class TestDecimalUtil, method testRoundWithDigits.

@Test
public void testRoundWithDigits() {
    DecimalColumnVector dcv = new DecimalColumnVector(4, 20, 3);
    HiveDecimal d1 = HiveDecimal.create("19.56778");
    HiveDecimal expected1 = HiveDecimal.create("19.568");
    DecimalUtil.round(0, d1, dcv);
    Assert.assertEquals(0, expected1.compareTo(dcv.vector[0].getHiveDecimal()));
    // As of HIVE-8745, these decimal values should be trimmed of trailing zeros.
    HiveDecimal d2 = HiveDecimal.create("23.56700");
    Assert.assertEquals(3, d2.scale());
    HiveDecimal expected2 = HiveDecimal.create("23.567");
    DecimalUtil.round(0, d2, dcv);
    Assert.assertEquals(0, expected2.compareTo(dcv.vector[0].getHiveDecimal()));
    HiveDecimal d3 = HiveDecimal.create("-25.34567");
    HiveDecimal expected3 = HiveDecimal.create("-25.346");
    DecimalUtil.round(0, d3, dcv);
    Assert.assertEquals(0, expected3.compareTo(dcv.vector[0].getHiveDecimal()));
    HiveDecimal d4 = HiveDecimal.create("-17.23400");
    Assert.assertEquals(3, d4.scale());
    HiveDecimal expected4 = HiveDecimal.create("-17.234");
    DecimalUtil.round(0, d4, dcv);
    Assert.assertEquals(0, expected4.compareTo(dcv.vector[0].getHiveDecimal()));
    HiveDecimal d5 = HiveDecimal.create("19.36748");
    HiveDecimal expected5 = HiveDecimal.create("19.367");
    DecimalUtil.round(0, d5, dcv);
    Assert.assertEquals(0, expected5.compareTo(dcv.vector[0].getHiveDecimal()));
    HiveDecimal d6 = HiveDecimal.create("-25.54537");
    HiveDecimal expected6 = HiveDecimal.create("-25.545");
    DecimalUtil.round(0, d6, dcv);
    Assert.assertEquals(0, expected6.compareTo(dcv.vector[0].getHiveDecimal()));
}
Also used: DecimalColumnVector (org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector), HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal), Test (org.junit.Test)
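
The expected values above are consistent with rounding to the output column's scale of 3 using HALF_UP (none of these inputs fall exactly on a half, so HALF_EVEN would give the same results). A minimal java.math.BigDecimal sketch of the same semantics, independent of the DecimalUtil implementation:

import java.math.BigDecimal;
import java.math.RoundingMode;

// Mirrors the rounding behavior the assertions above expect at scale 3.
assert new BigDecimal("19.56778").setScale(3, RoundingMode.HALF_UP)
        .compareTo(new BigDecimal("19.568")) == 0;
assert new BigDecimal("-25.34567").setScale(3, RoundingMode.HALF_UP)
        .compareTo(new BigDecimal("-25.346")) == 0;
assert new BigDecimal("19.36748").setScale(3, RoundingMode.HALF_UP)
        .compareTo(new BigDecimal("19.367")) == 0;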

Aggregations

HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal): 94
Test (org.junit.Test): 28
Timestamp (java.sql.Timestamp): 24
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable): 23
DecimalColumnVector (org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector): 22
Text (org.apache.hadoop.io.Text): 22
HiveChar (org.apache.hadoop.hive.common.type.HiveChar): 21
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 21
HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar): 20
Date (java.sql.Date): 19
VectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch): 18
BytesWritable (org.apache.hadoop.io.BytesWritable): 17
DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo): 16
HiveIntervalYearMonth (org.apache.hadoop.hive.common.type.HiveIntervalYearMonth): 15
DateWritable (org.apache.hadoop.hive.serde2.io.DateWritable): 15
TimestampWritable (org.apache.hadoop.hive.serde2.io.TimestampWritable): 15
HiveIntervalDayTime (org.apache.hadoop.hive.common.type.HiveIntervalDayTime): 14
IntWritable (org.apache.hadoop.io.IntWritable): 14
ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable): 13
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 13