
Example 11 with HiveDecimal

use of org.apache.hadoop.hive.common.type.HiveDecimal in project hive by apache.

the class GenericUDFOPMultiply method evaluate.

@Override
protected HiveDecimalWritable evaluate(HiveDecimal left, HiveDecimal right) {
    HiveDecimal dec = left.multiply(right);
    if (dec == null) {
        return null;
    }
    decimalWritable.set(dec);
    return decimalWritable;
}
Also used: HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal)
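
Why the null check matters: HiveDecimal.multiply returns null when the product would exceed HiveDecimal.MAX_PRECISION (38 digits), and the UDF then has to surface a SQL NULL rather than a value. A minimal sketch of that overflow case (the class name and the 20-digit operands are made up for illustration; only the HiveDecimal calls appear in the example above):

import org.apache.hadoop.hive.common.type.HiveDecimal;

public class HiveDecimalMultiplyOverflowSketch {
    public static void main(String[] args) {
        HiveDecimal left = HiveDecimal.create("99999999999999999999");   // 20 digits
        HiveDecimal right = HiveDecimal.create("99999999999999999999");  // 20 digits
        HiveDecimal product = left.multiply(right);  // needs ~40 digits, over the 38-digit limit
        System.out.println(product);                 // prints null
    }
}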

Example 12 with HiveDecimal

use of org.apache.hadoop.hive.common.type.HiveDecimal in project hive by apache.

the class TestVectorFilterExpressions method testFilterDecimalScalarGreaterThanColumn.

/**
   * Spot check scalar > col for decimal.
   */
@Test
public void testFilterDecimalScalarGreaterThanColumn() {
    VectorizedRowBatch b = getVectorizedRowBatch1DecimalCol();
    HiveDecimal scalar = HiveDecimal.create("0");
    VectorExpression expr = new FilterDecimalScalarGreaterDecimalColumn(scalar, 0);
    expr.evaluate(b);
    // check that right row(s) are selected
    assertTrue(b.selectedInUse);
    assertEquals(1, b.selected[0]);
    assertEquals(1, b.size);
}
Also used: VectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch), FilterDecimalScalarGreaterDecimalColumn (org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDecimalScalarGreaterDecimalColumn), HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal), Test (org.junit.Test)
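
The helper getVectorizedRowBatch1DecimalCol is defined elsewhere in TestVectorFilterExpressions and is not shown in these snippets. A hedged sketch of a batch that would satisfy the assertions here and in the next two tests (the three column values and the precision/scale of 18 and 2 are assumptions, chosen so that exactly row 1 is negative and equal to -3.30):

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;

public class DecimalBatchSketch {
    // One decimal column, three rows; only row 1 is below the scalar 0 used above.
    static VectorizedRowBatch getVectorizedRowBatch1DecimalCol() {
        VectorizedRowBatch b = new VectorizedRowBatch(1, 3);
        DecimalColumnVector dcv = new DecimalColumnVector(3, 18, 2);  // precision and scale assumed
        dcv.vector[0].set(HiveDecimal.create("1.20"));
        dcv.vector[1].set(HiveDecimal.create("-3.30"));
        dcv.vector[2].set(HiveDecimal.create("0"));
        b.cols[0] = dcv;
        b.size = 3;
        return b;
    }
}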

Example 13 with HiveDecimal

use of org.apache.hadoop.hive.common.type.HiveDecimal in project hive by apache.

the class TestVectorFilterExpressions method testFilterDecimalColEqualDecimalScalar.

/**
   * This tests the template for Decimal Column-Scalar comparison filters,
   * called FilterDecimalColumnCompareScalar.txt. Only equal is tested for
   * multiple cases because the logic is the same for <, >, <=, >=, == and !=.
   */
@Test
public void testFilterDecimalColEqualDecimalScalar() {
    VectorizedRowBatch b = getVectorizedRowBatch1DecimalCol();
    HiveDecimal scalar = HiveDecimal.create("-3.30");
    VectorExpression expr = new FilterDecimalColEqualDecimalScalar(0, scalar);
    expr.evaluate(b);
    // check that right row(s) are selected
    assertTrue(b.selectedInUse);
    assertEquals(1, b.selected[0]);
    assertEquals(1, b.size);
    // try again with a null value
    b = getVectorizedRowBatch1DecimalCol();
    b.cols[0].noNulls = false;
    b.cols[0].isNull[1] = true;
    expr.evaluate(b);
    // verify that no rows were selected
    assertEquals(0, b.size);
    // try the repeating case
    b = getVectorizedRowBatch1DecimalCol();
    b.cols[0].isRepeating = true;
    expr.evaluate(b);
    // verify that no rows were selected
    assertEquals(0, b.size);
    // try the repeating null case
    b = getVectorizedRowBatch1DecimalCol();
    b.cols[0].isRepeating = true;
    b.cols[0].noNulls = false;
    b.cols[0].isNull[0] = true;
    expr.evaluate(b);
    // verify that no rows were selected
    assertEquals(0, b.size);
}
Also used: VectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch), HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal), FilterDecimalColEqualDecimalScalar (org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDecimalColEqualDecimalScalar), Test (org.junit.Test)
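
In the null variation the only matching row (index 1, per the earlier assertion) is marked null, so nothing survives; the repeating and repeating-null variations come back empty because a generated filter only has to inspect element 0 once isRepeating is set. A simplified sketch of that repeating-branch shortcut (this is not the generated FilterDecimalColEqualDecimalScalar source; the method name is assumed and the non-repeating branch is omitted):

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;

public class RepeatingFilterSketch {
    // When every row repeats vector[0], the whole batch is kept or emptied in one step.
    static void filterColEqualScalar(VectorizedRowBatch batch, int colNum, HiveDecimal scalar) {
        DecimalColumnVector col = (DecimalColumnVector) batch.cols[colNum];
        if (col.isRepeating) {
            boolean keep = (col.noNulls || !col.isNull[0])
                    && col.vector[0].getHiveDecimal().compareTo(scalar) == 0;
            if (!keep) {
                batch.size = 0;  // this is what assertEquals(0, b.size) observes above
            }
            return;
        }
        // non-repeating batches are filtered row by row (omitted in this sketch)
    }
}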

Example 14 with HiveDecimal

use of org.apache.hadoop.hive.common.type.HiveDecimal in project hive by apache.

the class TestVectorFilterExpressions method testFilterDecimalScalarEqualDecimalColumn.

/**
   * This tests the template for Decimal Scalar-Column comparison filters,
   * called FilterDecimalScalarCompareColumn.txt. Only equal is tested for multiple
   * cases because the logic is the same for <, >, <=, >=, == and !=.
   */
@Test
public void testFilterDecimalScalarEqualDecimalColumn() {
    VectorizedRowBatch b = getVectorizedRowBatch1DecimalCol();
    HiveDecimal scalar = HiveDecimal.create("-3.30");
    VectorExpression expr = new FilterDecimalScalarEqualDecimalColumn(scalar, 0);
    expr.evaluate(b);
    // check that right row(s) are selected
    assertTrue(b.selectedInUse);
    assertEquals(1, b.selected[0]);
    assertEquals(1, b.size);
    // try again with a null value
    b = getVectorizedRowBatch1DecimalCol();
    b.cols[0].noNulls = false;
    b.cols[0].isNull[1] = true;
    expr.evaluate(b);
    // verify that no rows were selected
    assertEquals(0, b.size);
    // try the repeating case
    b = getVectorizedRowBatch1DecimalCol();
    b.cols[0].isRepeating = true;
    expr.evaluate(b);
    // verify that no rows were selected
    assertEquals(0, b.size);
    // try the repeating null case
    b = getVectorizedRowBatch1DecimalCol();
    b.cols[0].isRepeating = true;
    b.cols[0].noNulls = false;
    b.cols[0].isNull[0] = true;
    expr.evaluate(b);
    // verify that no rows were selected
    assertEquals(0, b.size);
}
Also used: VectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch), HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal), FilterDecimalScalarEqualDecimalColumn (org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDecimalScalarEqualDecimalColumn), Test (org.junit.Test)
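
The scalar-column form asserts the same selectedInUse / selected / size triple, because that triple is how a VectorizedRowBatch reports filter results to the next operator. A short sketch of how downstream code walks only the surviving rows (the method name and the println are illustrative; the field accesses are the standard VectorizedRowBatch contract):

import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;

public class SelectedRowsSketch {
    static void forEachLiveRow(VectorizedRowBatch batch) {
        if (batch.selectedInUse) {
            // after a filter: size counts survivors, selected[] maps to physical row indexes
            for (int j = 0; j < batch.size; j++) {
                int row = batch.selected[j];
                System.out.println("live row " + row);
            }
        } else {
            // no filter applied: rows 0..size-1 are all live
            for (int row = 0; row < batch.size; row++) {
                System.out.println("live row " + row);
            }
        }
    }
}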

Example 15 with HiveDecimal

use of org.apache.hadoop.hive.common.type.HiveDecimal in project hive by apache.

the class TestVectorTypeCasts method testCastTimestampToDecimal.

@Test
public void testCastTimestampToDecimal() {
    // The input timestamps are stored as long values
    // measured in nanoseconds from the epoch.
    HiveDecimal[] hiveDecimalValues = new HiveDecimal[500];
    VectorizedRowBatch b = getBatchTimestampDecimal(hiveDecimalValues);
    VectorExpression expr = new CastTimestampToDecimal(0, 1);
    TimestampColumnVector inT = (TimestampColumnVector) b.cols[0];
    expr.evaluate(b);
    DecimalColumnVector r = (DecimalColumnVector) b.cols[1];
    for (int i = 0; i < hiveDecimalValues.length; i++) {
        HiveDecimal hiveDecimal = r.vector[i].getHiveDecimal();
        HiveDecimal expectedHiveDecimal = hiveDecimalValues[i];
        if (!hiveDecimal.equals(expectedHiveDecimal)) {
            assertTrue(false);
        }
    }
    // Try again with a value that won't fit in 5 digits, to make
    // sure that NULL is produced.
    b.cols[1] = r = new DecimalColumnVector(hiveDecimalValues.length, 5, 2);
    expr.evaluate(b);
    r = (DecimalColumnVector) b.cols[1];
    for (int i = 0; i < hiveDecimalValues.length; i++) {
        HiveDecimal hiveDecimal = r.vector[i].getHiveDecimal();
        HiveDecimal expectedHiveDecimal = hiveDecimalValues[i];
        if (HiveDecimal.enforcePrecisionScale(expectedHiveDecimal, 5, 2) == null) {
            assertTrue(r.isNull[i]);
        } else {
            assertTrue(!r.isNull[i]);
            if (!hiveDecimal.equals(expectedHiveDecimal)) {
                assertTrue(false);
            }
        }
    }
}
Also used: VectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch), TimestampColumnVector (org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector), DecimalColumnVector (org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector), HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal), Test (org.junit.Test)
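
The second half of the test leans on HiveDecimal.enforcePrecisionScale: when a value's integer digits exceed precision minus scale, the call returns null, and the cast expression writes a NULL into the output vector instead. A minimal sketch of that behaviour (the class name and sample values are illustrative only):

import org.apache.hadoop.hive.common.type.HiveDecimal;

public class EnforcePrecisionScaleSketch {
    public static void main(String[] args) {
        HiveDecimal fits = HiveDecimal.create("123.45");        // 3 integer digits fit in (precision 5, scale 2)
        HiveDecimal tooWide = HiveDecimal.create("123456.78");  // 6 integer digits overflow (5, 2)
        System.out.println(HiveDecimal.enforcePrecisionScale(fits, 5, 2));    // 123.45
        System.out.println(HiveDecimal.enforcePrecisionScale(tooWide, 5, 2)); // null
    }
}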

Aggregations

HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal): 83 usages
Test (org.junit.Test): 28 usages
DecimalColumnVector (org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector): 24 usages
VectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch): 18 usages
Text (org.apache.hadoop.io.Text): 16 usages
Timestamp (java.sql.Timestamp): 15 usages
HiveChar (org.apache.hadoop.hive.common.type.HiveChar): 15 usages
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable): 15 usages
HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar): 14 usages
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 13 usages
Date (java.sql.Date): 11 usages
BytesWritable (org.apache.hadoop.io.BytesWritable): 11 usages
TestVectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.TestVectorizedRowBatch): 10 usages
TimestampWritable (org.apache.hadoop.hive.serde2.io.TimestampWritable): 10 usages
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 9 usages
DateWritable (org.apache.hadoop.hive.serde2.io.DateWritable): 9 usages
DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo): 9 usages
IntWritable (org.apache.hadoop.io.IntWritable): 9 usages
ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable): 8 usages
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 8 usages