
Example 6 with HiveDecimal

Use of org.apache.hadoop.hive.common.type.HiveDecimal in project hive by apache.

Class TestVectorArithmeticExpressions, method testDecimalScalarModuloDecimalColumn.

// Spot check decimal scalar-column modulo
@Test
public void testDecimalScalarModuloDecimalColumn() {
    VectorizedRowBatch b = getVectorizedRowBatch3DecimalCols();
    HiveDecimal d = HiveDecimal.create("2.00");
    VectorExpression expr = new DecimalScalarModuloDecimalColumn(d, 0, 2);
    // test without nulls
    expr.evaluate(b);
    DecimalColumnVector r = (DecimalColumnVector) b.cols[2];
    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("0.80")));
    assertTrue(r.vector[1].getHiveDecimal().equals(HiveDecimal.create("2.00")));
    // entry 2 will be null due to zero-divide
    assertFalse(r.noNulls);
    assertTrue(r.isNull[2]);
    // try again with some different data values
    DecimalColumnVector in = (DecimalColumnVector) b.cols[0];
    expr = new DecimalScalarModuloDecimalColumn(d, 0, 2);
    in.vector[0].set(HiveDecimal.create("0.50"));
    in.vector[1].set(HiveDecimal.create("0.80"));
    in.vector[2].set(HiveDecimal.create("0.70"));
    expr.evaluate(b);
    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("0.00")));
    assertTrue(r.vector[1].getHiveDecimal().equals(HiveDecimal.create("0.40")));
    assertTrue(r.vector[2].getHiveDecimal().equals(HiveDecimal.create("0.60")));
}
Also used: VectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch), TestVectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.TestVectorizedRowBatch), DecimalColumnVector (org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector), HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal), DecimalScalarModuloDecimalColumn (org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DecimalScalarModuloDecimalColumn), Test (org.junit.Test)
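The helper getVectorizedRowBatch3DecimalCols is not shown on this page. Below is a minimal sketch of what it might look like, with the input values inferred from the assertions above and from the multiply test in the next example; the decimal(18,2) column type and the exact values are assumptions, not the actual Hive test code.

// Hypothetical sketch of the batch helper; the real method in
// TestVectorArithmeticExpressions may use different precision/scale or values.
private VectorizedRowBatch getVectorizedRowBatch3DecimalCols() {
    VectorizedRowBatch b = new VectorizedRowBatch(3);
    b.cols[0] = new DecimalColumnVector(3, 18, 2);
    b.cols[1] = new DecimalColumnVector(3, 18, 2);
    b.cols[2] = new DecimalColumnVector(3, 18, 2);
    DecimalColumnVector c0 = (DecimalColumnVector) b.cols[0];
    c0.vector[0].set(HiveDecimal.create("1.20"));   // 2.00 % 1.20  -> 0.80
    c0.vector[1].set(HiveDecimal.create("-3.30"));  // 2.00 % -3.30 -> 2.00
    c0.vector[2].set(HiveDecimal.create("0"));      // divisor of zero -> null result entry
    b.size = 3;
    return b;
}

Only cols[0] is read as input by the two expressions on this page (the scalar-modulo-column test above and the column-multiply-scalar test below), so the values of cols[1] are left at their defaults in this sketch.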

Example 7 with HiveDecimal

Use of org.apache.hadoop.hive.common.type.HiveDecimal in project hive by apache.

Class TestVectorArithmeticExpressions, method testDecimalColMultiplyDecimalScalar.

/* Spot check correctness of decimal column multiply decimal scalar. The case for
   * addition checks all the cases for the template, so don't do that redundantly here.
   */
@Test
public void testDecimalColMultiplyDecimalScalar() {
    VectorizedRowBatch b = getVectorizedRowBatch3DecimalCols();
    HiveDecimal d = HiveDecimal.create(2);
    VectorExpression expr = new DecimalColMultiplyDecimalScalar(0, d, 2);
    // test without nulls
    expr.evaluate(b);
    DecimalColumnVector r = (DecimalColumnVector) b.cols[2];
    assertTrue(r.vector[0].getHiveDecimal().equals(HiveDecimal.create("2.40")));
    assertTrue(r.vector[1].getHiveDecimal().equals(HiveDecimal.create("-6.60")));
    assertTrue(r.vector[2].getHiveDecimal().equals(HiveDecimal.create("0")));
    // test that overflow produces null
    b = getVectorizedRowBatch3DecimalCols();
    DecimalColumnVector in = (DecimalColumnVector) b.cols[0];
    // set to max possible value
    in.vector[0].set(HiveDecimal.create("9999999999999999.99"));
    expr.evaluate(b);
    r = (DecimalColumnVector) b.cols[2];
    assertFalse(r.noNulls);
    assertTrue(r.isNull[0]);
}
Also used: VectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch), TestVectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.TestVectorizedRowBatch), DecimalColumnVector (org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector), DecimalColMultiplyDecimalScalar (org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DecimalColMultiplyDecimalScalar), HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal), Test (org.junit.Test)
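The null produced for the overflowed entry reflects a result that can no longer be represented in the output column's type. A rough standalone illustration, assuming the test columns are decimal(18,2) (an assumption; the real precision/scale is defined by the batch helper):

// Sketch only: 9999999999999999.99 * 2 needs 19 digits, so enforcing an
// assumed decimal(18,2) output type yields null, just as the vectorized
// expression sets isNull[0] above.
HiveDecimal max = HiveDecimal.create("9999999999999999.99");
HiveDecimal product = max.multiply(HiveDecimal.create(2));   // 19999999999999999.98
HiveDecimal enforced = HiveDecimal.enforcePrecisionScale(product, 18, 2);   // null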

Example 8 with HiveDecimal

Use of org.apache.hadoop.hive.common.type.HiveDecimal in project hive by apache.

Class TestDecimalUtil, method testSign.

@Test
public void testSign() {
    LongColumnVector lcv = new LongColumnVector(4);
    HiveDecimal d1 = HiveDecimal.create("19.56778");
    DecimalUtil.sign(0, d1, lcv);
    Assert.assertEquals(1, lcv.vector[0]);
    HiveDecimal d2 = HiveDecimal.create("-25.34567");
    DecimalUtil.sign(0, d2, lcv);
    Assert.assertEquals(-1, lcv.vector[0]);
    HiveDecimal d3 = HiveDecimal.create("0.00000");
    Assert.assertEquals(0, d3.scale());
    DecimalUtil.sign(0, d3, lcv);
    Assert.assertEquals(0, lcv.vector[0]);
}
Also used: HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal), LongColumnVector (org.apache.hadoop.hive.ql.exec.vector.LongColumnVector), Test (org.junit.Test)
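DecimalUtil.sign writes the signum of the decimal (1, -1, or 0) into the long output column. For a single value the same information is available directly from HiveDecimal, as in this small sketch (not part of the Hive tests):

// HiveDecimal.signum() returns -1, 0, or 1, which is the value DecimalUtil.sign
// stores in the LongColumnVector entry.
HiveDecimal d = HiveDecimal.create("-25.34567");
Assert.assertEquals(-1, d.signum());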

Example 9 with HiveDecimal

Use of org.apache.hadoop.hive.common.type.HiveDecimal in project hive by apache.

Class PrimitiveObjectInspectorUtils, method getHiveDecimal.

public static HiveDecimal getHiveDecimal(Object o, PrimitiveObjectInspector oi) {
    if (o == null) {
        return null;
    }
    HiveDecimal result = null;
    switch(oi.getPrimitiveCategory()) {
        case VOID:
            result = null;
            break;
        case BOOLEAN:
            result = ((BooleanObjectInspector) oi).get(o) ? HiveDecimal.ONE : HiveDecimal.ZERO;
            break;
        case BYTE:
            result = HiveDecimal.create(((ByteObjectInspector) oi).get(o));
            break;
        case SHORT:
            result = HiveDecimal.create(((ShortObjectInspector) oi).get(o));
            break;
        case INT:
            result = HiveDecimal.create(((IntObjectInspector) oi).get(o));
            break;
        case LONG:
            result = HiveDecimal.create(((LongObjectInspector) oi).get(o));
            break;
        case FLOAT:
            Float f = ((FloatObjectInspector) oi).get(o);
            result = HiveDecimal.create(f.toString());
            break;
        case DOUBLE:
            Double d = ((DoubleObjectInspector) oi).get(o);
            result = HiveDecimal.create(d.toString());
            break;
        case STRING:
            result = HiveDecimal.create(((StringObjectInspector) oi).getPrimitiveJavaObject(o));
            break;
        case CHAR:
        case VARCHAR:
            result = HiveDecimal.create(getString(o, oi));
            break;
        case TIMESTAMP:
            Double ts = ((TimestampObjectInspector) oi).getPrimitiveWritableObject(o).getDouble();
            result = HiveDecimal.create(ts.toString());
            break;
        case DECIMAL:
            result = ((HiveDecimalObjectInspector) oi).getPrimitiveJavaObject(o);
            break;
        // unsupported conversion
        case DATE:
        default:
            throw new RuntimeException("Hive 2 Internal error: unsupported conversion from type: " + oi.getTypeName());
    }
    return result;
}
Also used: HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal)
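A brief usage sketch (hypothetical call, not part of the Hive source): converting a plain Java String through one of the standard object inspectors exercises the STRING branch above.

// Hypothetical usage of the STRING branch, which parses the text via HiveDecimal.create.
PrimitiveObjectInspector stringOI =
    PrimitiveObjectInspectorFactory.javaStringObjectInspector;
HiveDecimal dec = PrimitiveObjectInspectorUtils.getHiveDecimal("123.45", stringOI);   // 123.45
// A null input object simply returns null, per the guard at the top of the method.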

Example 10 with HiveDecimal

Use of org.apache.hadoop.hive.common.type.HiveDecimal in project hive by apache.

Class PrimitiveObjectInspectorUtils, method getLong.

/**
   * Get the long value out of a primitive object. Note that
   * NullPointerException will be thrown if o is null. Note that
   * NumberFormatException will be thrown if o is not a valid number.
   */
public static long getLong(Object o, PrimitiveObjectInspector oi) {
    long result = 0;
    switch(oi.getPrimitiveCategory()) {
        case VOID:
            result = 0;
            break;
        case BOOLEAN:
            result = (((BooleanObjectInspector) oi).get(o) ? 1 : 0);
            break;
        case BYTE:
            result = ((ByteObjectInspector) oi).get(o);
            break;
        case SHORT:
            result = ((ShortObjectInspector) oi).get(o);
            break;
        case INT:
            result = ((IntObjectInspector) oi).get(o);
            break;
        case LONG:
            result = ((LongObjectInspector) oi).get(o);
            break;
        case FLOAT:
            result = (long) ((FloatObjectInspector) oi).get(o);
            break;
        case DOUBLE:
            result = (long) ((DoubleObjectInspector) oi).get(o);
            break;
        case STRING:
            StringObjectInspector soi = (StringObjectInspector) oi;
            if (soi.preferWritable()) {
                Text t = soi.getPrimitiveWritableObject(o);
                result = LazyLong.parseLong(t.getBytes(), 0, t.getLength());
            } else {
                String s = soi.getPrimitiveJavaObject(o);
                result = Long.parseLong(s);
            }
            break;
        case CHAR:
        case VARCHAR:
            {
                result = Long.parseLong(getString(o, oi));
                break;
            }
        case TIMESTAMP:
            result = ((TimestampObjectInspector) oi).getPrimitiveWritableObject(o).getSeconds();
            break;
        case DECIMAL:
            {
                HiveDecimal dec = ((HiveDecimalObjectInspector) oi).getPrimitiveJavaObject(o);
                if (!dec.isLong()) {
                    throw new NumberFormatException();
                }
                result = dec.longValue();
            }
            break;
        // unsupported conversion
        case DATE:
        default:
            throw new RuntimeException("Hive 2 Internal error: unsupported conversion from type: " + oi.getTypeName());
    }
    return result;
}
Also used: HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal), Text (org.apache.hadoop.io.Text)
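A brief usage sketch for the DECIMAL branch (hypothetical call, not part of the Hive source): a decimal value that fits exactly in a long converts, anything else is rejected.

// Hypothetical usage of the DECIMAL branch above.
PrimitiveObjectInspector decOI =
    PrimitiveObjectInspectorFactory.javaHiveDecimalObjectInspector;
long v = PrimitiveObjectInspectorUtils.getLong(HiveDecimal.create("42"), decOI);   // 42
// A decimal for which isLong() is false fails the guard above and
// throws NumberFormatException.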

Aggregations

HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal): 83
Test (org.junit.Test): 28
DecimalColumnVector (org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector): 24
VectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch): 18
Text (org.apache.hadoop.io.Text): 16
Timestamp (java.sql.Timestamp): 15
HiveChar (org.apache.hadoop.hive.common.type.HiveChar): 15
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable): 15
HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar): 14
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 13
Date (java.sql.Date): 11
BytesWritable (org.apache.hadoop.io.BytesWritable): 11
TestVectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.TestVectorizedRowBatch): 10
TimestampWritable (org.apache.hadoop.hive.serde2.io.TimestampWritable): 10
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 9
DateWritable (org.apache.hadoop.hive.serde2.io.DateWritable): 9
DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo): 9
IntWritable (org.apache.hadoop.io.IntWritable): 9
ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable): 8
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 8