Example 46 with HiveDecimal

Use of org.apache.hadoop.hive.common.type.HiveDecimal in project hive by apache.

The class TestVectorTypeCasts, method testCastDoubleToDecimal.

@Test
public void testCastDoubleToDecimal() {
    // Column 0 holds the double inputs; CastDoubleToDecimal(0, 1) writes decimals to column 1.
    VectorizedRowBatch b = getBatchDoubleDecimal();
    VectorExpression expr = new CastDoubleToDecimal(0, 1);
    expr.evaluate(b);
    // Verify the converted values in the output column against the expected decimals.
    DecimalColumnVector r = (DecimalColumnVector) b.cols[1];
    HiveDecimal hd0 = HiveDecimal.create("0.0");
    if (!hd0.equals(r.vector[0].getHiveDecimal())) {
        assertTrue(false);
    }
    HiveDecimal hd1 = HiveDecimal.create("-1.0");
    if (!hd1.equals(r.vector[1].getHiveDecimal())) {
        assertTrue(false);
    }
    HiveDecimal hd2 = HiveDecimal.create("99999999999999");
    if (!hd2.equals(r.vector[2].getHiveDecimal())) {
        assertTrue(false);
    }
}
Also used: VectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch), DecimalColumnVector (org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector), HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal), Test (org.junit.Test)
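
The helper getBatchDoubleDecimal() referenced in this test is not part of the excerpt. The following is a minimal, hypothetical sketch of such a batch builder, not the actual Hive helper: the DoubleColumnVector input column, the output precision and scale of 18/2, and the three input values are all assumptions chosen only so that the expected decimals in the test above would match.

// Hypothetical sketch (not the Hive source): column 0 holds double inputs,
// column 1 receives the decimals written by CastDoubleToDecimal(0, 1).
private VectorizedRowBatch getBatchDoubleDecimal() {
    VectorizedRowBatch b = new VectorizedRowBatch(2);
    DoubleColumnVector in = new DoubleColumnVector();
    // Precision/scale are assumptions; they only need to accommodate the test values.
    DecimalColumnVector out = new DecimalColumnVector(18, 2);
    b.cols[0] = in;
    b.cols[1] = out;
    in.vector[0] = 0d;
    in.vector[1] = -1d;
    in.vector[2] = 99999999999999d;
    b.size = 3;
    return b;
}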

Example 47 with HiveDecimal

Use of org.apache.hadoop.hive.common.type.HiveDecimal in project hive by apache.

The class TestVectorTypeCasts, method getBatchDecimalTimestamp.

private VectorizedRowBatch getBatchDecimalTimestamp(double[] doubleValues) {
    VectorizedRowBatch b = new VectorizedRowBatch(2);
    DecimalColumnVector dv;
    // Column 0: decimal input values; column 1: timestamp output for the cast under test.
    b.cols[0] = dv = new DecimalColumnVector(doubleValues.length, HiveDecimal.SYSTEM_DEFAULT_PRECISION, HiveDecimal.SYSTEM_DEFAULT_SCALE);
    b.cols[1] = new TimestampColumnVector(doubleValues.length);
    dv.noNulls = true;
    // A fixed seed keeps the generated timestamps reproducible across runs.
    Random r = new Random(94830);
    for (int i = 0; i < doubleValues.length; i++) {
        long millis = RandomTypeUtil.randomMillis(r);
        Timestamp ts = new Timestamp(millis);
        int nanos = RandomTypeUtil.randomNanos(r);
        ts.setNanos(nanos);
        // Record the timestamp's double representation as the expected value, and store
        // the same value as a HiveDecimal in the input column.
        TimestampWritable tsw = new TimestampWritable(ts);
        double asDouble = tsw.getDouble();
        doubleValues[i] = asDouble;
        HiveDecimal hiveDecimal = HiveDecimal.create(new BigDecimal(asDouble));
        dv.set(i, hiveDecimal);
    }
    b.size = doubleValues.length;
    return b;
}
Also used: VectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch), TimestampColumnVector (org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector), DecimalColumnVector (org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector), Random (java.util.Random), HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal), TimestampWritable (org.apache.hadoop.hive.serde2.io.TimestampWritable), Timestamp (java.sql.Timestamp), BigDecimal (java.math.BigDecimal)
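
The batch built above is presumably consumed the same way as in Example 46. The snippet below is a hedged usage sketch, not the actual Hive test body; the CastDecimalToTimestamp expression class and its (inputColumn, outputColumn) constructor are assumed by analogy with CastDoubleToDecimal.

// Hedged usage sketch (assumed expression class and constructor).
double[] doubleValues = new double[500];
VectorizedRowBatch b = getBatchDecimalTimestamp(doubleValues);
VectorExpression expr = new CastDecimalToTimestamp(0, 1);  // decimal input col 0, timestamp output col 1
expr.evaluate(b);
TimestampColumnVector r = (TimestampColumnVector) b.cols[1];
// Each converted timestamp can then be checked against doubleValues[i] within a small epsilon.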

Example 48 with HiveDecimal

Use of org.apache.hadoop.hive.common.type.HiveDecimal in project hive by apache.

The class GenericUDFPrintf, method evaluate.

@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
    // If the first argument (the format pattern) is null, return null. (Other arguments may
    // be null, in which case "null" will be printed for them.)
    if (arguments[0].get() == null) {
        return null;
    }
    StringBuilder sb = new StringBuilder();
    Formatter formatter = new Formatter(sb, Locale.US);
    Text pattern = (Text) converterFormat.convert(arguments[0].get());
    ArrayList<Object> argumentList = new ArrayList<Object>();
    for (int i = 1; i < arguments.length; i++) {
        PrimitiveObjectInspector poi = (PrimitiveObjectInspector) argumentOIs[i];
        switch(poi.getPrimitiveCategory()) {
            case BOOLEAN:
            case BYTE:
            case SHORT:
            case INT:
            case LONG:
            case FLOAT:
            case DOUBLE:
            case CHAR:
            case VARCHAR:
            case STRING:
            case TIMESTAMP:
                argumentList.add(poi.getPrimitiveJavaObject(arguments[i].get()));
                break;
            case DECIMAL:
                // Decimal classes cannot be converted by printf, so convert them to doubles.
                Object obj = poi.getPrimitiveJavaObject(arguments[i].get());
                if (obj instanceof HiveDecimal) {
                    obj = ((HiveDecimal) obj).doubleValue();
                } else if (obj instanceof BigDecimal) {
                    obj = ((BigDecimal) obj).doubleValue();
                }
                argumentList.add(obj);
                break;
            default:
                argumentList.add(arguments[i].get());
                break;
        }
    }
    formatter.format(pattern.toString(), argumentList.toArray());
    resultText.set(sb.toString());
    formatter.close();
    return resultText;
}
Also used: Formatter (java.util.Formatter), ArrayList (java.util.ArrayList), HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal), Text (org.apache.hadoop.io.Text), PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector), BigDecimal (java.math.BigDecimal)
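
As the comment in the DECIMAL branch notes, decimal values are converted to double before being handed to the Formatter. A small standalone illustration of that conversion, with literal values invented for the example:

// Illustration only: converting a HiveDecimal to double before printf-style formatting.
HiveDecimal hd = HiveDecimal.create("1.239");
double d = hd.doubleValue();
// Mirrors what the UDF does through java.util.Formatter: "%.3f" on 1.239 yields "1.239".
String formatted = String.format(Locale.US, "%.3f", d);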

Example 49 with HiveDecimal

Use of org.apache.hadoop.hive.common.type.HiveDecimal in project hive by apache.

The class GenericUDFOPMod, method evaluate.

@Override
protected HiveDecimalWritable evaluate(HiveDecimal left, HiveDecimal right) {
    // Modulo by zero yields NULL rather than throwing.
    if (right.compareTo(HiveDecimal.ZERO) == 0) {
        return null;
    }
    HiveDecimal dec = left.remainder(right);
    // A null result from the decimal arithmetic is passed through as SQL NULL.
    if (dec == null) {
        return null;
    }
    decimalWritable.set(dec);
    return decimalWritable;
}
Also used: HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal)
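
HiveDecimal.remainder is expected to follow BigDecimal-style semantics, where the sign of the result follows the dividend. A short illustration with invented values (contrast with the positive modulo in Example 50 below):

// Illustration only (invented values): remainder keeps the sign of the dividend.
HiveDecimal dividend = HiveDecimal.create("-7");
HiveDecimal divisor = HiveDecimal.create("3");
HiveDecimal rem = dividend.remainder(divisor);   // expected: -1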

Example 50 with HiveDecimal

Use of org.apache.hadoop.hive.common.type.HiveDecimal in project hive by apache.

The class GenericUDFPosMod, method evaluate.

@Override
protected HiveDecimalWritable evaluate(HiveDecimal left, HiveDecimal right) {
    // pmod by zero yields NULL rather than throwing.
    if (right.compareTo(HiveDecimal.ZERO) == 0) {
        return null;
    }
    // ((a % b) + b) % b makes the result take the sign of the divisor
    // (non-negative for a positive divisor).
    HiveDecimal dec = left.remainder(right).add(right).remainder(right);
    // A null result from the decimal arithmetic is passed through as SQL NULL.
    if (dec == null) {
        return null;
    }
    decimalWritable.set(dec);
    return decimalWritable;
}
Also used: HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal)
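
The expression left.remainder(right).add(right).remainder(right) is the usual ((a % b) + b) % b construction: it makes the result take the sign of the divisor, i.e. non-negative for a positive divisor. Continuing the illustration from Example 49, with invented values:

// Illustration only (invented values): positive modulo via ((a % b) + b) % b.
HiveDecimal dividend = HiveDecimal.create("-7");
HiveDecimal divisor = HiveDecimal.create("3");
// Plain remainder gives -1; adding the divisor and taking the remainder again gives 2.
HiveDecimal posMod = dividend.remainder(divisor).add(divisor).remainder(divisor);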

Aggregations

HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal): 94
Test (org.junit.Test): 28
Timestamp (java.sql.Timestamp): 24
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable): 23
DecimalColumnVector (org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector): 22
Text (org.apache.hadoop.io.Text): 22
HiveChar (org.apache.hadoop.hive.common.type.HiveChar): 21
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 21
HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar): 20
Date (java.sql.Date): 19
VectorizedRowBatch (org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch): 18
BytesWritable (org.apache.hadoop.io.BytesWritable): 17
DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo): 16
HiveIntervalYearMonth (org.apache.hadoop.hive.common.type.HiveIntervalYearMonth): 15
DateWritable (org.apache.hadoop.hive.serde2.io.DateWritable): 15
TimestampWritable (org.apache.hadoop.hive.serde2.io.TimestampWritable): 15
HiveIntervalDayTime (org.apache.hadoop.hive.common.type.HiveIntervalDayTime): 14
IntWritable (org.apache.hadoop.io.IntWritable): 14
ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable): 13
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 13