Example use of org.apache.hadoop.hive.common.type.HiveDecimal in the Apache Hive project:
class TestVectorTypeCasts, method testCastDoubleToDecimal.
@Test
public void testCastDoubleToDecimal() {
// Evaluate a double (col 0) -> decimal (col 1) cast over a prepared batch
// and verify the three rows convert to the exact expected decimals.
VectorizedRowBatch b = getBatchDoubleDecimal();
VectorExpression expr = new CastDoubleToDecimal(0, 1);
expr.evaluate(b);
DecimalColumnVector r = (DecimalColumnVector) b.cols[1];
// assertEquals reports expected vs. actual on failure; the previous
// "if (!equals) assertTrue(false)" pattern failed with no diagnostic at all.
assertEquals(HiveDecimal.create("0.0"), r.vector[0].getHiveDecimal());
assertEquals(HiveDecimal.create("-1.0"), r.vector[1].getHiveDecimal());
assertEquals(HiveDecimal.create("99999999999999"), r.vector[2].getHiveDecimal());
}
Example use of org.apache.hadoop.hive.common.type.HiveDecimal in the Apache Hive project:
class TestVectorTypeCasts, method getBatchDecimalTimestamp.
private VectorizedRowBatch getBatchDecimalTimestamp(double[] doubleValues) {
// Build a two-column batch: col 0 holds random timestamps encoded as
// decimals, col 1 is the (empty) timestamp output column. As a side
// effect, doubleValues[i] receives the double form of each timestamp so
// the caller can check the conversion results.
VectorizedRowBatch batch = new VectorizedRowBatch(2);
DecimalColumnVector decimalCol = new DecimalColumnVector(doubleValues.length,
    HiveDecimal.SYSTEM_DEFAULT_PRECISION, HiveDecimal.SYSTEM_DEFAULT_SCALE);
batch.cols[0] = decimalCol;
batch.cols[1] = new TimestampColumnVector(doubleValues.length);
decimalCol.noNulls = true;
// Fixed seed keeps the generated batch reproducible across runs.
Random rand = new Random(94830);
for (int row = 0; row < doubleValues.length; row++) {
// Draw millis first, then nanos, matching the original call order so the
// same seed yields the same values.
Timestamp ts = new Timestamp(RandomTypeUtil.randomMillis(rand));
ts.setNanos(RandomTypeUtil.randomNanos(rand));
double asDouble = new TimestampWritable(ts).getDouble();
doubleValues[row] = asDouble;
// BigDecimal(double) is intentional here: we want the exact binary value
// of the double, not a rounded string form.
decimalCol.set(row, HiveDecimal.create(new BigDecimal(asDouble)));
}
batch.size = doubleValues.length;
return batch;
}
Example use of org.apache.hadoop.hive.common.type.HiveDecimal in the Apache Hive project:
class GenericUDFPrintf, method evaluate.
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
// A null format string yields a null result. (Null values among the later
// arguments are fine: Formatter prints "null" for them, so only the
// pattern argument needs an explicit check.)
if (arguments[0].get() == null) {
return null;
}
Text pattern = (Text) converterFormat.convert(arguments[0].get());
// Unwrap each argument to the Java object Formatter can handle.
ArrayList<Object> argumentList = new ArrayList<Object>();
for (int i = 1; i < arguments.length; i++) {
PrimitiveObjectInspector poi = (PrimitiveObjectInspector) argumentOIs[i];
switch(poi.getPrimitiveCategory()) {
case BOOLEAN:
case BYTE:
case SHORT:
case INT:
case LONG:
case FLOAT:
case DOUBLE:
case CHAR:
case VARCHAR:
case STRING:
case TIMESTAMP:
argumentList.add(poi.getPrimitiveJavaObject(arguments[i].get()));
break;
case DECIMAL:
// Decimal classes cannot be converted by printf, so convert them to doubles.
Object obj = poi.getPrimitiveJavaObject(arguments[i].get());
if (obj instanceof HiveDecimal) {
obj = ((HiveDecimal) obj).doubleValue();
} else if (obj instanceof BigDecimal) {
obj = ((BigDecimal) obj).doubleValue();
}
argumentList.add(obj);
break;
default:
argumentList.add(arguments[i].get());
break;
}
}
StringBuilder sb = new StringBuilder();
// try-with-resources ensures close() runs even if format() throws (e.g. a
// malformed pattern raises IllegalFormatException); the previous manual
// close() after format() was skipped in that case.
try (Formatter formatter = new Formatter(sb, Locale.US)) {
formatter.format(pattern.toString(), argumentList.toArray());
}
resultText.set(sb.toString());
return resultText;
}
Example use of org.apache.hadoop.hive.common.type.HiveDecimal in the Apache Hive project:
class GenericUDFOPMod, method evaluate.
@Override
protected HiveDecimalWritable evaluate(HiveDecimal left, HiveDecimal right) {
// SQL semantics: modulo by zero is NULL, not an exception.
if (right.compareTo(HiveDecimal.ZERO) == 0) {
return null;
}
final HiveDecimal remainder = left.remainder(right);
// remainder() may yield null (presumably when the result is not
// representable — the original guards this too); propagate as NULL.
if (remainder == null) {
return null;
}
// Reuse the writable instance to avoid per-row allocation.
decimalWritable.set(remainder);
return decimalWritable;
}
Example use of org.apache.hadoop.hive.common.type.HiveDecimal in the Apache Hive project:
class GenericUDFPosMod, method evaluate.
@Override
protected HiveDecimalWritable evaluate(HiveDecimal left, HiveDecimal right) {
// Positive modulo: ((left % right) + right) % right. The result always has
// the sign of right (non-negative for positive right), unlike plain %.
// SQL semantics: modulo by zero is NULL, not an exception.
if (right.compareTo(HiveDecimal.ZERO) == 0) {
return null;
}
// Guard each intermediate: HiveDecimal arithmetic may return null (as the
// sibling GenericUDFOPMod checks). The original chained the three calls,
// so a null from the first remainder() caused an NPE at add() and the
// final "dec == null" check could never catch it.
HiveDecimal remainder = left.remainder(right);
if (remainder == null) {
return null;
}
HiveDecimal shifted = remainder.add(right);
if (shifted == null) {
return null;
}
HiveDecimal dec = shifted.remainder(right);
if (dec == null) {
return null;
}
// Reuse the writable instance to avoid per-row allocation.
decimalWritable.set(dec);
return decimalWritable;
}
Aggregations