Use of org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo in the Apache Hive project: class TestGenericUDFOPPositive, method testDecimal.
@Test
public void testDecimal() throws HiveException {
  GenericUDFOPPositive udf = new GenericUDFOPPositive();

  // Unary plus must pass a decimal operand through unchanged, preserving its type.
  DecimalTypeInfo decimalType = TypeInfoFactory.getDecimalTypeInfo(11, 6);
  HiveDecimalWritable operand = new HiveDecimalWritable(HiveDecimal.create("32300.004747"));
  ObjectInspector[] argOIs = {
      PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(decimalType) };

  // The output inspector should carry the same decimal(11,6) type info.
  PrimitiveObjectInspector resultOI = (PrimitiveObjectInspector) udf.initialize(argOIs);
  Assert.assertEquals(decimalType, resultOI.getTypeInfo());

  // The evaluated value should equal the input exactly.
  DeferredObject[] deferredArgs = { new DeferredJavaObject(operand) };
  HiveDecimalWritable result = (HiveDecimalWritable) udf.evaluate(deferredArgs);
  Assert.assertEquals(HiveDecimal.create("32300.004747"), result.getHiveDecimal());
}
Use of org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo in the Apache Hive project: class TestGenericUDFRound, method testDecimalRoundingMetaData1.
@Test
public void testDecimalRoundingMetaData1() throws UDFArgumentException {
  GenericUDFRound udf = new GenericUDFRound();

  // round(decimal(7,3) value, -2): rounding to the left of the decimal point.
  ObjectInspector valueOI =
      PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
          TypeInfoFactory.getDecimalTypeInfo(7, 3));
  ObjectInspector roundingScaleOI =
      PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
          TypeInfoFactory.intTypeInfo, new IntWritable(-2));

  PrimitiveObjectInspector outputOI = (PrimitiveObjectInspector)
      udf.initialize(new ObjectInspector[] { valueOI, roundingScaleOI });

  // The derived output metadata should be decimal(5,0).
  DecimalTypeInfo outputTypeInfo = (DecimalTypeInfo) outputOI.getTypeInfo();
  Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(5, 0), outputTypeInfo);
}
Use of org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo in the Apache Hive project: class TestGenericUDFOPNegative, method testDecimal.
@Test
public void testDecimal() throws HiveException {
  GenericUDFOPNegative udf = new GenericUDFOPNegative();

  // Unary minus on a decimal must keep the type info and flip the sign.
  DecimalTypeInfo decimalType = TypeInfoFactory.getDecimalTypeInfo(11, 6);
  HiveDecimalWritable operand = new HiveDecimalWritable(HiveDecimal.create("32300.004747"));
  ObjectInspector[] argOIs = {
      PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(decimalType) };

  // The output inspector should carry the same decimal(11,6) type info.
  PrimitiveObjectInspector resultOI = (PrimitiveObjectInspector) udf.initialize(argOIs);
  Assert.assertEquals(decimalType, resultOI.getTypeInfo());

  // The evaluated value should be the negation of the input.
  DeferredObject[] deferredArgs = { new DeferredJavaObject(operand) };
  HiveDecimalWritable result = (HiveDecimalWritable) udf.evaluate(deferredArgs);
  Assert.assertEquals(HiveDecimal.create("-32300.004747"), result.getHiveDecimal());
}
Use of org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo in the Apache Hive project: class LazyBinaryDeserializeRead, method readNextField.
/*
 * Reads the next field.
 *
 * Afterwards, reading is positioned to the next field.
 *
 * @return Return true when the field was not null and data is put in the appropriate
 * current* member.
 * Otherwise, false when the field is null.
 *
 */
@Override
public boolean readNextField() throws IOException {
// All fields have already been consumed -- nothing more to read.
if (fieldIndex >= fieldCount) {
return false;
}
fieldStart = offset;
if (fieldIndex == 0) {
// Before the first field comes the NULL byte (bitmap covering fields 0..7).
if (offset >= end) {
throw new EOFException();
}
nullByte = bytes[offset++];
}
// NOTE: The bit is set to 1 if a field is NOT NULL.
if ((nullByte & (1 << (fieldIndex % 8))) == 0) {
// The field is NULL: it occupies no data bytes in the stream.
// Logically move past this field.
fieldIndex++;
// Every 8 fields we read a new NULL byte.
if (fieldIndex < fieldCount) {
if ((fieldIndex % 8) == 0) {
// Get next null byte.
if (offset >= end) {
throw new EOFException();
}
nullByte = bytes[offset++];
}
}
return false;
} else {
// Make sure there is at least one byte that can be read for a value.
if (offset >= end) {
throw new EOFException();
}
/*
 * We have a non-null field and are positioned at its first byte. Read it
 * according to its declared primitive category.
 */
switch(primitiveCategories[fieldIndex]) {
case BOOLEAN:
// No check needed for single byte read.
currentBoolean = (bytes[offset++] != 0);
break;
case BYTE:
// No check needed for single byte read.
currentByte = bytes[offset++];
break;
case SHORT:
// Last item -- ok to be at end.
if (offset + 2 > end) {
throw new EOFException();
}
currentShort = LazyBinaryUtils.byteArrayToShort(bytes, offset);
offset += 2;
break;
case INT:
// Parse the first byte of a vint/vlong to determine the number of bytes.
if (offset + WritableUtils.decodeVIntSize(bytes[offset]) > end) {
throw new EOFException();
}
LazyBinaryUtils.readVInt(bytes, offset, tempVInt);
offset += tempVInt.length;
currentInt = tempVInt.value;
break;
case LONG:
// Parse the first byte of a vint/vlong to determine the number of bytes.
if (offset + WritableUtils.decodeVIntSize(bytes[offset]) > end) {
throw new EOFException();
}
LazyBinaryUtils.readVLong(bytes, offset, tempVLong);
offset += tempVLong.length;
currentLong = tempVLong.value;
break;
case FLOAT:
// Last item -- ok to be at end.
if (offset + 4 > end) {
throw new EOFException();
}
currentFloat = Float.intBitsToFloat(LazyBinaryUtils.byteArrayToInt(bytes, offset));
offset += 4;
break;
case DOUBLE:
// Last item -- ok to be at end.
if (offset + 8 > end) {
throw new EOFException();
}
currentDouble = Double.longBitsToDouble(LazyBinaryUtils.byteArrayToLong(bytes, offset));
offset += 8;
break;
case BINARY:
case STRING:
case CHAR:
case VARCHAR:
{
// Stored as a vint byte length followed by the raw bytes.
// Parse the first byte of a vint/vlong to determine the number of bytes.
if (offset + WritableUtils.decodeVIntSize(bytes[offset]) > end) {
throw new EOFException();
}
LazyBinaryUtils.readVInt(bytes, offset, tempVInt);
offset += tempVInt.length;
int saveStart = offset;
int length = tempVInt.value;
offset += length;
// Last item -- ok to be at end.
if (offset > end) {
throw new EOFException();
}
// Point at the bytes in place -- no copy is made.
currentBytes = bytes;
currentBytesStart = saveStart;
currentBytesLength = length;
}
break;
case DATE:
// Parse the first byte of a vint/vlong to determine the number of bytes.
if (offset + WritableUtils.decodeVIntSize(bytes[offset]) > end) {
throw new EOFException();
}
LazyBinaryUtils.readVInt(bytes, offset, tempVInt);
offset += tempVInt.length;
currentDateWritable.set(tempVInt.value);
break;
case TIMESTAMP:
{
// Timestamps are variable length; the leading byte(s) encode the total length.
int length = TimestampWritable.getTotalLength(bytes, offset);
int saveStart = offset;
offset += length;
// Last item -- ok to be at end.
if (offset > end) {
throw new EOFException();
}
currentTimestampWritable.set(bytes, saveStart);
}
break;
case INTERVAL_YEAR_MONTH:
// Parse the first byte of a vint/vlong to determine the number of bytes.
if (offset + WritableUtils.decodeVIntSize(bytes[offset]) > end) {
throw new EOFException();
}
LazyBinaryUtils.readVInt(bytes, offset, tempVInt);
offset += tempVInt.length;
currentHiveIntervalYearMonthWritable.set(tempVInt.value);
break;
case INTERVAL_DAY_TIME:
// Parse the first byte of a vint/vlong to determine the number of bytes.
// Note: uses >= (not >) because a vint (the nanos part) must still follow
// this vlong, so the vlong may not end the buffer.
if (offset + WritableUtils.decodeVIntSize(bytes[offset]) >= end) {
throw new EOFException();
}
LazyBinaryUtils.readVLong(bytes, offset, tempVLong);
offset += tempVLong.length;
// Parse the first byte of a vint/vlong to determine the number of bytes.
if (offset + WritableUtils.decodeVIntSize(bytes[offset]) > end) {
throw new EOFException();
}
LazyBinaryUtils.readVInt(bytes, offset, tempVInt);
offset += tempVInt.length;
currentHiveIntervalDayTimeWritable.set(tempVLong.value, tempVInt.value);
break;
case DECIMAL:
{
// Parse the first byte of a vint/vlong to determine the number of bytes.
// Note: uses >= (not >) because the scale vint must be followed by a second
// vint (the byte length) and the unscaled-value bytes.
if (offset + WritableUtils.decodeVIntSize(bytes[offset]) >= end) {
throw new EOFException();
}
LazyBinaryUtils.readVInt(bytes, offset, tempVInt);
offset += tempVInt.length;
int readScale = tempVInt.value;
// Parse the first byte of a vint/vlong to determine the number of bytes.
if (offset + WritableUtils.decodeVIntSize(bytes[offset]) > end) {
throw new EOFException();
}
LazyBinaryUtils.readVInt(bytes, offset, tempVInt);
offset += tempVInt.length;
int saveStart = offset;
offset += tempVInt.value;
// Last item -- ok to be at end.
if (offset > end) {
throw new EOFException();
}
int length = offset - saveStart;
// Example encoding -- scale = 2, length = 6, value = -6065716379.11:
// \002\006\255\114\197\131\083\105
// unscaled-value bytes: \255\114\197\131\083\105
currentHiveDecimalWritable.setFromBigIntegerBytesAndScale(bytes, saveStart, length, readScale);
boolean decimalIsNull = !currentHiveDecimalWritable.isSet();
if (!decimalIsNull) {
// Enforce the declared precision/scale of the column; a value that does not
// fit becomes unset, and the field is then treated as NULL even though its
// null bit was clear.
DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfos[fieldIndex];
int precision = decimalTypeInfo.getPrecision();
int scale = decimalTypeInfo.getScale();
decimalIsNull = !currentHiveDecimalWritable.mutateEnforcePrecisionScale(precision, scale);
}
if (decimalIsNull) {
// Logically move past this field.
fieldIndex++;
// Every 8 fields we read a new NULL byte.
if (fieldIndex < fieldCount) {
if ((fieldIndex % 8) == 0) {
// Get next null byte.
if (offset >= end) {
throw new EOFException();
}
nullByte = bytes[offset++];
}
}
return false;
}
}
break;
default:
throw new Error("Unexpected primitive category " + primitiveCategories[fieldIndex].name());
}
}
// Logically move past this field.
fieldIndex++;
// Every 8 fields we read a new NULL byte.
if (fieldIndex < fieldCount) {
if ((fieldIndex % 8) == 0) {
// Get next null byte.
if (offset >= end) {
throw new EOFException();
}
nullByte = bytes[offset++];
}
}
return true;
}
Use of org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo in the Apache Hive project: class WritableConstantHiveDecimalObjectInspector, method getWritableConstantValue.
@Override
public HiveDecimalWritable getWritableConstantValue() {
  // The constant must respect the inspector's declared precision/scale, so
  // enforce it on a fresh writable copy of the value.
  DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
  HiveDecimalWritable writable = new HiveDecimalWritable(value);
  writable.mutateEnforcePrecisionScale(decimalTypeInfo.precision(), decimalTypeInfo.scale());
  // A value that cannot fit the declared type becomes unset; report it as null.
  return writable.isSet() ? writable : null;
}
Aggregations