Use of org.apache.hadoop.hive.serde2.io.HiveDecimalWritable in project hive by apache.
From the class TestGenericUDFCeil, method testDecimal:
@Test
public void testDecimal() throws HiveException {
  GenericUDFCeil udf = new GenericUDFCeil();
  HiveDecimalWritable input = new HiveDecimalWritable(HiveDecimal.create("32300.004747"));
  DecimalTypeInfo inputTypeInfo = TypeInfoFactory.getDecimalTypeInfo(11, 6);
  ObjectInspector[] inputOIs = { PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo) };
  DeferredObject[] args = { new DeferredJavaObject(input) };
  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(6, 0), oi.getTypeInfo());
  HiveDecimalWritable res = (HiveDecimalWritable) udf.evaluate(args);
  Assert.assertEquals(HiveDecimal.create("32301"), res.getHiveDecimal());
}
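A note on the asserted return type, as a minimal sketch. The rule decimal(p, s) -> decimal(p - s + 1, 0) is inferred from the assertion above, not quoted from Hive's type-derivation code:

// Sketch: why ceil(decimal(11, 6)) is typed decimal(6, 0).
// Assumption: ceil() drops all fractional digits and may carry into one
// extra integer digit, so decimal(p, s) maps to decimal(p - s + 1, 0).
int p = 11, s = 6;
int resultPrecision = p - s + 1; // 11 - 6 + 1 = 6
int resultScale = 0;             // matches TypeInfoFactory.getDecimalTypeInfo(6, 0)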
Use of org.apache.hadoop.hive.serde2.io.HiveDecimalWritable in project hive by apache.
From the class TestGenericUDFAbs, method testHiveDecimal:
@Test
public void testHiveDecimal() throws HiveException {
  GenericUDFAbs udf = new GenericUDFAbs();
  int prec = 12;
  int scale = 9;
  ObjectInspector valueOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(prec, scale));
  ObjectInspector[] arguments = { valueOI };
  PrimitiveObjectInspector outputOI = (PrimitiveObjectInspector) udf.initialize(arguments);
  // Make sure the result precision/scale matches the input precision/scale.
  assertEquals("result precision for abs()", prec, outputOI.precision());
  assertEquals("result scale for abs()", scale, outputOI.scale());
  DeferredObject valueObj = new DeferredJavaObject(new HiveDecimalWritable(HiveDecimal.create("107.123456789")));
  DeferredObject[] args = { valueObj };
  HiveDecimalWritable output = (HiveDecimalWritable) udf.evaluate(args);
  assertEquals("abs() test for HiveDecimal failed ", 107.123456789, output.getHiveDecimal().doubleValue(), 1e-15);
  valueObj = new DeferredJavaObject(new HiveDecimalWritable(HiveDecimal.create("-107.123456789")));
  args[0] = valueObj;
  output = (HiveDecimalWritable) udf.evaluate(args);
  assertEquals("abs() test for HiveDecimal failed ", 107.123456789, output.getHiveDecimal().doubleValue(), 1e-15);
  // A null input yields a null result.
  args[0] = new DeferredJavaObject(null);
  output = (HiveDecimalWritable) udf.evaluate(args);
  assertEquals("abs(null)", null, output);
  // A value with more integer digits than precision - scale (12 - 9 = 3) allows is also null.
  args[0] = new DeferredJavaObject(new HiveDecimalWritable(HiveDecimal.create("-1000.123456")));
  output = (HiveDecimalWritable) udf.evaluate(args);
  assertEquals("abs() of too large decimal value", null, output);
}
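The "too large" case can be reproduced directly with HiveDecimal; a minimal sketch, assuming HiveDecimal.enforcePrecisionScale returns null when a value does not fit the given type (its behavior in recent Hive versions):

// -1000.123456 has 4 integer digits, but decimal(12, 9) allows only
// precision - scale = 12 - 9 = 3, so enforcing the type yields null,
// which is why abs() returns null above.
HiveDecimal d = HiveDecimal.create("-1000.123456");
HiveDecimal enforced = HiveDecimal.enforcePrecisionScale(d, 12, 9);
assert enforced == null;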
Use of org.apache.hadoop.hive.serde2.io.HiveDecimalWritable in project hive by apache.
From the class DeserializeRead, method allocateCurrentWritable:
/*
 * This class is used to read one field at a time. Simple fields like long, double, and int are
 * read into primitive current* members; non-simple field types like Date, Timestamp, etc., are
 * read into a current* object that this method allocates.
 *
 * This method handles complex type fields by calling itself recursively.
 */
private void allocateCurrentWritable(TypeInfo typeInfo) {
  switch (typeInfo.getCategory()) {
    case PRIMITIVE:
      switch (((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory()) {
        case DATE:
          if (currentDateWritable == null) {
            currentDateWritable = new DateWritableV2();
          }
          break;
        case TIMESTAMP:
          if (currentTimestampWritable == null) {
            currentTimestampWritable = new TimestampWritableV2();
          }
          break;
        case INTERVAL_YEAR_MONTH:
          if (currentHiveIntervalYearMonthWritable == null) {
            currentHiveIntervalYearMonthWritable = new HiveIntervalYearMonthWritable();
          }
          break;
        case INTERVAL_DAY_TIME:
          if (currentHiveIntervalDayTimeWritable == null) {
            currentHiveIntervalDayTimeWritable = new HiveIntervalDayTimeWritable();
          }
          break;
        case DECIMAL:
          if (currentHiveDecimalWritable == null) {
            currentHiveDecimalWritable = new HiveDecimalWritable();
          }
          break;
        default:
          // Simple primitives (long, double, int, ...) need no allocated writable.
          break;
      }
      break;
    case LIST:
      allocateCurrentWritable(((ListTypeInfo) typeInfo).getListElementTypeInfo());
      break;
    case MAP:
      allocateCurrentWritable(((MapTypeInfo) typeInfo).getMapKeyTypeInfo());
      allocateCurrentWritable(((MapTypeInfo) typeInfo).getMapValueTypeInfo());
      break;
    case STRUCT:
      for (TypeInfo fieldTypeInfo : ((StructTypeInfo) typeInfo).getAllStructFieldTypeInfos()) {
        allocateCurrentWritable(fieldTypeInfo);
      }
      break;
    case UNION:
      for (TypeInfo fieldTypeInfo : ((UnionTypeInfo) typeInfo).getAllUnionObjectTypeInfos()) {
        allocateCurrentWritable(fieldTypeInfo);
      }
      break;
    default:
      throw new RuntimeException("Unexpected category " + typeInfo.getCategory());
  }
}
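A hypothetical call, from inside DeserializeRead, illustrating the recursion (the type string here is made up for the example):

// For a nested type, the method walks the TypeInfo tree and allocates a
// writable for every non-simple leaf it reaches.
TypeInfo mapType = TypeInfoUtils.getTypeInfoFromTypeString("map<date,decimal(10,2)>");
allocateCurrentWritable(mapType);
// Afterwards, currentDateWritable and currentHiveDecimalWritable are both non-null.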
Use of org.apache.hadoop.hive.serde2.io.HiveDecimalWritable in project hive by apache.
From the class TestTeradataBinarySerdeGeneral, method testDeserializeAndSerialize:
@Test
public void testDeserializeAndSerialize() throws Exception {
  BytesWritable in = new BytesWritable(BaseEncoding.base16().lowerCase().decode(
      "00004e6f762020202020201b006120646179203d2031312f31312f31312020202020202020203435ec10000000000000c5feffff"
      + "7707010000000000002a40ef2b3dab0d14e6531c8908a72700000007b20100313931312d31312d31312031393a32303a32312e34"
      + "33333230301b00746573743a20202020202020343333322020202020202020333135"));
  List<Object> row = (List<Object>) serde.deserialize(in);
  Assert.assertEquals("Nov", ((HiveCharWritable) row.get(0)).toString());
  Assert.assertEquals("a day = 11/11/11 45", ((HiveVarcharWritable) row.get(1)).toString());
  Assert.assertEquals(4332L, ((LongWritable) row.get(2)).get());
  Assert.assertEquals(-315, ((IntWritable) row.get(3)).get());
  Assert.assertEquals((short) 1911, ((ShortWritable) row.get(4)).get());
  Assert.assertEquals((byte) 1, ((ByteWritable) row.get(5)).get());
  Assert.assertEquals((double) 13, ((DoubleWritable) row.get(6)).get(), 0);
  Assert.assertEquals(30, ((HiveDecimalWritable) row.get(7)).getScale());
  Assert.assertEquals((double) 3.141592653589793238462643383279, ((HiveDecimalWritable) row.get(7)).getHiveDecimal().doubleValue(), 0);
  Assert.assertEquals("1911-11-11", ((DateWritableV2) row.get(8)).toString());
  Assert.assertEquals("1911-11-11 19:20:21.4332", ((TimestampWritableV2) row.get(9)).toString());
  Assert.assertEquals(27, ((BytesWritable) row.get(10)).getLength());
  BytesWritable res = (BytesWritable) serde.serialize(row, serde.getObjectInspector());
  Assert.assertTrue(Arrays.equals(in.copyBytes(), res.copyBytes()));
}
Use of org.apache.hadoop.hive.serde2.io.HiveDecimalWritable in project hive by apache.
From the class TestVectorCastStatement, method doIfTestOneCast:
private void doIfTestOneCast(Random random, String typeName, DataTypePhysicalVariation dataTypePhysicalVariation,
    PrimitiveCategory targetPrimitiveCategory) throws Exception {
  TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(typeName);
  PrimitiveCategory primitiveCategory = ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory();
  boolean isDecimal64 = (dataTypePhysicalVariation == DataTypePhysicalVariation.DECIMAL_64);
  final int decimal64Scale = (isDecimal64 ? ((DecimalTypeInfo) typeInfo).getScale() : 0);
  // ----------------------------------------------------------------------------------------------
  String targetTypeName;
  if (targetPrimitiveCategory == PrimitiveCategory.BYTE) {
    targetTypeName = "tinyint";
  } else if (targetPrimitiveCategory == PrimitiveCategory.SHORT) {
    targetTypeName = "smallint";
  } else if (targetPrimitiveCategory == PrimitiveCategory.LONG) {
    targetTypeName = "bigint";
  } else {
    targetTypeName = targetPrimitiveCategory.name().toLowerCase();
  }
  targetTypeName = VectorRandomRowSource.getDecoratedTypeName(random, targetTypeName);
  TypeInfo targetTypeInfo = TypeInfoUtils.getTypeInfoFromTypeString(targetTypeName);
  // ----------------------------------------------------------------------------------------------
  GenerationSpec generationSpec;
  if (needsValidDataTypeData(targetTypeInfo) &&
      (primitiveCategory == PrimitiveCategory.STRING ||
       primitiveCategory == PrimitiveCategory.CHAR ||
       primitiveCategory == PrimitiveCategory.VARCHAR)) {
    generationSpec = GenerationSpec.createStringFamilyOtherTypeValue(typeInfo, targetTypeInfo);
  } else {
    generationSpec = GenerationSpec.createSameType(typeInfo);
  }
  List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
  List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList = new ArrayList<DataTypePhysicalVariation>();
  generationSpecList.add(generationSpec);
  explicitDataTypePhysicalVariationList.add(dataTypePhysicalVariation);
  VectorRandomRowSource rowSource = new VectorRandomRowSource();
  rowSource.initGenerationSpecSchema(random, generationSpecList,
      /* maxComplexDepth */ 0,
      /* allowNull */ true,
      /* isUnicodeOk */ true,
      explicitDataTypePhysicalVariationList);
  List<String> columns = new ArrayList<String>();
  columns.add("col1");
  ExprNodeColumnDesc col1Expr = new ExprNodeColumnDesc(typeInfo, "col1", "table", false);
  List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
  children.add(col1Expr);
  String[] columnNames = columns.toArray(new String[0]);
  Object[][] randomRows = rowSource.randomRows(100000);
  VectorRandomBatchSource batchSource = VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);
  final int rowCount = randomRows.length;
  Object[][] resultObjectsArray = new Object[CastStmtTestMode.count][];
  for (int i = 0; i < CastStmtTestMode.count; i++) {
    Object[] resultObjects = new Object[rowCount];
    resultObjectsArray[i] = resultObjects;
    CastStmtTestMode ifStmtTestMode = CastStmtTestMode.values()[i];
    switch (ifStmtTestMode) {
      case ROW_MODE:
        if (!doRowCastTest(typeInfo, targetTypeInfo, columns, children, randomRows, rowSource.rowStructObjectInspector(), resultObjects)) {
          return;
        }
        break;
      case ADAPTOR:
      case VECTOR_EXPRESSION:
        if (!doVectorCastTest(typeInfo, targetTypeInfo, columns, columnNames, rowSource.typeInfos(), rowSource.dataTypePhysicalVariations(), children, ifStmtTestMode, batchSource, resultObjects)) {
          return;
        }
        break;
      default:
        throw new RuntimeException("Unexpected CAST statement test mode " + ifStmtTestMode);
    }
  }
  for (int i = 0; i < rowCount; i++) {
    // The row-mode result is the expected value.
    Object expectedResult = resultObjectsArray[0][i];
    for (int v = 1; v < CastStmtTestMode.count; v++) {
      Object vectorResult = resultObjectsArray[v][i];
      if (expectedResult == null || vectorResult == null) {
        if (expectedResult != null || vectorResult != null) {
          Assert.fail("Row " + i + " sourceTypeName " + typeName + " targetTypeName " + targetTypeName +
              " " + CastStmtTestMode.values()[v] +
              " result is NULL " + (vectorResult == null ? "YES" : "NO result " + vectorResult.toString()) +
              " does not match row-mode expected result is NULL " +
              (expectedResult == null ? "YES" : "NO result " + expectedResult.toString()) +
              " row values " + Arrays.toString(randomRows[i]));
        }
      } else {
        if (isDecimal64 && expectedResult instanceof LongWritable) {
          // Row mode produces a scaled long for DECIMAL_64; convert it to a
          // HiveDecimalWritable before comparing with the vector result.
          HiveDecimalWritable expectedHiveDecimalWritable = new HiveDecimalWritable(0);
          expectedHiveDecimalWritable.deserialize64(((LongWritable) expectedResult).get(), decimal64Scale);
          expectedResult = expectedHiveDecimalWritable;
        }
        if (!expectedResult.equals(vectorResult)) {
          Assert.fail("Row " + i + " sourceTypeName " + typeName + " targetTypeName " + targetTypeName +
              " " + CastStmtTestMode.values()[v] +
              " result " + vectorResult.toString() + " (" + vectorResult.getClass().getSimpleName() + ")" +
              " does not match row-mode expected result " + expectedResult.toString() +
              " (" + expectedResult.getClass().getSimpleName() + ")" +
              " row values " + Arrays.toString(randomRows[i]));
        }
      }
    }
  }
}
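The deserialize64 call above converts a DECIMAL_64 value, a decimal stored as a scaled long, back into a HiveDecimalWritable for comparison; a minimal sketch of that representation (the values are chosen purely for illustration):

// With scale 2, the decimal 12.34 is stored in DECIMAL_64 form as the long 1234.
HiveDecimalWritable w = new HiveDecimalWritable(0);
w.deserialize64(1234L, 2);
// w.getHiveDecimal() now equals HiveDecimal.create("12.34")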