Example 71 with HiveDecimalWritable

Use of org.apache.hadoop.hive.serde2.io.HiveDecimalWritable in project hive by apache.

The class TestGenericUDFCeil, method testDecimal.

@Test
public void testDecimal() throws HiveException {
    GenericUDFCeil udf = new GenericUDFCeil();
    HiveDecimalWritable input = new HiveDecimalWritable(HiveDecimal.create("32300.004747"));
    DecimalTypeInfo inputTypeInfo = TypeInfoFactory.getDecimalTypeInfo(11, 6);
    ObjectInspector[] inputOIs = { PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo) };
    DeferredObject[] args = { new DeferredJavaObject(input) };
    PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
    // ceil(decimal(11,6)) returns decimal(6,0): the fractional digits drop and one integer digit is added.
    Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(6, 0), oi.getTypeInfo());
    HiveDecimalWritable res = (HiveDecimalWritable) udf.evaluate(args);
    Assert.assertEquals(HiveDecimal.create("32301"), res.getHiveDecimal());
}
Also used : DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) DeferredJavaObject(org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) DeferredObject(org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject) Test(org.junit.Test)
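
The asserted return type follows Hive's decimal typing rule for ceil: the fractional digits drop and one extra integer digit may appear, so decimal(p, s) becomes decimal(p - s + 1, 0), here decimal(6, 0). Below is a minimal sketch of the same rounding step, assuming HiveDecimal.setScale(int, int) and the ROUND_CEILING constant behave as in current Hive releases:

@Test
public void testCeilRoundingSketch() {
    // Round 32300.004747 toward positive infinity at scale 0, as ceil does.
    HiveDecimal input = HiveDecimal.create("32300.004747");
    HiveDecimal ceiled = input.setScale(0, HiveDecimal.ROUND_CEILING);
    Assert.assertEquals(HiveDecimal.create("32301"), ceiled);
}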

Example 72 with HiveDecimalWritable

Use of org.apache.hadoop.hive.serde2.io.HiveDecimalWritable in project hive by apache.

The class TestGenericUDFAbs, method testHiveDecimal.

@Test
public void testHiveDecimal() throws HiveException {
    GenericUDFAbs udf = new GenericUDFAbs();
    int prec = 12;
    int scale = 9;
    ObjectInspector valueOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(prec, scale));
    ObjectInspector[] arguments = { valueOI };
    PrimitiveObjectInspector outputOI = (PrimitiveObjectInspector) udf.initialize(arguments);
    // Make sure result precision/scale matches the input prec/scale
    assertEquals("result precision for abs()", prec, outputOI.precision());
    assertEquals("result scale for abs()", scale, outputOI.scale());
    DeferredObject valueObj = new DeferredJavaObject(new HiveDecimalWritable(HiveDecimal.create("107.123456789")));
    DeferredObject[] args = { valueObj };
    HiveDecimalWritable output = (HiveDecimalWritable) udf.evaluate(args);
    assertEquals("abs() test for HiveDecimal failed ", 107.123456789, output.getHiveDecimal().doubleValue(), 1e-15);
    valueObj = new DeferredJavaObject(new HiveDecimalWritable(HiveDecimal.create("-107.123456789")));
    args[0] = valueObj;
    output = (HiveDecimalWritable) udf.evaluate(args);
    assertEquals("abs() test for HiveDecimal failed ", 107.123456789, output.getHiveDecimal().doubleValue(), 1e-15);
    // null input
    args[0] = new DeferredJavaObject(null);
    output = (HiveDecimalWritable) udf.evaluate(args);
    assertEquals("abs(null)", null, output);
    // a value that exceeds the declared decimal(12,9) should also yield null
    args[0] = new DeferredJavaObject(new HiveDecimalWritable(HiveDecimal.create("-1000.123456")));
    output = (HiveDecimalWritable) udf.evaluate(args);
    assertEquals("abs() of too large decimal value", null, output);
}
Also used : PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) DeferredJavaObject(org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject) GenericUDFAbs(org.apache.hadoop.hive.ql.udf.generic.GenericUDFAbs) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) DeferredObject(org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject) Test(org.junit.Test)
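
The last assertion hinges on precision/scale enforcement rather than on abs() itself: abs(-1000.123456) needs four integer digits plus the declared scale of 9, i.e. precision 13, which does not fit decimal(12, 9), so the UDF returns null. A sketch of that check in isolation, assuming the static HiveDecimal.enforcePrecisionScale helper behaves as in current Hive releases:

@Test
public void testEnforcePrecisionScaleSketch() {
    HiveDecimal value = HiveDecimal.create("-1000.123456").abs();
    // 1000.123456 at scale 9 would need precision 13, so enforcement yields null.
    Assert.assertNull(HiveDecimal.enforcePrecisionScale(value, 12, 9));
}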

Example 73 with HiveDecimalWritable

Use of org.apache.hadoop.hive.serde2.io.HiveDecimalWritable in project hive by apache.

The class DeserializeRead, method allocateCurrentWritable.

/*
   * This class is used to read one field at a time. Simple fields like long, double, and int are
   * read into primitive current* members; non-simple field types like Date, Timestamp, etc., are
   * read into a current object that this method allocates.
   *
   * Complex type fields are handled by calling this method recursively.
   */
private void allocateCurrentWritable(TypeInfo typeInfo) {
    switch(typeInfo.getCategory()) {
        case PRIMITIVE:
            switch(((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory()) {
                case DATE:
                    if (currentDateWritable == null) {
                        currentDateWritable = new DateWritableV2();
                    }
                    break;
                case TIMESTAMP:
                    if (currentTimestampWritable == null) {
                        currentTimestampWritable = new TimestampWritableV2();
                    }
                    break;
                case INTERVAL_YEAR_MONTH:
                    if (currentHiveIntervalYearMonthWritable == null) {
                        currentHiveIntervalYearMonthWritable = new HiveIntervalYearMonthWritable();
                    }
                    break;
                case INTERVAL_DAY_TIME:
                    if (currentHiveIntervalDayTimeWritable == null) {
                        currentHiveIntervalDayTimeWritable = new HiveIntervalDayTimeWritable();
                    }
                    break;
                case DECIMAL:
                    if (currentHiveDecimalWritable == null) {
                        currentHiveDecimalWritable = new HiveDecimalWritable();
                    }
                    break;
                default:
            }
            break;
        case LIST:
            allocateCurrentWritable(((ListTypeInfo) typeInfo).getListElementTypeInfo());
            break;
        case MAP:
            allocateCurrentWritable(((MapTypeInfo) typeInfo).getMapKeyTypeInfo());
            allocateCurrentWritable(((MapTypeInfo) typeInfo).getMapValueTypeInfo());
            break;
        case STRUCT:
            for (TypeInfo fieldTypeInfo : ((StructTypeInfo) typeInfo).getAllStructFieldTypeInfos()) {
                allocateCurrentWritable(fieldTypeInfo);
            }
            break;
        case UNION:
            for (TypeInfo fieldTypeInfo : ((UnionTypeInfo) typeInfo).getAllUnionObjectTypeInfos()) {
                allocateCurrentWritable(fieldTypeInfo);
            }
            break;
        default:
            throw new RuntimeException("Unexpected category " + typeInfo.getCategory());
    }
}
Also used : HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) DateWritableV2(org.apache.hadoop.hive.serde2.io.DateWritableV2) HiveIntervalDayTimeWritable(org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) HiveIntervalYearMonthWritable(org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable) TimestampWritableV2(org.apache.hadoop.hive.serde2.io.TimestampWritableV2) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) MapTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo) UnionTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo) ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo)
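
Because the method recurses through LIST, MAP, STRUCT, and UNION, a single call covers an arbitrarily nested schema, and each DECIMAL leaf allocates the shared currentHiveDecimalWritable at most once. A hypothetical call site inside a DeserializeRead subclass, using the standard TypeInfoUtils factory to build the nested type:

// Hypothetical usage: one call allocates every current* writable the leaves
// need (here a DateWritableV2 and a HiveDecimalWritable).
TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString("struct<d:date,m:map<string,decimal(10,2)>>");
allocateCurrentWritable(typeInfo);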

Example 74 with HiveDecimalWritable

Use of org.apache.hadoop.hive.serde2.io.HiveDecimalWritable in project hive by apache.

The class TestTeradataBinarySerdeGeneral, method testDeserializeAndSerialize.

@Test
public void testDeserializeAndSerialize() throws Exception {
    BytesWritable in = new BytesWritable(BaseEncoding.base16().lowerCase().decode("00004e6f762020202020201b006120646179203d2031312f31312f31312020202020202020203435ec10000000000000c5feffff" + "7707010000000000002a40ef2b3dab0d14e6531c8908a72700000007b20100313931312d31312d31312031393a32303a32312e34" + "33333230301b00746573743a20202020202020343333322020202020202020333135"));
    List<Object> row = (List<Object>) serde.deserialize(in);
    Assert.assertEquals("Nov", ((HiveCharWritable) row.get(0)).toString());
    Assert.assertEquals("a day = 11/11/11         45", ((HiveVarcharWritable) row.get(1)).toString());
    Assert.assertEquals(4332L, ((LongWritable) row.get(2)).get());
    Assert.assertEquals(-315, ((IntWritable) row.get(3)).get());
    Assert.assertEquals((short) 1911, ((ShortWritable) row.get(4)).get());
    Assert.assertEquals((byte) 1, ((ByteWritable) row.get(5)).get());
    Assert.assertEquals((double) 13, ((DoubleWritable) row.get(6)).get(), 0);
    Assert.assertEquals(30, ((HiveDecimalWritable) row.get(7)).getScale());
    Assert.assertEquals((double) 3.141592653589793238462643383279, ((HiveDecimalWritable) row.get(7)).getHiveDecimal().doubleValue(), 0);
    Assert.assertEquals("1911-11-11", ((DateWritableV2) row.get(8)).toString());
    Assert.assertEquals("1911-11-11 19:20:21.4332", ((TimestampWritableV2) row.get(9)).toString());
    Assert.assertEquals(27, ((BytesWritable) row.get(10)).getLength());
    BytesWritable res = (BytesWritable) serde.serialize(row, serde.getObjectInspector());
    Assert.assertTrue(Arrays.equals(in.copyBytes(), res.copyBytes()));
}
Also used : HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) List(java.util.List) Test(org.junit.Test)
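
The fixture is plain lowercase base16 decoded with Guava's BaseEncoding; for example, the bytes 4e6f76 near the start of the record spell the first field's value. A sketch of just the decoding step (the three-byte input is a slice of the fixture above, not a complete Teradata record):

// 0x4e, 0x6f, 0x76 are the ASCII codes for 'N', 'o', 'v'.
byte[] raw = BaseEncoding.base16().lowerCase().decode("4e6f76");
Assert.assertEquals("Nov", new String(raw, StandardCharsets.US_ASCII));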

Example 75 with HiveDecimalWritable

Use of org.apache.hadoop.hive.serde2.io.HiveDecimalWritable in project hive by apache.

The class TestVectorCastStatement, method doIfTestOneCast.

private void doIfTestOneCast(Random random, String typeName, DataTypePhysicalVariation dataTypePhysicalVariation, PrimitiveCategory targetPrimitiveCategory) throws Exception {
    TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(typeName);
    PrimitiveCategory primitiveCategory = ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory();
    boolean isDecimal64 = (dataTypePhysicalVariation == DataTypePhysicalVariation.DECIMAL_64);
    final int decimal64Scale = (isDecimal64 ? ((DecimalTypeInfo) typeInfo).getScale() : 0);
    // ----------------------------------------------------------------------------------------------
    String targetTypeName;
    if (targetPrimitiveCategory == PrimitiveCategory.BYTE) {
        targetTypeName = "tinyint";
    } else if (targetPrimitiveCategory == PrimitiveCategory.SHORT) {
        targetTypeName = "smallint";
    } else if (targetPrimitiveCategory == PrimitiveCategory.LONG) {
        targetTypeName = "bigint";
    } else {
        targetTypeName = targetPrimitiveCategory.name().toLowerCase();
    }
    targetTypeName = VectorRandomRowSource.getDecoratedTypeName(random, targetTypeName);
    TypeInfo targetTypeInfo = TypeInfoUtils.getTypeInfoFromTypeString(targetTypeName);
    // ----------------------------------------------------------------------------------------------
    GenerationSpec generationSpec;
    if (needsValidDataTypeData(targetTypeInfo) && (primitiveCategory == PrimitiveCategory.STRING || primitiveCategory == PrimitiveCategory.CHAR || primitiveCategory == PrimitiveCategory.VARCHAR)) {
        generationSpec = GenerationSpec.createStringFamilyOtherTypeValue(typeInfo, targetTypeInfo);
    } else {
        generationSpec = GenerationSpec.createSameType(typeInfo);
    }
    List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
    List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList = new ArrayList<DataTypePhysicalVariation>();
    generationSpecList.add(generationSpec);
    explicitDataTypePhysicalVariationList.add(dataTypePhysicalVariation);
    VectorRandomRowSource rowSource = new VectorRandomRowSource();
    rowSource.initGenerationSpecSchema(random, generationSpecList,
        /* maxComplexDepth */ 0,
        /* allowNull */ true,
        /* isUnicodeOk */ true,
        explicitDataTypePhysicalVariationList);
    List<String> columns = new ArrayList<String>();
    columns.add("col1");
    ExprNodeColumnDesc col1Expr = new ExprNodeColumnDesc(typeInfo, "col1", "table", false);
    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
    children.add(col1Expr);
    String[] columnNames = columns.toArray(new String[0]);
    Object[][] randomRows = rowSource.randomRows(100000);
    VectorRandomBatchSource batchSource = VectorRandomBatchSource.createInterestingBatches(random, rowSource, randomRows, null);
    final int rowCount = randomRows.length;
    Object[][] resultObjectsArray = new Object[CastStmtTestMode.count][];
    for (int i = 0; i < CastStmtTestMode.count; i++) {
        Object[] resultObjects = new Object[rowCount];
        resultObjectsArray[i] = resultObjects;
        CastStmtTestMode ifStmtTestMode = CastStmtTestMode.values()[i];
        switch(ifStmtTestMode) {
            case ROW_MODE:
                if (!doRowCastTest(typeInfo, targetTypeInfo, columns, children, randomRows, rowSource.rowStructObjectInspector(), resultObjects)) {
                    return;
                }
                break;
            case ADAPTOR:
            case VECTOR_EXPRESSION:
                if (!doVectorCastTest(typeInfo, targetTypeInfo, columns, columnNames, rowSource.typeInfos(), rowSource.dataTypePhysicalVariations(), children, ifStmtTestMode, batchSource, resultObjects)) {
                    return;
                }
                break;
            default:
                throw new RuntimeException("Unexpected IF statement test mode " + ifStmtTestMode);
        }
    }
    for (int i = 0; i < rowCount; i++) {
        // Row-mode is the expected value.
        Object expectedResult = resultObjectsArray[0][i];
        for (int v = 1; v < CastStmtTestMode.count; v++) {
            Object vectorResult = resultObjectsArray[v][i];
            if (expectedResult == null || vectorResult == null) {
                if (expectedResult != null || vectorResult != null) {
                    Assert.fail("Row " + i + " sourceTypeName " + typeName + " targetTypeName " + targetTypeName + " " + CastStmtTestMode.values()[v] + " result is NULL " + (vectorResult == null ? "YES" : "NO result " + vectorResult.toString()) + " does not match row-mode expected result is NULL " + (expectedResult == null ? "YES" : "NO result " + expectedResult.toString()) + " row values " + Arrays.toString(randomRows[i]));
                }
            } else {
                if (isDecimal64 && expectedResult instanceof LongWritable) {
                    HiveDecimalWritable expectedHiveDecimalWritable = new HiveDecimalWritable(0);
                    expectedHiveDecimalWritable.deserialize64(((LongWritable) expectedResult).get(), decimal64Scale);
                    expectedResult = expectedHiveDecimalWritable;
                }
                if (!expectedResult.equals(vectorResult)) {
                    Assert.fail("Row " + i + " sourceTypeName " + typeName + " targetTypeName " + targetTypeName + " " + CastStmtTestMode.values()[v] + " result " + vectorResult.toString() + " (" + vectorResult.getClass().getSimpleName() + ")" + " does not match row-mode expected result " + expectedResult.toString() + " (" + expectedResult.getClass().getSimpleName() + ")" + " row values " + Arrays.toString(randomRows[i]));
                }
            }
        }
    }
}
Also used : ArrayList(java.util.ArrayList) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) DataTypePhysicalVariation(org.apache.hadoop.hive.common.type.DataTypePhysicalVariation) ExprNodeColumnDesc(org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) LongWritable(org.apache.hadoop.io.LongWritable) PrimitiveCategory(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory) VectorRandomBatchSource(org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) GenerationSpec(org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec) VectorRandomRowSource(org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource)
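
The DECIMAL_64 branch near the end is the step worth isolating: in decimal64 mode the expected row-mode result arrives as a LongWritable holding the scaled value, and deserialize64 rebuilds the full decimal before the equality check. The same conversion on its own, assuming deserialize64(long, int) as declared on the storage-api HiveDecimalWritable:

// 32301004747 at scale 6 represents the decimal 32301.004747.
HiveDecimalWritable writable = new HiveDecimalWritable(0);
writable.deserialize64(32301004747L, /* scale */ 6);
Assert.assertEquals(HiveDecimal.create("32301.004747"), writable.getHiveDecimal());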

Aggregations

HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable): 185 uses
Test (org.junit.Test): 42 uses
LongWritable (org.apache.hadoop.io.LongWritable): 39 uses
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 36 uses
HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal): 35 uses
IntWritable (org.apache.hadoop.io.IntWritable): 35 uses
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 34 uses
DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo): 31 uses
ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable): 30 uses
Text (org.apache.hadoop.io.Text): 30 uses
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector): 28 uses
BytesWritable (org.apache.hadoop.io.BytesWritable): 28 uses
FloatWritable (org.apache.hadoop.io.FloatWritable): 28 uses
HiveChar (org.apache.hadoop.hive.common.type.HiveChar): 27 uses
ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable): 27 uses
BooleanWritable (org.apache.hadoop.io.BooleanWritable): 27 uses
HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar): 26 uses
DeferredJavaObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject): 26 uses
DeferredObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject): 26 uses
DecimalColumnVector (org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector): 25 uses