
Example 1 with DecimalTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo in project hive by apache.

From the class GenericUDTFGetSplits, method convertTypeString:

// Maps a Hive type string to an LLAP TypeDesc, preserving length for char/varchar
// and precision/scale for decimal; only primitive types are supported.
private TypeDesc convertTypeString(String typeString) throws HiveException {
    TypeDesc typeDesc;
    TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(typeString);
    Preconditions.checkState(typeInfo.getCategory() == ObjectInspector.Category.PRIMITIVE, "Unsupported non-primitive type " + typeString);
    switch(((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory()) {
        case BOOLEAN:
            typeDesc = new TypeDesc(TypeDesc.Type.BOOLEAN);
            break;
        case BYTE:
            typeDesc = new TypeDesc(TypeDesc.Type.TINYINT);
            break;
        case SHORT:
            typeDesc = new TypeDesc(TypeDesc.Type.SMALLINT);
            break;
        case INT:
            typeDesc = new TypeDesc(TypeDesc.Type.INT);
            break;
        case LONG:
            typeDesc = new TypeDesc(TypeDesc.Type.BIGINT);
            break;
        case FLOAT:
            typeDesc = new TypeDesc(TypeDesc.Type.FLOAT);
            break;
        case DOUBLE:
            typeDesc = new TypeDesc(TypeDesc.Type.DOUBLE);
            break;
        case STRING:
            typeDesc = new TypeDesc(TypeDesc.Type.STRING);
            break;
        case CHAR:
            CharTypeInfo charTypeInfo = (CharTypeInfo) typeInfo;
            typeDesc = new TypeDesc(TypeDesc.Type.CHAR, charTypeInfo.getLength());
            break;
        case VARCHAR:
            VarcharTypeInfo varcharTypeInfo = (VarcharTypeInfo) typeInfo;
            typeDesc = new TypeDesc(TypeDesc.Type.VARCHAR, varcharTypeInfo.getLength());
            break;
        case DATE:
            typeDesc = new TypeDesc(TypeDesc.Type.DATE);
            break;
        case TIMESTAMP:
            typeDesc = new TypeDesc(TypeDesc.Type.TIMESTAMP);
            break;
        case BINARY:
            typeDesc = new TypeDesc(TypeDesc.Type.BINARY);
            break;
        case DECIMAL:
            DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
            typeDesc = new TypeDesc(TypeDesc.Type.DECIMAL, decimalTypeInfo.getPrecision(), decimalTypeInfo.getScale());
            break;
        default:
            throw new HiveException("Unsupported type " + typeString);
    }
    return typeDesc;
}
Also used : DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) TypeDesc(org.apache.hadoop.hive.llap.TypeDesc) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo)
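For orientation, here is a minimal, self-contained sketch (not part of the Hive source above) of how a decimal type string is parsed into a DecimalTypeInfo carrying precision and scale; the type string decimal(10,2) is an illustrative choice:

import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class DecimalTypeStringSketch {
    public static void main(String[] args) {
        // Parse a Hive type string into a TypeInfo; "decimal(10,2)" is an arbitrary example.
        TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString("decimal(10,2)");
        DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
        // Precision and scale are recovered from the parsed type string.
        System.out.println(decimalTypeInfo.getPrecision() + "," + decimalTypeInfo.getScale()); // 10,2
    }
}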

Example 2 with DecimalTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo in project hive by apache.

From the class TestVectorSerDeRow, method serializeRow:

// Writes each field of the row through SerializeWrite, dispatching on the column's
// primitive category; decimal values are written with the column type's declared scale.
private Output serializeRow(Object[] row, VectorRandomRowSource source, SerializeWrite serializeWrite) throws HiveException, IOException {
    Output output = new Output();
    serializeWrite.set(output);
    PrimitiveTypeInfo[] primitiveTypeInfos = source.primitiveTypeInfos();
    for (int i = 0; i < primitiveTypeInfos.length; i++) {
        Object object = row[i];
        PrimitiveCategory primitiveCategory = primitiveTypeInfos[i].getPrimitiveCategory();
        switch(primitiveCategory) {
            case BOOLEAN:
                {
                    BooleanWritable expectedWritable = (BooleanWritable) object;
                    boolean value = expectedWritable.get();
                    serializeWrite.writeBoolean(value);
                }
                break;
            case BYTE:
                {
                    ByteWritable expectedWritable = (ByteWritable) object;
                    byte value = expectedWritable.get();
                    serializeWrite.writeByte(value);
                }
                break;
            case SHORT:
                {
                    ShortWritable expectedWritable = (ShortWritable) object;
                    short value = expectedWritable.get();
                    serializeWrite.writeShort(value);
                }
                break;
            case INT:
                {
                    IntWritable expectedWritable = (IntWritable) object;
                    int value = expectedWritable.get();
                    serializeWrite.writeInt(value);
                }
                break;
            case LONG:
                {
                    LongWritable expectedWritable = (LongWritable) object;
                    long value = expectedWritable.get();
                    serializeWrite.writeLong(value);
                }
                break;
            case DATE:
                {
                    DateWritable expectedWritable = (DateWritable) object;
                    Date value = expectedWritable.get();
                    serializeWrite.writeDate(value);
                }
                break;
            case FLOAT:
                {
                    FloatWritable expectedWritable = (FloatWritable) object;
                    float value = expectedWritable.get();
                    serializeWrite.writeFloat(value);
                }
                break;
            case DOUBLE:
                {
                    DoubleWritable expectedWritable = (DoubleWritable) object;
                    double value = expectedWritable.get();
                    serializeWrite.writeDouble(value);
                }
                break;
            case STRING:
                {
                    Text text = (Text) object;
                    serializeWrite.writeString(text.getBytes(), 0, text.getLength());
                }
                break;
            case CHAR:
                {
                    HiveCharWritable expectedWritable = (HiveCharWritable) object;
                    HiveChar value = expectedWritable.getHiveChar();
                    serializeWrite.writeHiveChar(value);
                }
                break;
            case VARCHAR:
                {
                    HiveVarcharWritable expectedWritable = (HiveVarcharWritable) object;
                    HiveVarchar value = expectedWritable.getHiveVarchar();
                    serializeWrite.writeHiveVarchar(value);
                }
                break;
            case BINARY:
                {
                    BytesWritable expectedWritable = (BytesWritable) object;
                    byte[] bytes = expectedWritable.getBytes();
                    int length = expectedWritable.getLength();
                    serializeWrite.writeBinary(bytes, 0, length);
                }
                break;
            case TIMESTAMP:
                {
                    TimestampWritable expectedWritable = (TimestampWritable) object;
                    Timestamp value = expectedWritable.getTimestamp();
                    serializeWrite.writeTimestamp(value);
                }
                break;
            case INTERVAL_YEAR_MONTH:
                {
                    HiveIntervalYearMonthWritable expectedWritable = (HiveIntervalYearMonthWritable) object;
                    HiveIntervalYearMonth value = expectedWritable.getHiveIntervalYearMonth();
                    serializeWrite.writeHiveIntervalYearMonth(value);
                }
                break;
            case INTERVAL_DAY_TIME:
                {
                    HiveIntervalDayTimeWritable expectedWritable = (HiveIntervalDayTimeWritable) object;
                    HiveIntervalDayTime value = expectedWritable.getHiveIntervalDayTime();
                    serializeWrite.writeHiveIntervalDayTime(value);
                }
                break;
            case DECIMAL:
                {
                    HiveDecimalWritable expectedWritable = (HiveDecimalWritable) object;
                    HiveDecimal value = expectedWritable.getHiveDecimal();
                    serializeWrite.writeHiveDecimal(value, ((DecimalTypeInfo) primitiveTypeInfos[i]).scale());
                }
                break;
            default:
                throw new HiveException("Unexpected primitive category " + primitiveCategory);
        }
    }
    return output;
}
Also used : HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) TimestampWritable(org.apache.hadoop.hive.serde2.io.TimestampWritable) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) Timestamp(java.sql.Timestamp) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) Output(org.apache.hadoop.hive.serde2.ByteStream.Output) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) LongWritable(org.apache.hadoop.io.LongWritable) PrimitiveCategory(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) IntWritable(org.apache.hadoop.io.IntWritable) HiveIntervalDayTime(org.apache.hadoop.hive.common.type.HiveIntervalDayTime) DateWritable(org.apache.hadoop.hive.serde2.io.DateWritable) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) HiveCharWritable(org.apache.hadoop.hive.serde2.io.HiveCharWritable) HiveVarcharWritable(org.apache.hadoop.hive.serde2.io.HiveVarcharWritable) Text(org.apache.hadoop.io.Text) BytesWritable(org.apache.hadoop.io.BytesWritable) HiveIntervalDayTimeWritable(org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) HiveIntervalYearMonthWritable(org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable) Date(java.sql.Date) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) FloatWritable(org.apache.hadoop.io.FloatWritable) HiveIntervalYearMonth(org.apache.hadoop.hive.common.type.HiveIntervalYearMonth) BooleanWritable(org.apache.hadoop.io.BooleanWritable)
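The DECIMAL branch above passes the column type's scale alongside the value. A hedged sketch of that pairing, with an illustrative decimal(10,2) type and literal value (the SerializeWrite call is shown only as a comment, since it needs a concrete implementation):

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class DecimalWriteSketch {
    public static void main(String[] args) {
        // Illustrative column type decimal(10,2), built directly from the factory.
        DecimalTypeInfo typeInfo = TypeInfoFactory.getDecimalTypeInfo(10, 2);
        HiveDecimalWritable writable = new HiveDecimalWritable(HiveDecimal.create("123.45"));
        HiveDecimal value = writable.getHiveDecimal();
        // A SerializeWrite implementation would then be invoked as in the DECIMAL case above:
        // serializeWrite.writeHiveDecimal(value, typeInfo.scale());
        System.out.println(value + " scale=" + typeInfo.scale());
    }
}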

Example 3 with DecimalTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo in project hive by apache.

From the class TestVectorSerDeRow, method deserializeAndVerify:

// Reads the serialized row back with DeserializeRead and verifies every field against
// expectedRow, failing the test on the first mismatch.
void deserializeAndVerify(Output output, DeserializeRead deserializeRead, VectorRandomRowSource source, Object[] expectedRow) throws HiveException, IOException {
    deserializeRead.set(output.getData(), 0, output.getLength());
    PrimitiveCategory[] primitiveCategories = source.primitiveCategories();
    for (int i = 0; i < primitiveCategories.length; i++) {
        Object expected = expectedRow[i];
        PrimitiveCategory primitiveCategory = primitiveCategories[i];
        PrimitiveTypeInfo primitiveTypeInfo = source.primitiveTypeInfos()[i];
        if (!deserializeRead.readNextField()) {
            throw new HiveException("Unexpected NULL when reading primitiveCategory " + primitiveCategory + " expected (" + expected.getClass().getName() + ", " + expected.toString() + ") " + " deserializeRead " + deserializeRead.getClass().getName());
        }
        switch(primitiveCategory) {
            case BOOLEAN:
                {
                    Boolean value = deserializeRead.currentBoolean;
                    BooleanWritable expectedWritable = (BooleanWritable) expected;
                    if (!value.equals(expectedWritable.get())) {
                        TestCase.fail("Boolean field mismatch (expected " + expected + " found " + value + ")");
                    }
                }
                break;
            case BYTE:
                {
                    Byte value = deserializeRead.currentByte;
                    ByteWritable expectedWritable = (ByteWritable) expected;
                    if (!value.equals(expectedWritable.get())) {
                        TestCase.fail("Byte field mismatch (expected " + (int) expected + " found " + (int) value + ")");
                    }
                }
                break;
            case SHORT:
                {
                    Short value = deserializeRead.currentShort;
                    ShortWritable expectedWritable = (ShortWritable) expected;
                    if (!value.equals(expectedWritable.get())) {
                        TestCase.fail("Short field mismatch (expected " + expected + " found " + value + ")");
                    }
                }
                break;
            case INT:
                {
                    Integer value = deserializeRead.currentInt;
                    IntWritable expectedWritable = (IntWritable) expected;
                    if (!value.equals(expectedWritable.get())) {
                        TestCase.fail("Int field mismatch (expected " + expected + " found " + value + ")");
                    }
                }
                break;
            case LONG:
                {
                    Long value = deserializeRead.currentLong;
                    LongWritable expectedWritable = (LongWritable) expected;
                    if (!value.equals(expectedWritable.get())) {
                        TestCase.fail("Long field mismatch (expected " + expected + " found " + value + ")");
                    }
                }
                break;
            case DATE:
                {
                    DateWritable value = deserializeRead.currentDateWritable;
                    DateWritable expectedWritable = (DateWritable) expected;
                    if (!value.equals(expectedWritable)) {
                        TestCase.fail("Date field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
                    }
                }
                break;
            case FLOAT:
                {
                    Float value = deserializeRead.currentFloat;
                    FloatWritable expectedWritable = (FloatWritable) expected;
                    if (!value.equals(expectedWritable.get())) {
                        TestCase.fail("Float field mismatch (expected " + expected + " found " + value + ")");
                    }
                }
                break;
            case DOUBLE:
                {
                    Double value = deserializeRead.currentDouble;
                    DoubleWritable expectedWritable = (DoubleWritable) expected;
                    if (!value.equals(expectedWritable.get())) {
                        TestCase.fail("Double field mismatch (expected " + expected + " found " + value + ")");
                    }
                }
                break;
            case STRING:
            case CHAR:
            case VARCHAR:
            case BINARY:
                {
                    byte[] stringBytes = Arrays.copyOfRange(deserializeRead.currentBytes, deserializeRead.currentBytesStart, deserializeRead.currentBytesStart + deserializeRead.currentBytesLength);
                    Text text = new Text(stringBytes);
                    String string = text.toString();
                    switch(primitiveCategory) {
                        case STRING:
                            {
                                Text expectedWritable = (Text) expected;
                                if (!string.equals(expectedWritable.toString())) {
                                    TestCase.fail("String field mismatch (expected '" + expectedWritable.toString() + "' found '" + string + "')");
                                }
                            }
                            break;
                        case CHAR:
                            {
                                HiveChar hiveChar = new HiveChar(string, ((CharTypeInfo) primitiveTypeInfo).getLength());
                                HiveCharWritable expectedWritable = (HiveCharWritable) expected;
                                if (!hiveChar.equals(expectedWritable.getHiveChar())) {
                                    TestCase.fail("Char field mismatch (expected '" + expectedWritable.getHiveChar() + "' found '" + hiveChar + "')");
                                }
                            }
                            break;
                        case VARCHAR:
                            {
                                HiveVarchar hiveVarchar = new HiveVarchar(string, ((VarcharTypeInfo) primitiveTypeInfo).getLength());
                                HiveVarcharWritable expectedWritable = (HiveVarcharWritable) expected;
                                if (!hiveVarchar.equals(expectedWritable.getHiveVarchar())) {
                                    TestCase.fail("Varchar field mismatch (expected '" + expectedWritable.getHiveVarchar() + "' found '" + hiveVarchar + "')");
                                }
                            }
                            break;
                        case BINARY:
                            {
                                BytesWritable expectedWritable = (BytesWritable) expected;
                                if (stringBytes.length != expectedWritable.getLength()) {
                                    TestCase.fail("Byte Array field mismatch (expected " + expected + " found " + stringBytes + ")");
                                }
                                byte[] expectedBytes = expectedWritable.getBytes();
                                for (int b = 0; b < stringBytes.length; b++) {
                                    if (stringBytes[b] != expectedBytes[b]) {
                                        TestCase.fail("Byte Array field mismatch (expected " + expected + " found " + stringBytes + ")");
                                    }
                                }
                            }
                            break;
                        default:
                            throw new HiveException("Unexpected primitive category " + primitiveCategory);
                    }
                }
                break;
            case DECIMAL:
                {
                    HiveDecimal value = deserializeRead.currentHiveDecimalWritable.getHiveDecimal();
                    if (value == null) {
                        TestCase.fail("Decimal field evaluated to NULL");
                    }
                    HiveDecimalWritable expectedWritable = (HiveDecimalWritable) expected;
                    if (!value.equals(expectedWritable.getHiveDecimal())) {
                        DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) primitiveTypeInfo;
                        int precision = decimalTypeInfo.getPrecision();
                        int scale = decimalTypeInfo.getScale();
                        TestCase.fail("Decimal field mismatch (expected " + expectedWritable.getHiveDecimal() + " found " + value.toString() + ") precision " + precision + ", scale " + scale);
                    }
                }
                break;
            case TIMESTAMP:
                {
                    Timestamp value = deserializeRead.currentTimestampWritable.getTimestamp();
                    TimestampWritable expectedWritable = (TimestampWritable) expected;
                    if (!value.equals(expectedWritable.getTimestamp())) {
                        TestCase.fail("Timestamp field mismatch (expected " + expectedWritable.getTimestamp() + " found " + value.toString() + ")");
                    }
                }
                break;
            case INTERVAL_YEAR_MONTH:
                {
                    HiveIntervalYearMonth value = deserializeRead.currentHiveIntervalYearMonthWritable.getHiveIntervalYearMonth();
                    HiveIntervalYearMonthWritable expectedWritable = (HiveIntervalYearMonthWritable) expected;
                    HiveIntervalYearMonth expectedValue = expectedWritable.getHiveIntervalYearMonth();
                    if (!value.equals(expectedValue)) {
                        TestCase.fail("HiveIntervalYearMonth field mismatch (expected " + expectedValue + " found " + value.toString() + ")");
                    }
                }
                break;
            case INTERVAL_DAY_TIME:
                {
                    HiveIntervalDayTime value = deserializeRead.currentHiveIntervalDayTimeWritable.getHiveIntervalDayTime();
                    HiveIntervalDayTimeWritable expectedWritable = (HiveIntervalDayTimeWritable) expected;
                    HiveIntervalDayTime expectedValue = expectedWritable.getHiveIntervalDayTime();
                    if (!value.equals(expectedValue)) {
                        TestCase.fail("HiveIntervalDayTime field mismatch (expected " + expectedValue + " found " + value.toString() + ")");
                    }
                }
                break;
            default:
                throw new HiveException("Unexpected primitive category " + primitiveCategory);
        }
    }
    TestCase.assertTrue(deserializeRead.isEndOfInputReached());
}
Also used : HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) TimestampWritable(org.apache.hadoop.hive.serde2.io.TimestampWritable) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) Timestamp(java.sql.Timestamp) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) LongWritable(org.apache.hadoop.io.LongWritable) PrimitiveCategory(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) IntWritable(org.apache.hadoop.io.IntWritable) HiveIntervalDayTime(org.apache.hadoop.hive.common.type.HiveIntervalDayTime) DateWritable(org.apache.hadoop.hive.serde2.io.DateWritable) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) HiveCharWritable(org.apache.hadoop.hive.serde2.io.HiveCharWritable) HiveVarcharWritable(org.apache.hadoop.hive.serde2.io.HiveVarcharWritable) Text(org.apache.hadoop.io.Text) BytesWritable(org.apache.hadoop.io.BytesWritable) HiveIntervalDayTimeWritable(org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) HiveIntervalYearMonthWritable(org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) FloatWritable(org.apache.hadoop.io.FloatWritable) HiveIntervalYearMonth(org.apache.hadoop.hive.common.type.HiveIntervalYearMonth) BooleanWritable(org.apache.hadoop.io.BooleanWritable)
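The DECIMAL verification reduces to comparing the deserialized HiveDecimal with the expected writable's value. A minimal sketch of that check under the assumption of an illustrative literal:

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

public class DecimalVerifySketch {
    public static void main(String[] args) {
        HiveDecimal deserialized = HiveDecimal.create("123.45");
        HiveDecimalWritable expected = new HiveDecimalWritable(HiveDecimal.create("123.45"));
        // Mirrors the DECIMAL branch: compare the deserialized value with the expected writable.
        if (!deserialized.equals(expected.getHiveDecimal())) {
            throw new AssertionError("Decimal field mismatch");
        }
        System.out.println("match: " + deserialized);
    }
}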

Example 4 with DecimalTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo in project hive by apache.

From the class VectorRandomRowSource, method getWritableObject:

// Wraps a raw Java value in its Hadoop Writable via the column's ObjectInspector;
// parameterized types (char, varchar, decimal) build their inspectors from the TypeInfo.
public static Object getWritableObject(int column, Object object, List<ObjectInspector> primitiveObjectInspectorList, PrimitiveCategory[] primitiveCategories, PrimitiveTypeInfo[] primitiveTypeInfos) {
    ObjectInspector objectInspector = primitiveObjectInspectorList.get(column);
    PrimitiveCategory primitiveCategory = primitiveCategories[column];
    PrimitiveTypeInfo primitiveTypeInfo = primitiveTypeInfos[column];
    switch(primitiveCategory) {
        case BOOLEAN:
            return ((WritableBooleanObjectInspector) objectInspector).create((boolean) object);
        case BYTE:
            return ((WritableByteObjectInspector) objectInspector).create((byte) object);
        case SHORT:
            return ((WritableShortObjectInspector) objectInspector).create((short) object);
        case INT:
            return ((WritableIntObjectInspector) objectInspector).create((int) object);
        case LONG:
            return ((WritableLongObjectInspector) objectInspector).create((long) object);
        case DATE:
            return ((WritableDateObjectInspector) objectInspector).create((Date) object);
        case FLOAT:
            return ((WritableFloatObjectInspector) objectInspector).create((float) object);
        case DOUBLE:
            return ((WritableDoubleObjectInspector) objectInspector).create((double) object);
        case STRING:
            return ((WritableStringObjectInspector) objectInspector).create((String) object);
        case CHAR:
            {
                WritableHiveCharObjectInspector writableCharObjectInspector = new WritableHiveCharObjectInspector((CharTypeInfo) primitiveTypeInfo);
                return writableCharObjectInspector.create((HiveChar) object);
            }
        case VARCHAR:
            {
                WritableHiveVarcharObjectInspector writableVarcharObjectInspector = new WritableHiveVarcharObjectInspector((VarcharTypeInfo) primitiveTypeInfo);
                return writableVarcharObjectInspector.create((HiveVarchar) object);
            }
        case BINARY:
            return PrimitiveObjectInspectorFactory.writableBinaryObjectInspector.create((byte[]) object);
        case TIMESTAMP:
            return ((WritableTimestampObjectInspector) objectInspector).create((Timestamp) object);
        case INTERVAL_YEAR_MONTH:
            return ((WritableHiveIntervalYearMonthObjectInspector) objectInspector).create((HiveIntervalYearMonth) object);
        case INTERVAL_DAY_TIME:
            return ((WritableHiveIntervalDayTimeObjectInspector) objectInspector).create((HiveIntervalDayTime) object);
        case DECIMAL:
            {
                WritableHiveDecimalObjectInspector writableDecimalObjectInspector = new WritableHiveDecimalObjectInspector((DecimalTypeInfo) primitiveTypeInfo);
                HiveDecimalWritable result = (HiveDecimalWritable) writableDecimalObjectInspector.create((HiveDecimal) object);
                return result;
            }
        default:
            throw new Error("Unknown primitive category " + primitiveCategory);
    }
}
Also used : VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) WritableHiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveVarcharObjectInspector) WritableLongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableLongObjectInspector) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) WritableHiveIntervalDayTimeObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveIntervalDayTimeObjectInspector) WritableHiveDecimalObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveDecimalObjectInspector) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) WritableFloatObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableFloatObjectInspector) WritableStringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableStringObjectInspector) WritableShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableShortObjectInspector) PrimitiveCategory(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory) WritableHiveIntervalYearMonthObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveIntervalYearMonthObjectInspector) WritableHiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveCharObjectInspector) WritableIntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableIntObjectInspector) WritableByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableByteObjectInspector) WritableBooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableBooleanObjectInspector) WritableTimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableTimestampObjectInspector) WritableDoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableDoubleObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) WritableDateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableDateObjectInspector) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo)
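A minimal sketch of the DECIMAL branch in isolation: the inspector is constructed from the DecimalTypeInfo and then wraps a HiveDecimal in a HiveDecimalWritable; the decimal(10,2) type and literal are illustrative:

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveDecimalObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class DecimalInspectorSketch {
    public static void main(String[] args) {
        DecimalTypeInfo typeInfo = TypeInfoFactory.getDecimalTypeInfo(10, 2);
        // The inspector is built from the TypeInfo, as in the DECIMAL branch above.
        WritableHiveDecimalObjectInspector inspector = new WritableHiveDecimalObjectInspector(typeInfo);
        HiveDecimalWritable result = (HiveDecimalWritable) inspector.create(HiveDecimal.create("123.45"));
        System.out.println(result.getHiveDecimal());
    }
}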

Example 5 with DecimalTypeInfo

Use of org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo in project hive by apache.

From the class VectorRandomRowSource, method randomObject:

// Generates a random value of the column's primitive type; string-family values can
// optionally have characters from needsEscapeStr injected to exercise escaping.
public static Object randomObject(int column, Random r, PrimitiveCategory[] primitiveCategories, PrimitiveTypeInfo[] primitiveTypeInfos, String[] alphabets, boolean addEscapables, String needsEscapeStr) {
    PrimitiveCategory primitiveCategory = primitiveCategories[column];
    PrimitiveTypeInfo primitiveTypeInfo = primitiveTypeInfos[column];
    try {
        switch(primitiveCategory) {
            case BOOLEAN:
                return Boolean.valueOf(r.nextInt(2) == 1); // nextInt(2) yields 0 or 1; nextInt(1) would always yield 0, i.e. always false
            case BYTE:
                return Byte.valueOf((byte) r.nextInt());
            case SHORT:
                return Short.valueOf((short) r.nextInt());
            case INT:
                return Integer.valueOf(r.nextInt());
            case LONG:
                return Long.valueOf(r.nextLong());
            case DATE:
                return RandomTypeUtil.getRandDate(r);
            case FLOAT:
                return Float.valueOf(r.nextFloat() * 10 - 5);
            case DOUBLE:
                return Double.valueOf(r.nextDouble() * 10 - 5);
            case STRING:
            case CHAR:
            case VARCHAR:
                {
                    String result;
                    if (alphabets != null && alphabets[column] != null) {
                        result = RandomTypeUtil.getRandString(r, alphabets[column], r.nextInt(10));
                    } else {
                        result = RandomTypeUtil.getRandString(r);
                    }
                    if (addEscapables && result.length() > 0) {
                        int escapeCount = 1 + r.nextInt(2);
                        for (int i = 0; i < escapeCount; i++) {
                            int index = r.nextInt(result.length());
                            String begin = result.substring(0, index);
                            String end = result.substring(index);
                            Character needsEscapeChar = needsEscapeStr.charAt(r.nextInt(needsEscapeStr.length()));
                            result = begin + needsEscapeChar + end;
                        }
                    }
                    switch(primitiveCategory) {
                        case STRING:
                            return result;
                        case CHAR:
                            return new HiveChar(result, ((CharTypeInfo) primitiveTypeInfo).getLength());
                        case VARCHAR:
                            return new HiveVarchar(result, ((VarcharTypeInfo) primitiveTypeInfo).getLength());
                        default:
                            throw new Error("Unknown primitive category " + primitiveCategory);
                    }
                }
            case BINARY:
                return getRandBinary(r, 1 + r.nextInt(100));
            case TIMESTAMP:
                return RandomTypeUtil.getRandTimestamp(r);
            case INTERVAL_YEAR_MONTH:
                return getRandIntervalYearMonth(r);
            case INTERVAL_DAY_TIME:
                return getRandIntervalDayTime(r);
            case DECIMAL:
                return getRandHiveDecimal(r, (DecimalTypeInfo) primitiveTypeInfo);
            default:
                throw new Error("Unknown primitive category " + primitiveCategory);
        }
    } catch (Exception e) {
        throw new RuntimeException("randomObject failed on column " + column + " type " + primitiveCategory, e);
    }
}
Also used : VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) PrimitiveCategory(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException)
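The getRandHiveDecimal helper is not shown above. As a hedged stand-in (an assumption, not Hive's implementation), a random decimal can be drawn so that it fits the type's precision and scale:

import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.Random;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class RandomDecimalSketch {
    // Hypothetical stand-in for getRandHiveDecimal: draws a value within precision/scale.
    static HiveDecimal randomDecimal(Random r, DecimalTypeInfo typeInfo) {
        int integerDigits = typeInfo.getPrecision() - typeInfo.getScale();
        // Bound the integer part so the result cannot overflow the declared precision.
        long bound = (long) Math.pow(10, Math.min(integerDigits, 15));
        BigDecimal value = BigDecimal.valueOf(r.nextDouble() * bound)
                .setScale(typeInfo.getScale(), RoundingMode.HALF_UP);
        return HiveDecimal.create(value);
    }

    public static void main(String[] args) {
        System.out.println(randomDecimal(new Random(), TypeInfoFactory.getDecimalTypeInfo(10, 2)));
    }
}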

Aggregations

DecimalTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo): 40 usages
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable): 14 usages
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 14 usages
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 13 usages
CharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo): 12 usages
VarcharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo): 12 usages
HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar): 11 usages
HiveChar (org.apache.hadoop.hive.common.type.HiveChar): 10 usages
IntWritable (org.apache.hadoop.io.IntWritable): 10 usages
Test (org.junit.Test): 10 usages
HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal): 9 usages
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector): 9 usages
PrimitiveCategory (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory): 9 usages
Timestamp (java.sql.Timestamp): 8 usages
Date (java.sql.Date): 7 usages
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 7 usages
ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable): 7 usages
BooleanWritable (org.apache.hadoop.io.BooleanWritable): 7 usages
BytesWritable (org.apache.hadoop.io.BytesWritable): 7 usages
FloatWritable (org.apache.hadoop.io.FloatWritable): 7 usages