
Example 86 with DoubleWritable

Use of org.apache.hadoop.hive.serde2.io.DoubleWritable in project nifi by apache.

The class NiFiOrcUtils, method convertToORCObject.

public static Object convertToORCObject(TypeInfo typeInfo, Object o) {
    if (o != null) {
        if (typeInfo instanceof UnionTypeInfo) {
            OrcUnion union = new OrcUnion();
            // Need to find which of the union types corresponds to the given object
            TypeInfo objectTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(ObjectInspectorFactory.getReflectionObjectInspector(o.getClass(), ObjectInspectorFactory.ObjectInspectorOptions.JAVA));
            List<TypeInfo> unionTypeInfos = ((UnionTypeInfo) typeInfo).getAllUnionObjectTypeInfos();
            int index = 0;
            while (index < unionTypeInfos.size() && !unionTypeInfos.get(index).equals(objectTypeInfo)) {
                index++;
            }
            if (index < unionTypeInfos.size()) {
                union.set((byte) index, convertToORCObject(objectTypeInfo, o));
            } else {
                throw new IllegalArgumentException("Object Type for class " + o.getClass().getName() + " not in Union declaration");
            }
            return union;
        }
        if (o instanceof Integer) {
            return new IntWritable((int) o);
        }
        if (o instanceof Boolean) {
            return new BooleanWritable((boolean) o);
        }
        if (o instanceof Long) {
            return new LongWritable((long) o);
        }
        if (o instanceof Float) {
            return new FloatWritable((float) o);
        }
        if (o instanceof Double) {
            return new DoubleWritable((double) o);
        }
        if (o instanceof String || o instanceof Utf8 || o instanceof GenericData.EnumSymbol) {
            return new Text(o.toString());
        }
        if (o instanceof ByteBuffer) {
            return new BytesWritable(((ByteBuffer) o).array());
        }
        if (o instanceof int[]) {
            int[] intArray = (int[]) o;
            return Arrays.stream(intArray).mapToObj((element) -> convertToORCObject(TypeInfoFactory.getPrimitiveTypeInfo("int"), element)).collect(Collectors.toList());
        }
        if (o instanceof long[]) {
            long[] longArray = (long[]) o;
            return Arrays.stream(longArray).mapToObj((element) -> convertToORCObject(TypeInfoFactory.getPrimitiveTypeInfo("bigint"), element)).collect(Collectors.toList());
        }
        if (o instanceof float[]) {
            float[] floatArray = (float[]) o;
            // Java has no FloatStream, so go through a DoubleStream and cast each element back to float
            return IntStream.range(0, floatArray.length).mapToDouble(i -> floatArray[i]).mapToObj((element) -> convertToORCObject(TypeInfoFactory.getPrimitiveTypeInfo("float"), (float) element)).collect(Collectors.toList());
        }
        }
        if (o instanceof double[]) {
            double[] doubleArray = (double[]) o;
            return Arrays.stream(doubleArray).mapToObj((element) -> convertToORCObject(TypeInfoFactory.getPrimitiveTypeInfo("double"), element)).collect(Collectors.toList());
        }
        if (o instanceof boolean[]) {
            boolean[] booleanArray = (boolean[]) o;
            // Java has no stream over boolean[], so map each element to 0/1 and back to a Boolean
            return IntStream.range(0, booleanArray.length).map(i -> booleanArray[i] ? 1 : 0).mapToObj((element) -> convertToORCObject(TypeInfoFactory.getPrimitiveTypeInfo("boolean"), element == 1)).collect(Collectors.toList());
        }
        }
        if (o instanceof GenericData.Array) {
            GenericData.Array array = ((GenericData.Array) o);
            // The type information in this case is interpreted as a List
            TypeInfo listTypeInfo = ((ListTypeInfo) typeInfo).getListElementTypeInfo();
            return array.stream().map((element) -> convertToORCObject(listTypeInfo, element)).collect(Collectors.toList());
        }
        if (o instanceof List) {
            return o;
        }
        if (o instanceof Map) {
            Map map = new HashMap();
            TypeInfo keyInfo = ((MapTypeInfo) typeInfo).getMapKeyTypeInfo();
            TypeInfo valueInfo = ((MapTypeInfo) typeInfo).getMapValueTypeInfo();
            // Unions are not allowed as key/value types, so if we convert the key and value objects,
            // they should return Writable objects
            ((Map) o).forEach((key, value) -> {
                Object keyObject = convertToORCObject(keyInfo, key);
                Object valueObject = convertToORCObject(valueInfo, value);
                if (keyObject == null) {
                    throw new IllegalArgumentException("Map keys cannot be null");
                }
                map.put(keyObject, valueObject);
            });
            return map;
        }
        if (o instanceof GenericData.Record) {
            GenericData.Record record = (GenericData.Record) o;
            TypeInfo recordSchema = NiFiOrcUtils.getOrcField(record.getSchema());
            List<Schema.Field> recordFields = record.getSchema().getFields();
            if (recordFields != null) {
                Object[] fieldObjects = new Object[recordFields.size()];
                for (int i = 0; i < recordFields.size(); i++) {
                    Schema.Field field = recordFields.get(i);
                    Schema fieldSchema = field.schema();
                    Object fieldObject = record.get(field.name());
                    fieldObjects[i] = NiFiOrcUtils.convertToORCObject(NiFiOrcUtils.getOrcField(fieldSchema), fieldObject);
                }
                return NiFiOrcUtils.createOrcStruct(recordSchema, fieldObjects);
            }
        }
        throw new IllegalArgumentException("Error converting object of type " + o.getClass().getName() + " to ORC type " + typeInfo.getTypeName());
    } else {
        return null;
    }
}
Also used : IntStream(java.util.stream.IntStream) TypeInfoUtils(org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils) HIVE_ORC_DEFAULT_ROW_INDEX_STRIDE(org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVE_ORC_DEFAULT_ROW_INDEX_STRIDE) Arrays(java.util.Arrays) Text(org.apache.hadoop.io.Text) HashMap(java.util.HashMap) StringUtils(org.apache.commons.lang3.StringUtils) DoubleWritable(org.apache.hadoop.io.DoubleWritable) LongWritable(org.apache.hadoop.io.LongWritable) ByteBuffer(java.nio.ByteBuffer) GenericData(org.apache.avro.generic.GenericData) ArrayList(java.util.ArrayList) MapTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo) Configuration(org.apache.hadoop.conf.Configuration) Map(java.util.Map) Path(org.apache.hadoop.fs.Path) ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) HIVE_ORC_DEFAULT_BLOCK_SIZE(org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVE_ORC_DEFAULT_BLOCK_SIZE) BytesWritable(org.apache.hadoop.io.BytesWritable) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) IntWritable(org.apache.hadoop.io.IntWritable) OutputStream(java.io.OutputStream) Utf8(org.apache.avro.util.Utf8) HIVE_ORC_DEFAULT_BLOCK_PADDING(org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVE_ORC_DEFAULT_BLOCK_PADDING) Schema(org.apache.avro.Schema) HIVE_ORC_WRITE_FORMAT(org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVE_ORC_WRITE_FORMAT) TypeInfoFactory(org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory) HiveConf(org.apache.hadoop.hive.conf.HiveConf) IOException(java.io.IOException) Collectors(java.util.stream.Collectors) SettableStructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.SettableStructObjectInspector) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) UnionTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo) List(java.util.List) BooleanWritable(org.apache.hadoop.io.BooleanWritable) ObjectInspectorFactory(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory) BloomFilterIO(org.apache.hadoop.hive.ql.io.filters.BloomFilterIO) FloatWritable(org.apache.hadoop.io.FloatWritable) StructField(org.apache.hadoop.hive.serde2.objectinspector.StructField)
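
For context, a minimal usage sketch of the method above (hypothetical values; it assumes NiFiOrcUtils is on the classpath and uses only the Hive TypeInfoUtils API from the imports shown):

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

import java.util.HashMap;
import java.util.Map;

public class ConvertToOrcSketch {
    public static void main(String[] args) {
        // A primitive: a Java Double comes back as a DoubleWritable
        TypeInfo doubleType = TypeInfoUtils.getTypeInfoFromTypeString("double");
        Object orcDouble = NiFiOrcUtils.convertToORCObject(doubleType, 5.3d);
        // A map: keys and values are converted recursively to Writable objects
        TypeInfo mapType = TypeInfoUtils.getTypeInfoFromTypeString("map<string,int>");
        Map<String, Integer> source = new HashMap<>();
        source.put("answer", 42);
        Object orcMap = NiFiOrcUtils.convertToORCObject(mapType, source);
        System.out.println(orcDouble + " / " + orcMap);
    }
}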

Example 87 with DoubleWritable

Use of org.apache.hadoop.hive.serde2.io.DoubleWritable in project parquet-mr by apache.

The class TestParquetSerDe, method testParquetHiveSerDe.

public void testParquetHiveSerDe() throws Throwable {
    try {
        // Create the SerDe
        System.out.println("test: testParquetHiveSerDe");
        final ParquetHiveSerDe serDe = new ParquetHiveSerDe();
        final Configuration conf = new Configuration();
        final Properties tbl = createProperties();
        serDe.initialize(conf, tbl);
        // Data
        final Writable[] arr = new Writable[8];
        arr[0] = new ByteWritable((byte) 123);
        arr[1] = new ShortWritable((short) 456);
        arr[2] = new IntWritable(789);
        arr[3] = new LongWritable(1000L);
        arr[4] = new DoubleWritable(5.3);
        arr[5] = new BinaryWritable(Binary.fromString("hive and hadoop and parquet. Big family."));
        final Writable[] mapContainer = new Writable[1];
        final Writable[] map = new Writable[3];
        for (int i = 0; i < 3; ++i) {
            final Writable[] pair = new Writable[2];
            pair[0] = new BinaryWritable(Binary.fromString("key_" + i));
            pair[1] = new IntWritable(i);
            map[i] = new ArrayWritable(Writable.class, pair);
        }
        mapContainer[0] = new ArrayWritable(Writable.class, map);
        arr[6] = new ArrayWritable(Writable.class, mapContainer);
        final Writable[] arrayContainer = new Writable[1];
        final Writable[] array = new Writable[5];
        for (int i = 0; i < 5; ++i) {
            array[i] = new BinaryWritable(Binary.fromString("elem_" + i));
        }
        arrayContainer[0] = new ArrayWritable(Writable.class, array);
        arr[7] = new ArrayWritable(Writable.class, arrayContainer);
        final ArrayWritable arrWritable = new ArrayWritable(Writable.class, arr);
        // Test
        deserializeAndSerializeLazySimple(serDe, arrWritable);
        System.out.println("test: testParquetHiveSerDe - OK");
    } catch (final Throwable e) {
        e.printStackTrace();
        throw e;
    }
}
Also used : ParquetHiveSerDe(org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe) Configuration(org.apache.hadoop.conf.Configuration) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) BinaryWritable(org.apache.hadoop.hive.ql.io.parquet.writable.BinaryWritable) Writable(org.apache.hadoop.io.Writable) LongWritable(org.apache.hadoop.io.LongWritable) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) ArrayWritable(org.apache.hadoop.io.ArrayWritable) IntWritable(org.apache.hadoop.io.IntWritable) Properties(java.util.Properties)
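
The createProperties() helper is not shown above; here is a plausible minimal sketch matching the eight-column layout of the Writable array (the column names and exact type strings are assumptions, not the parquet-mr originals):

private Properties createProperties() {
    Properties tbl = new Properties();
    // Hive table metadata the SerDe reads during initialize(); one entry per column in arr
    tbl.setProperty("columns", "abyte,ashort,aint,along,adouble,astring,amap,alist");
    tbl.setProperty("columns.types",
            "tinyint:smallint:int:bigint:double:string:map<string,int>:array<string>");
    return tbl;
}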

Example 88 with DoubleWritable

Use of org.apache.hadoop.hive.serde2.io.DoubleWritable in project presto by prestodb.

The class TestObjectEncoders, method testPrimitiveObjectEncoders.

@Test
public void testPrimitiveObjectEncoders() {
    ObjectInspector inspector;
    ObjectEncoder encoder;
    inspector = writableLongObjectInspector;
    encoder = createEncoder(BIGINT, inspector);
    assertTrue(encoder.encode(new LongWritable(123456L)) instanceof Long);
    inspector = writableIntObjectInspector;
    encoder = createEncoder(INTEGER, inspector);
    assertTrue(encoder.encode(new IntWritable(12345)) instanceof Long);
    inspector = writableShortObjectInspector;
    encoder = createEncoder(SMALLINT, inspector);
    assertTrue(encoder.encode(new ShortWritable((short) 1234)) instanceof Long);
    inspector = writableByteObjectInspector;
    encoder = createEncoder(TINYINT, inspector);
    assertTrue(encoder.encode(new ByteWritable((byte) 123)) instanceof Long);
    inspector = writableBooleanObjectInspector;
    encoder = createEncoder(BOOLEAN, inspector);
    assertTrue(encoder.encode(new BooleanWritable(true)) instanceof Boolean);
    inspector = writableDoubleObjectInspector;
    encoder = createEncoder(DOUBLE, inspector);
    assertTrue(encoder.encode(new DoubleWritable(0.1)) instanceof Double);
    inspector = writableDateObjectInspector;
    encoder = createEncoder(DATE, inspector);
    assertTrue(encoder.encode(new DateWritable(DateTimeUtils.createDate(18380L))) instanceof Long);
    inspector = writableHiveDecimalObjectInspector;
    encoder = createEncoder(createDecimalType(11, 10), inspector);
    assertTrue(encoder.encode(new HiveDecimalWritable("1.2345678910")) instanceof Long);
    encoder = createEncoder(createDecimalType(34, 33), inspector);
    assertTrue(encoder.encode(new HiveDecimalWritable("1.281734081274028174012432412423134")) instanceof Slice);
}
Also used : PrimitiveObjectInspectorFactory.writableByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableByteObjectInspector) PrimitiveObjectInspectorFactory.writableDateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableDateObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) PrimitiveObjectInspectorFactory.writableShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableShortObjectInspector) PrimitiveObjectInspectorFactory.writableStringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableStringObjectInspector) PrimitiveObjectInspectorFactory.writableBooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableBooleanObjectInspector) PrimitiveObjectInspectorFactory.writableHiveDecimalObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableHiveDecimalObjectInspector) PrimitiveObjectInspectorFactory.writableBinaryObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableBinaryObjectInspector) PrimitiveObjectInspectorFactory.writableIntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableIntObjectInspector) PrimitiveObjectInspectorFactory.writableDoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableDoubleObjectInspector) PrimitiveObjectInspectorFactory.writableLongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory.writableLongObjectInspector) DateWritable(org.apache.hadoop.hive.serde2.io.DateWritable) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) DoubleWritable(org.apache.hadoop.io.DoubleWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) Slice(io.airlift.slice.Slice) LongWritable(org.apache.hadoop.io.LongWritable) ByteWritable(org.apache.hadoop.io.ByteWritable) IntWritable(org.apache.hadoop.io.IntWritable) Test(org.testng.annotations.Test)
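
Why decimal(11, 10) encodes to a Long while decimal(34, 33) encodes to a Slice: Presto stores decimals with precision up to 18 as a long unscaled value and wider decimals as a 128-bit Slice. A minimal illustration using only the Hive writable from the test (the sketch class name is mine):

import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import java.math.BigInteger;

public class DecimalWidthSketch {
    public static void main(String[] args) {
        BigInteger unscaled = new HiveDecimalWritable("1.2345678910")
                .getHiveDecimal().unscaledValue();
        // 12345678910 has 11 digits (precision 11 <= 18), so it fits in a long
        System.out.println(unscaled.longValueExact());
    }
}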

Example 89 with DoubleWritable

Use of org.apache.hadoop.hive.serde2.io.DoubleWritable in project presto by prestodb.

The class RcFileTester, method decodeRecordReaderValue.

private static Object decodeRecordReaderValue(Type type, Object actualValue) {
    if (actualValue instanceof LazyPrimitive) {
        actualValue = ((LazyPrimitive<?, ?>) actualValue).getWritableObject();
    }
    if (actualValue instanceof BooleanWritable) {
        actualValue = ((BooleanWritable) actualValue).get();
    } else if (actualValue instanceof ByteWritable) {
        actualValue = ((ByteWritable) actualValue).get();
    } else if (actualValue instanceof BytesWritable) {
        actualValue = new SqlVarbinary(((BytesWritable) actualValue).copyBytes());
    } else if (actualValue instanceof DateWritable) {
        actualValue = new SqlDate(((DateWritable) actualValue).getDays());
    } else if (actualValue instanceof DoubleWritable) {
        actualValue = ((DoubleWritable) actualValue).get();
    } else if (actualValue instanceof FloatWritable) {
        actualValue = ((FloatWritable) actualValue).get();
    } else if (actualValue instanceof IntWritable) {
        actualValue = ((IntWritable) actualValue).get();
    } else if (actualValue instanceof LongWritable) {
        actualValue = ((LongWritable) actualValue).get();
    } else if (actualValue instanceof ShortWritable) {
        actualValue = ((ShortWritable) actualValue).get();
    } else if (actualValue instanceof HiveDecimalWritable) {
        DecimalType decimalType = (DecimalType) type;
        HiveDecimalWritable writable = (HiveDecimalWritable) actualValue;
        // writable messes with the scale so rescale the values to the Presto type
        BigInteger rescaledValue = rescale(writable.getHiveDecimal().unscaledValue(), writable.getScale(), decimalType.getScale());
        actualValue = new SqlDecimal(rescaledValue, decimalType.getPrecision(), decimalType.getScale());
    } else if (actualValue instanceof Text) {
        actualValue = actualValue.toString();
    } else if (actualValue instanceof TimestampWritable) {
        TimestampWritable timestamp = (TimestampWritable) actualValue;
        if (SESSION.getSqlFunctionProperties().isLegacyTimestamp()) {
            actualValue = new SqlTimestamp((timestamp.getSeconds() * 1000) + (timestamp.getNanos() / 1000000L), UTC_KEY);
        } else {
            actualValue = new SqlTimestamp((timestamp.getSeconds() * 1000) + (timestamp.getNanos() / 1000000L));
        }
    } else if (actualValue instanceof StructObject) {
        StructObject structObject = (StructObject) actualValue;
        actualValue = decodeRecordReaderStruct(type, structObject.getFieldsAsList());
    } else if (actualValue instanceof LazyBinaryArray) {
        actualValue = decodeRecordReaderList(type, ((LazyBinaryArray) actualValue).getList());
    } else if (actualValue instanceof LazyBinaryMap) {
        actualValue = decodeRecordReaderMap(type, ((LazyBinaryMap) actualValue).getMap());
    } else if (actualValue instanceof LazyArray) {
        actualValue = decodeRecordReaderList(type, ((LazyArray) actualValue).getList());
    } else if (actualValue instanceof LazyMap) {
        actualValue = decodeRecordReaderMap(type, ((LazyMap) actualValue).getMap());
    } else if (actualValue instanceof List) {
        actualValue = decodeRecordReaderList(type, ((List<?>) actualValue));
    }
    return actualValue;
}
Also used : SqlVarbinary(com.facebook.presto.common.type.SqlVarbinary) TimestampWritable(org.apache.hadoop.hive.serde2.io.TimestampWritable) DoubleWritable(org.apache.hadoop.io.DoubleWritable) SqlTimestamp(com.facebook.presto.common.type.SqlTimestamp) LazyBinaryArray(org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryArray) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) LazyPrimitive(org.apache.hadoop.hive.serde2.lazy.LazyPrimitive) StructObject(org.apache.hadoop.hive.serde2.StructObject) ArrayList(java.util.ArrayList) List(java.util.List) ImmutableList(com.google.common.collect.ImmutableList) Collectors.toList(java.util.stream.Collectors.toList) LongWritable(org.apache.hadoop.io.LongWritable) ByteWritable(org.apache.hadoop.io.ByteWritable) IntWritable(org.apache.hadoop.io.IntWritable) DateWritable(org.apache.hadoop.hive.serde2.io.DateWritable) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) LazyMap(org.apache.hadoop.hive.serde2.lazy.LazyMap) BytesWritable(org.apache.hadoop.io.BytesWritable) SqlDecimal(com.facebook.presto.common.type.SqlDecimal) Text(org.apache.hadoop.io.Text) FloatWritable(org.apache.hadoop.io.FloatWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) SqlDate(com.facebook.presto.common.type.SqlDate) DecimalType(com.facebook.presto.common.type.DecimalType) BigInteger(java.math.BigInteger) LazyArray(org.apache.hadoop.hive.serde2.lazy.LazyArray) LazyBinaryMap(org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryMap)
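
The rescale(...) helper called in the decimal branch is not part of this snippet; a minimal sketch of what such a helper plausibly does, shifting an unscaled decimal value between scales (this body is an assumption, not the Presto implementation):

import java.math.BigInteger;

final class RescaleSketch {
    // Shift an unscaled value from one decimal scale to another
    static BigInteger rescale(BigInteger value, int fromScale, int toScale) {
        if (toScale >= fromScale) {
            // e.g. 123 at scale 1 (12.3) -> 12300 at scale 3 (12.300)
            return value.multiply(BigInteger.TEN.pow(toScale - fromScale));
        }
        // Narrowing the scale truncates here; a real implementation may round instead
        return value.divide(BigInteger.TEN.pow(fromScale - toScale));
    }

    public static void main(String[] args) {
        System.out.println(rescale(BigInteger.valueOf(123), 1, 3)); // 12300
    }
}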

Example 90 with DoubleWritable

Use of org.apache.hadoop.hive.serde2.io.DoubleWritable in project flink by apache.

The class HiveShimV100, method javaToWritable.

Optional<Writable> javaToWritable(@Nonnull Object value) {
    Writable writable = null;
    // in case value is already a Writable
    if (value instanceof Writable) {
        writable = (Writable) value;
    } else if (value instanceof Boolean) {
        writable = new BooleanWritable((Boolean) value);
    } else if (value instanceof Byte) {
        writable = new ByteWritable((Byte) value);
    } else if (value instanceof Short) {
        writable = new ShortWritable((Short) value);
    } else if (value instanceof Integer) {
        writable = new IntWritable((Integer) value);
    } else if (value instanceof Long) {
        writable = new LongWritable((Long) value);
    } else if (value instanceof Float) {
        writable = new FloatWritable((Float) value);
    } else if (value instanceof Double) {
        writable = new DoubleWritable((Double) value);
    } else if (value instanceof String) {
        writable = new Text((String) value);
    } else if (value instanceof HiveChar) {
        writable = new HiveCharWritable((HiveChar) value);
    } else if (value instanceof HiveVarchar) {
        writable = new HiveVarcharWritable((HiveVarchar) value);
    } else if (value instanceof HiveDecimal) {
        writable = new HiveDecimalWritable((HiveDecimal) value);
    } else if (value instanceof Date) {
        writable = new DateWritable((Date) value);
    } else if (value instanceof Timestamp) {
        writable = new TimestampWritable((Timestamp) value);
    } else if (value instanceof BigDecimal) {
        HiveDecimal hiveDecimal = HiveDecimal.create((BigDecimal) value);
        writable = new HiveDecimalWritable(hiveDecimal);
    } else if (value instanceof byte[]) {
        writable = new BytesWritable((byte[]) value);
    }
    return Optional.ofNullable(writable);
}
Also used : HiveChar(org.apache.hadoop.hive.common.type.HiveChar) DateWritable(org.apache.hadoop.hive.serde2.io.DateWritable) Writable(org.apache.hadoop.io.Writable) LongWritable(org.apache.hadoop.io.LongWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) IntWritable(org.apache.hadoop.io.IntWritable) HiveVarcharWritable(org.apache.hadoop.hive.serde2.io.HiveVarcharWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) HiveCharWritable(org.apache.hadoop.hive.serde2.io.HiveCharWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) TimestampWritable(org.apache.hadoop.hive.serde2.io.TimestampWritable) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) FloatWritable(org.apache.hadoop.io.FloatWritable) Timestamp(java.sql.Timestamp) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) Text(org.apache.hadoop.io.Text) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) LocalDate(java.time.LocalDate) CatalogColumnStatisticsDataDate(org.apache.flink.table.catalog.stats.CatalogColumnStatisticsDataDate) Date(java.sql.Date) BigDecimal(java.math.BigDecimal)
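
A minimal usage sketch (hypothetical values; javaToWritable is package-private and returns Optional.empty() for unsupported types, so this fragment assumes a caller in the same package and a directly constructible shim):

HiveShimV100 shim = new HiveShimV100();
// A Double maps to DoubleWritable, a String to Text
Writable d = shim.javaToWritable(3.14d).orElseThrow(IllegalStateException::new);
Writable t = shim.javaToWritable("hello").orElseThrow(IllegalStateException::new);
// Unsupported types yield an empty Optional rather than throwing
boolean supported = shim.javaToWritable(new Object()).isPresent(); // false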

Aggregations

DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 132
Test (org.junit.Test): 85
IntWritable (org.apache.hadoop.io.IntWritable): 71
LongWritable (org.apache.hadoop.io.LongWritable): 70
Text (org.apache.hadoop.io.Text): 57
ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable): 52
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 51
FloatWritable (org.apache.hadoop.io.FloatWritable): 51
ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable): 49
BooleanWritable (org.apache.hadoop.io.BooleanWritable): 46
DeferredJavaObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject): 42
DeferredObject (org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject): 42
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable): 42
PrimitiveObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector): 40
BytesWritable (org.apache.hadoop.io.BytesWritable): 36
ArrayList (java.util.ArrayList): 29
HiveVarcharWritable (org.apache.hadoop.hive.serde2.io.HiveVarcharWritable): 27
HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar): 21
HiveCharWritable (org.apache.hadoop.hive.serde2.io.HiveCharWritable): 21
HiveChar (org.apache.hadoop.hive.common.type.HiveChar): 19