
Example 91 with HiveVarchar

Use of org.apache.hadoop.hive.common.type.HiveVarchar in the prestodb/presto project.

From the class GenericHiveRecordCursor, the method parseStringFromPrimitiveJavaObjectValue:

private static Slice parseStringFromPrimitiveJavaObjectValue(Type type, Object fieldValue) {
    checkState(fieldValue != null, "fieldValue should not be null");
    Slice value;
    if (fieldValue instanceof String) {
        value = Slices.utf8Slice((String) fieldValue);
    } else if (fieldValue instanceof byte[]) {
        value = Slices.wrappedBuffer((byte[]) fieldValue);
    } else if (fieldValue instanceof HiveVarchar) {
        value = Slices.utf8Slice(((HiveVarchar) fieldValue).getValue());
    } else if (fieldValue instanceof HiveChar) {
        value = Slices.utf8Slice(((HiveChar) fieldValue).getValue());
    } else {
        throw new IllegalStateException("unsupported string field type: " + fieldValue.getClass().getName());
    }
    value = trimStringToCharacterLimits(type, value);
    // Copy the slice if the value was trimmed and is now smaller than the backing buffer
    if (!value.isCompact()) {
        return Slices.copyOf(value);
    }
    return value;
}
Also used : Slice(io.airlift.slice.Slice) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar)
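
For context on why getValue() can be wrapped in a Slice directly: HiveVarchar enforces its declared maximum length at construction time, so the value it hands back is already bounded. A minimal sketch (class and variable names are illustrative):

import org.apache.hadoop.hive.common.type.HiveVarchar;

public class HiveVarcharTruncationDemo {
    public static void main(String[] args) {
        // The constructor truncates the value to the declared maximum length.
        HiveVarchar varchar = new HiveVarchar("hello world", 5);
        System.out.println(varchar.getValue());           // prints "hello"
        System.out.println(varchar.getCharacterLength()); // prints 5
    }
}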

Example 92 with HiveVarchar

Use of org.apache.hadoop.hive.common.type.HiveVarchar in the prestodb/presto project.

From the class TestHiveBucketing, the method getHiveBucket:

public static int getHiveBucket(List<Entry<ObjectInspector, Object>> columnBindings, int bucketCount) throws HiveException {
    GenericUDFHash udf = new GenericUDFHash();
    ObjectInspector[] objectInspectors = new ObjectInspector[columnBindings.size()];
    GenericUDF.DeferredObject[] deferredObjects = new GenericUDF.DeferredObject[columnBindings.size()];
    int i = 0;
    for (Entry<ObjectInspector, Object> entry : columnBindings) {
        objectInspectors[i] = entry.getKey();
        if (entry.getValue() != null && entry.getKey() instanceof JavaHiveVarcharObjectInspector) {
            JavaHiveVarcharObjectInspector varcharObjectInspector = (JavaHiveVarcharObjectInspector) entry.getKey();
            deferredObjects[i] = new GenericUDF.DeferredJavaObject(new HiveVarchar(((String) entry.getValue()), varcharObjectInspector.getMaxLength()));
        } else {
            deferredObjects[i] = new GenericUDF.DeferredJavaObject(entry.getValue());
        }
        i++;
    }
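    // initialize() returns the ObjectInspector for the UDF's result;
    // GenericUDFHash hashes its arguments to an int.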
    ObjectInspector udfInspector = udf.initialize(objectInspectors);
    IntObjectInspector inspector = (IntObjectInspector) udfInspector;
    Object result = udf.evaluate(deferredObjects);
    HiveKey hiveKey = new HiveKey();
    hiveKey.setHashCode(inspector.get(result));
    return new DefaultHivePartitioner<>().getBucket(hiveKey, null, bucketCount);
}
Also used : JavaHiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaHiveVarcharObjectInspector) IntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) GenericUDF(org.apache.hadoop.hive.ql.udf.generic.GenericUDF) HiveKey(org.apache.hadoop.hive.ql.io.HiveKey) GenericUDFHash(org.apache.hadoop.hive.ql.udf.generic.GenericUDFHash)
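
A hedged usage sketch for the helper above, assuming Guava's Maps.immutableEntry and the serde2 PrimitiveObjectInspectorFactory are available (method and variable names are illustrative):

public static int exampleBucket() throws HiveException {
    ObjectInspector varcharInspector = PrimitiveObjectInspectorFactory
            .getPrimitiveJavaObjectInspector(new VarcharTypeInfo(10));
    List<Entry<ObjectInspector, Object>> bindings = ImmutableList.of(
            Maps.<ObjectInspector, Object>immutableEntry(varcharInspector, "example"));
    // The varchar branch above wraps the String in a HiveVarchar before hashing.
    return getHiveBucket(bindings, 32); // a bucket index in [0, 32)
}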

Example 93 with HiveVarchar

Use of org.apache.hadoop.hive.common.type.HiveVarchar in the prestodb/presto project.

From the class TestHiveFileFormats, the method testFailForLongVarcharPartitionColumn:

@Test
public void testFailForLongVarcharPartitionColumn() throws Exception {
    TestColumn partitionColumn = new TestColumn("partition_column", getPrimitiveJavaObjectInspector(new VarcharTypeInfo(3)), "test", utf8Slice("tes"), true);
    TestColumn varcharColumn = new TestColumn("varchar_column", getPrimitiveJavaObjectInspector(new VarcharTypeInfo(3)), new HiveVarchar("tes", 3), utf8Slice("tes"));
    List<TestColumn> columns = ImmutableList.of(partitionColumn, varcharColumn);
    HiveErrorCode expectedErrorCode = HIVE_INVALID_PARTITION_VALUE;
    String expectedMessage = "Invalid partition value 'test' for varchar\\(3\\) partition key: partition_column";
    assertThatFileFormat(RCTEXT).withColumns(columns).isFailingForPageSource(new RcFilePageSourceFactory(FUNCTION_AND_TYPE_MANAGER, HDFS_ENVIRONMENT, STATS), expectedErrorCode, expectedMessage).isFailingForRecordCursor(new GenericHiveRecordCursorProvider(HDFS_ENVIRONMENT), expectedErrorCode, expectedMessage);
    assertThatFileFormat(RCBINARY).withColumns(columns).isFailingForPageSource(new RcFilePageSourceFactory(FUNCTION_AND_TYPE_MANAGER, HDFS_ENVIRONMENT, STATS), expectedErrorCode, expectedMessage).isFailingForRecordCursor(new GenericHiveRecordCursorProvider(HDFS_ENVIRONMENT), expectedErrorCode, expectedMessage);
    assertThatFileFormat(ORC).withColumns(columns).isFailingForPageSource(new OrcBatchPageSourceFactory(FUNCTION_AND_TYPE_MANAGER, FUNCTION_RESOLUTION, false, HDFS_ENVIRONMENT, STATS, 100, new StorageOrcFileTailSource(), StripeMetadataSourceFactory.of(new StorageStripeMetadataSource())), expectedErrorCode, expectedMessage);
    assertThatFileFormat(PARQUET).withColumns(columns).withSession(parquetPageSourceSession).isFailingForPageSource(new ParquetPageSourceFactory(FUNCTION_AND_TYPE_MANAGER, FUNCTION_RESOLUTION, HDFS_ENVIRONMENT, STATS, METADATA_READER), expectedErrorCode, expectedMessage);
    assertThatFileFormat(SEQUENCEFILE).withColumns(columns).isFailingForRecordCursor(new GenericHiveRecordCursorProvider(HDFS_ENVIRONMENT), expectedErrorCode, expectedMessage);
    assertThatFileFormat(TEXTFILE).withColumns(columns).isFailingForRecordCursor(new GenericHiveRecordCursorProvider(HDFS_ENVIRONMENT), expectedErrorCode, expectedMessage);
}
Also used : VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) OrcBatchPageSourceFactory(com.facebook.presto.hive.orc.OrcBatchPageSourceFactory) StorageOrcFileTailSource(com.facebook.presto.orc.cache.StorageOrcFileTailSource) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) StorageStripeMetadataSource(com.facebook.presto.orc.StorageStripeMetadataSource) RcFilePageSourceFactory(com.facebook.presto.hive.rcfile.RcFilePageSourceFactory) ParquetPageSourceFactory(com.facebook.presto.hive.parquet.ParquetPageSourceFactory) Test(org.testng.annotations.Test)
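
The failure is expected: the partition value "test" has four characters but the key is declared varchar(3). Hive itself would silently truncate, which is exactly what Presto rejects for partition keys. A minimal illustration:

HiveVarchar truncated = new HiveVarchar("test", 3);
// prints "tes" -- the silent truncation Presto refuses to apply to a partition key
System.out.println(truncated.getValue());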

Example 94 with HiveVarchar

Use of org.apache.hadoop.hive.common.type.HiveVarchar in the apache/flink project.

From the class HiveInspectors, the method getConversion:

/**
 * Gets a conversion for converting a Flink object to a Hive object, given an
 * ObjectInspector and the corresponding Flink LogicalType.
 */
public static HiveObjectConversion getConversion(ObjectInspector inspector, LogicalType dataType, HiveShim hiveShim) {
    if (inspector instanceof PrimitiveObjectInspector) {
        HiveObjectConversion conversion;
        if (inspector instanceof BooleanObjectInspector || inspector instanceof StringObjectInspector || inspector instanceof ByteObjectInspector || inspector instanceof ShortObjectInspector || inspector instanceof IntObjectInspector || inspector instanceof LongObjectInspector || inspector instanceof FloatObjectInspector || inspector instanceof DoubleObjectInspector || inspector instanceof BinaryObjectInspector || inspector instanceof VoidObjectInspector) {
            conversion = IdentityConversion.INSTANCE;
        } else if (inspector instanceof DateObjectInspector) {
            conversion = hiveShim::toHiveDate;
        } else if (inspector instanceof TimestampObjectInspector) {
            conversion = hiveShim::toHiveTimestamp;
        } else if (inspector instanceof HiveCharObjectInspector) {
            conversion = o -> o == null ? null : new HiveChar((String) o, ((CharType) dataType).getLength());
        } else if (inspector instanceof HiveVarcharObjectInspector) {
            conversion = o -> o == null ? null : new HiveVarchar((String) o, ((VarCharType) dataType).getLength());
        } else if (inspector instanceof HiveDecimalObjectInspector) {
            conversion = o -> o == null ? null : HiveDecimal.create((BigDecimal) o);
        } else {
            throw new FlinkHiveUDFException("Unsupported primitive object inspector " + inspector.getClass().getName());
        }
        // currently this happens for constant arguments for UDFs
        if (((PrimitiveObjectInspector) inspector).preferWritable()) {
            conversion = new WritableHiveObjectConversion(conversion, hiveShim);
        }
        return conversion;
    }
    if (inspector instanceof ListObjectInspector) {
        HiveObjectConversion eleConvert = getConversion(((ListObjectInspector) inspector).getListElementObjectInspector(), ((ArrayType) dataType).getElementType(), hiveShim);
        return o -> {
            if (o == null) {
                return null;
            }
            Object[] array = (Object[]) o;
            List<Object> result = new ArrayList<>();
            for (Object ele : array) {
                result.add(eleConvert.toHiveObject(ele));
            }
            return result;
        };
    }
    if (inspector instanceof MapObjectInspector) {
        MapObjectInspector mapInspector = (MapObjectInspector) inspector;
        MapType kvType = (MapType) dataType;
        HiveObjectConversion keyConversion = getConversion(mapInspector.getMapKeyObjectInspector(), kvType.getKeyType(), hiveShim);
        HiveObjectConversion valueConversion = getConversion(mapInspector.getMapValueObjectInspector(), kvType.getValueType(), hiveShim);
        return o -> {
            if (o == null) {
                return null;
            }
            Map<Object, Object> map = (Map) o;
            Map<Object, Object> result = new HashMap<>(map.size());
            for (Map.Entry<Object, Object> entry : map.entrySet()) {
                result.put(keyConversion.toHiveObject(entry.getKey()), valueConversion.toHiveObject(entry.getValue()));
            }
            return result;
        };
    }
    if (inspector instanceof StructObjectInspector) {
        StructObjectInspector structInspector = (StructObjectInspector) inspector;
        List<? extends StructField> structFields = structInspector.getAllStructFieldRefs();
        List<RowType.RowField> rowFields = ((RowType) dataType).getFields();
        HiveObjectConversion[] conversions = new HiveObjectConversion[structFields.size()];
        for (int i = 0; i < structFields.size(); i++) {
            conversions[i] = getConversion(structFields.get(i).getFieldObjectInspector(), rowFields.get(i).getType(), hiveShim);
        }
        return o -> {
            if (o == null) {
                return null;
            }
            Row row = (Row) o;
            List<Object> result = new ArrayList<>(row.getArity());
            for (int i = 0; i < row.getArity(); i++) {
                result.add(conversions[i].toHiveObject(row.getField(i)));
            }
            return result;
        };
    }
    throw new FlinkHiveUDFException(String.format("Flink doesn't support convert object conversion for %s yet", inspector));
}
Also used : DataType(org.apache.flink.table.types.DataType) VoidObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.VoidObjectInspector) Array(java.lang.reflect.Array) WritableConstantHiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantHiveVarcharObjectInspector) StandardStructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector) WritableConstantTimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantTimestampObjectInspector) MapType(org.apache.flink.table.types.logical.MapType) WritableConstantDateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantDateObjectInspector) HiveDecimalObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector) WritableConstantByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantByteObjectInspector) CharType(org.apache.flink.table.types.logical.CharType) HiveChar(org.apache.hadoop.hive.common.type.HiveChar) BooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector) MapTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo) BigDecimal(java.math.BigDecimal) ShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector) Map(java.util.Map) WritableConstantLongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantLongObjectInspector) StructTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) PrimitiveTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo) HiveTypeUtil(org.apache.flink.table.catalog.hive.util.HiveTypeUtil) MapObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector) WritableConstantHiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantHiveCharObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) FloatObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector) StringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector) WritableConstantBinaryObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantBinaryObjectInspector) List(java.util.List) HiveReflectionUtils(org.apache.flink.table.catalog.hive.util.HiveReflectionUtils) LogicalType(org.apache.flink.table.types.logical.LogicalType) VarcharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo) WritableConstantDoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantDoubleObjectInspector) WritableConstantFloatObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantFloatObjectInspector) WritableConstantBooleanObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantBooleanObjectInspector) Row(org.apache.flink.types.Row) DateObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector) StructField(org.apache.hadoop.hive.serde2.objectinspector.StructField) TypeInfoUtils(org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils) ListObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector) HiveVarcharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector) HiveCharObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector) HashMap(java.util.HashMap) IntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector) RowType(org.apache.flink.table.types.logical.RowType) WritableConstantHiveDecimalObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantHiveDecimalObjectInspector) Constructor(java.lang.reflect.Constructor) HiveShim(org.apache.flink.table.catalog.hive.client.HiveShim) ArrayList(java.util.ArrayList) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) PrimitiveObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector) ListTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo) LongObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector) Nonnull(javax.annotation.Nonnull) WritableConstantShortObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantShortObjectInspector) BinaryObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector) WritableConstantStringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantStringObjectInspector) VarCharType(org.apache.flink.table.types.logical.VarCharType) ArrayType(org.apache.flink.table.types.logical.ArrayType) DecimalTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo) FlinkHiveUDFException(org.apache.flink.table.functions.hive.FlinkHiveUDFException) TypeInfo(org.apache.hadoop.hive.serde2.typeinfo.TypeInfo) WritableConstantIntObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantIntObjectInspector) ObjectInspectorFactory(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory) PrimitiveObjectInspectorFactory(org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) ByteObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector) DoubleObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector) Internal(org.apache.flink.annotation.Internal) TimestampObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector) CatalogException(org.apache.flink.table.catalog.exceptions.CatalogException) CharTypeInfo(org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo)
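
A hedged usage sketch for the varchar branch above. It assumes a HiveShim obtained via Flink's HiveShimLoader (the version string is illustrative); a Java object inspector reports preferWritable() as false, so the conversion returns the HiveVarchar directly:

HiveShim hiveShim = HiveShimLoader.loadHiveShim("2.3.4"); // version string is illustrative
ObjectInspector inspector = PrimitiveObjectInspectorFactory
        .getPrimitiveJavaObjectInspector(new VarcharTypeInfo(5));
HiveObjectConversion conversion =
        HiveInspectors.getConversion(inspector, new VarCharType(5), hiveShim);
// The lambda in the HiveVarcharObjectInspector branch caps the value at length 5.
HiveVarchar hv = (HiveVarchar) conversion.toHiveObject("hello world");
System.out.println(hv.getValue()); // prints "hello"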

Example 95 with HiveVarchar

Use of org.apache.hadoop.hive.common.type.HiveVarchar in the apache/flink project.

From the class HiveShimV100, the method javaToWritable:

Optional<Writable> javaToWritable(@Nonnull Object value) {
    Writable writable = null;
    // in case value is already a Writable
    if (value instanceof Writable) {
        writable = (Writable) value;
    } else if (value instanceof Boolean) {
        writable = new BooleanWritable((Boolean) value);
    } else if (value instanceof Byte) {
        writable = new ByteWritable((Byte) value);
    } else if (value instanceof Short) {
        writable = new ShortWritable((Short) value);
    } else if (value instanceof Integer) {
        writable = new IntWritable((Integer) value);
    } else if (value instanceof Long) {
        writable = new LongWritable((Long) value);
    } else if (value instanceof Float) {
        writable = new FloatWritable((Float) value);
    } else if (value instanceof Double) {
        writable = new DoubleWritable((Double) value);
    } else if (value instanceof String) {
        writable = new Text((String) value);
    } else if (value instanceof HiveChar) {
        writable = new HiveCharWritable((HiveChar) value);
    } else if (value instanceof HiveVarchar) {
        writable = new HiveVarcharWritable((HiveVarchar) value);
    } else if (value instanceof HiveDecimal) {
        writable = new HiveDecimalWritable((HiveDecimal) value);
    } else if (value instanceof Date) {
        writable = new DateWritable((Date) value);
    } else if (value instanceof Timestamp) {
        writable = new TimestampWritable((Timestamp) value);
    } else if (value instanceof BigDecimal) {
        HiveDecimal hiveDecimal = HiveDecimal.create((BigDecimal) value);
        writable = new HiveDecimalWritable(hiveDecimal);
    } else if (value instanceof byte[]) {
        writable = new BytesWritable((byte[]) value);
    }
    return Optional.ofNullable(writable);
}
Also used : HiveChar(org.apache.hadoop.hive.common.type.HiveChar) DateWritable(org.apache.hadoop.hive.serde2.io.DateWritable) Writable(org.apache.hadoop.io.Writable) LongWritable(org.apache.hadoop.io.LongWritable) ShortWritable(org.apache.hadoop.hive.serde2.io.ShortWritable) IntWritable(org.apache.hadoop.io.IntWritable) HiveVarcharWritable(org.apache.hadoop.hive.serde2.io.HiveVarcharWritable) BooleanWritable(org.apache.hadoop.io.BooleanWritable) ByteWritable(org.apache.hadoop.hive.serde2.io.ByteWritable) HiveCharWritable(org.apache.hadoop.hive.serde2.io.HiveCharWritable) BytesWritable(org.apache.hadoop.io.BytesWritable) TimestampWritable(org.apache.hadoop.hive.serde2.io.TimestampWritable) DoubleWritable(org.apache.hadoop.hive.serde2.io.DoubleWritable) HiveDecimalWritable(org.apache.hadoop.hive.serde2.io.HiveDecimalWritable) FloatWritable(org.apache.hadoop.io.FloatWritable) Timestamp(java.sql.Timestamp) HiveDecimal(org.apache.hadoop.hive.common.type.HiveDecimal) Text(org.apache.hadoop.io.Text) HiveVarchar(org.apache.hadoop.hive.common.type.HiveVarchar) LocalDate(java.time.LocalDate) CatalogColumnStatisticsDataDate(org.apache.flink.table.catalog.stats.CatalogColumnStatisticsDataDate) Date(java.sql.Date) BigDecimal(java.math.BigDecimal)
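
To illustrate the HiveVarchar branch above: the wrapping is a plain constructor call on the serde2 writable, and the value can be recovered from it. A minimal sketch:

HiveVarchar varchar = new HiveVarchar("abc", 3);
HiveVarcharWritable writable = new HiveVarcharWritable(varchar);
// getHiveVarchar() recovers the wrapped value
System.out.println(writable.getHiveVarchar().getValue()); // prints "abc"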

Aggregations

HiveVarchar (org.apache.hadoop.hive.common.type.HiveVarchar): 95 usages
HiveChar (org.apache.hadoop.hive.common.type.HiveChar): 61 usages
Test (org.junit.Test): 35 usages
Text (org.apache.hadoop.io.Text): 31 usages
HiveDecimal (org.apache.hadoop.hive.common.type.HiveDecimal): 28 usages
HiveVarcharWritable (org.apache.hadoop.hive.serde2.io.HiveVarcharWritable): 27 usages
VarcharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo): 26 usages
HiveDecimalWritable (org.apache.hadoop.hive.serde2.io.HiveDecimalWritable): 23 usages
DoubleWritable (org.apache.hadoop.hive.serde2.io.DoubleWritable): 21 usages
PrimitiveTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo): 21 usages
ArrayList (java.util.ArrayList): 20 usages
Timestamp (org.apache.hadoop.hive.common.type.Timestamp): 20 usages
CharTypeInfo (org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo): 20 usages
LongWritable (org.apache.hadoop.io.LongWritable): 19 usages
Date (org.apache.hadoop.hive.common.type.Date): 18 usages
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 18 usages
ByteWritable (org.apache.hadoop.hive.serde2.io.ByteWritable): 17 usages
ShortWritable (org.apache.hadoop.hive.serde2.io.ShortWritable): 17 usages
BooleanWritable (org.apache.hadoop.io.BooleanWritable): 17 usages
FloatWritable (org.apache.hadoop.io.FloatWritable): 17 usages