
Example 1 with TypeInformationRawType

Use of org.apache.flink.table.types.logical.TypeInformationRawType in project flink by apache.

From class AvroSchemaConverterTest, method validateUserSchema:

private void validateUserSchema(DataType actual) {
    final DataType address =
            DataTypes.ROW(
                    DataTypes.FIELD("num", DataTypes.INT().notNull()),
                    DataTypes.FIELD("street", DataTypes.STRING().notNull()),
                    DataTypes.FIELD("city", DataTypes.STRING().notNull()),
                    DataTypes.FIELD("state", DataTypes.STRING().notNull()),
                    DataTypes.FIELD("zip", DataTypes.STRING().notNull()));
    final DataType user =
            DataTypes.ROW(
                    DataTypes.FIELD("name", DataTypes.STRING().notNull()),
                    DataTypes.FIELD("favorite_number", DataTypes.INT()),
                    DataTypes.FIELD("favorite_color", DataTypes.STRING()),
                    DataTypes.FIELD("type_long_test", DataTypes.BIGINT()),
                    DataTypes.FIELD("type_double_test", DataTypes.DOUBLE().notNull()),
                    DataTypes.FIELD("type_null_test", DataTypes.NULL()),
                    DataTypes.FIELD("type_bool_test", DataTypes.BOOLEAN().notNull()),
                    DataTypes.FIELD("type_array_string", DataTypes.ARRAY(DataTypes.STRING().notNull()).notNull()),
                    DataTypes.FIELD("type_array_boolean", DataTypes.ARRAY(DataTypes.BOOLEAN().notNull()).notNull()),
                    DataTypes.FIELD("type_nullable_array", DataTypes.ARRAY(DataTypes.STRING().notNull())),
                    DataTypes.FIELD("type_enum", DataTypes.STRING().notNull()),
                    DataTypes.FIELD("type_map", DataTypes.MAP(DataTypes.STRING().notNull(), DataTypes.BIGINT().notNull()).notNull()),
                    DataTypes.FIELD("type_fixed", DataTypes.VARBINARY(16)),
                    DataTypes.FIELD("type_union", new AtomicDataType(new TypeInformationRawType<>(false, Types.GENERIC(Object.class)), Object.class)),
                    DataTypes.FIELD("type_nested", address),
                    DataTypes.FIELD("type_bytes", DataTypes.BYTES().notNull()),
                    DataTypes.FIELD("type_date", DataTypes.DATE().notNull()),
                    DataTypes.FIELD("type_time_millis", DataTypes.TIME(3).notNull()),
                    DataTypes.FIELD("type_time_micros", DataTypes.TIME(6).notNull()),
                    DataTypes.FIELD("type_timestamp_millis", DataTypes.TIMESTAMP(3).notNull()),
                    DataTypes.FIELD("type_timestamp_micros", DataTypes.TIMESTAMP(6).notNull()),
                    DataTypes.FIELD("type_decimal_bytes", DataTypes.DECIMAL(4, 2).notNull()),
                    DataTypes.FIELD("type_decimal_fixed", DataTypes.DECIMAL(4, 2).notNull()))
                    .notNull();
    assertEquals(user, actual);
}
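
For orientation, a minimal sketch of the calling side, assuming the Avro "User" record schema is available as a JSON string (the schema literal is elided here, and AvroSchemaConverter.convertToDataType is the conversion entry point this test exercises):

import org.apache.flink.formats.avro.typeutils.AvroSchemaConverter;
import org.apache.flink.table.types.DataType;

// Convert the Avro schema string to a Flink DataType and check it against
// the expected row type built above.
String userSchemaJson = "...";  // Avro record schema for "User" (elided)
DataType actual = AvroSchemaConverter.convertToDataType(userSchemaJson);
validateUserSchema(actual);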
Also used: TypeInformationRawType (org.apache.flink.table.types.logical.TypeInformationRawType), AtomicDataType (org.apache.flink.table.types.AtomicDataType), DataType (org.apache.flink.table.types.DataType)

Example 2 with TypeInformationRawType

Use of org.apache.flink.table.types.logical.TypeInformationRawType in project flink by apache.

From class DataFormatConverters, method getConverterForDataType:

/**
 * Gets the {@link DataFormatConverter} for a {@link DataType}.
 *
 * @param originDataType the originating DataType. DataFormatConverter is oriented to the Java
 *     conversion format; a LogicalType alone has lost that specific Java format, and only
 *     DataType retains the full Java format information.
 */
public static DataFormatConverter getConverterForDataType(DataType originDataType) {
    DataType dataType = originDataType.nullable();
    DataFormatConverter converter = TYPE_TO_CONVERTER.get(dataType);
    if (converter != null) {
        return converter;
    }
    Class<?> clazz = dataType.getConversionClass();
    LogicalType logicalType = dataType.getLogicalType();
    switch(logicalType.getTypeRoot()) {
        case CHAR:
        case VARCHAR:
            if (clazz == String.class) {
                return StringConverter.INSTANCE;
            } else if (clazz == StringData.class) {
                return StringDataConverter.INSTANCE;
            } else {
                throw new RuntimeException("Not support class for VARCHAR: " + clazz);
            }
        case BINARY:
        case VARBINARY:
            return PrimitiveByteArrayConverter.INSTANCE;
        case DECIMAL:
            Tuple2<Integer, Integer> ps = getPrecision(logicalType);
            if (clazz == BigDecimal.class) {
                return new BigDecimalConverter(ps.f0, ps.f1);
            } else if (clazz == DecimalData.class) {
                return new DecimalDataConverter(ps.f0, ps.f1);
            } else {
                throw new RuntimeException("Not support conversion class for DECIMAL: " + clazz);
            }
        case TIMESTAMP_WITHOUT_TIME_ZONE:
            int precisionOfTS = getDateTimePrecision(logicalType);
            if (clazz == Timestamp.class) {
                return new TimestampConverter(precisionOfTS);
            } else if (clazz == LocalDateTime.class) {
                return new LocalDateTimeConverter(precisionOfTS);
            } else if (clazz == TimestampData.class) {
                return new TimestampDataConverter(precisionOfTS);
            } else {
                throw new RuntimeException("Not support conversion class for TIMESTAMP WITHOUT TIME ZONE: " + clazz);
            }
        case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
            int precisionOfLZTS = getDateTimePrecision(logicalType);
            if (clazz == Instant.class) {
                return new InstantConverter(precisionOfLZTS);
            } else if (clazz == Long.class || clazz == long.class) {
                return new LongTimestampDataConverter(precisionOfLZTS);
            } else if (clazz == TimestampData.class) {
                return new TimestampDataConverter(precisionOfLZTS);
            } else if (clazz == Timestamp.class) {
                return new TimestampLtzConverter(precisionOfLZTS);
            } else {
                throw new RuntimeException("Not support conversion class for TIMESTAMP WITH LOCAL TIME ZONE: " + clazz);
            }
        case ARRAY:
            if (clazz == ArrayData.class) {
                return ArrayDataConverter.INSTANCE;
            } else if (clazz == boolean[].class) {
                return PrimitiveBooleanArrayConverter.INSTANCE;
            } else if (clazz == short[].class) {
                return PrimitiveShortArrayConverter.INSTANCE;
            } else if (clazz == int[].class) {
                return PrimitiveIntArrayConverter.INSTANCE;
            } else if (clazz == long[].class) {
                return PrimitiveLongArrayConverter.INSTANCE;
            } else if (clazz == float[].class) {
                return PrimitiveFloatArrayConverter.INSTANCE;
            } else if (clazz == double[].class) {
                return PrimitiveDoubleArrayConverter.INSTANCE;
            }
            if (dataType instanceof CollectionDataType) {
                return new ObjectArrayConverter(
                        ((CollectionDataType) dataType)
                                .getElementDataType()
                                .bridgedTo(clazz.getComponentType()));
            } else {
                // legacy array types carried as BasicArrayTypeInfo
                BasicArrayTypeInfo typeInfo =
                        (BasicArrayTypeInfo)
                                ((LegacyTypeInformationType) dataType.getLogicalType())
                                        .getTypeInformation();
                return new ObjectArrayConverter(
                        fromLegacyInfoToDataType(typeInfo.getComponentInfo())
                                .bridgedTo(clazz.getComponentType()));
            }
        case MAP:
            if (clazz == MapData.class) {
                return MapDataConverter.INSTANCE;
            }
            KeyValueDataType keyValueDataType = (KeyValueDataType) dataType;
            return new MapConverter(keyValueDataType.getKeyDataType(), keyValueDataType.getValueDataType());
        case MULTISET:
            if (clazz == MapData.class) {
                return MapDataConverter.INSTANCE;
            }
            CollectionDataType collectionDataType = (CollectionDataType) dataType;
            return new MapConverter(collectionDataType.getElementDataType(), DataTypes.INT().bridgedTo(Integer.class));
        case ROW:
        case STRUCTURED_TYPE:
            TypeInformation<?> asTypeInfo = fromDataTypeToTypeInfo(dataType);
            if (asTypeInfo instanceof InternalTypeInfo && clazz == RowData.class) {
                LogicalType realLogicalType = ((InternalTypeInfo<?>) asTypeInfo).toLogicalType();
                return new RowDataConverter(getFieldCount(realLogicalType));
            }
            // legacy composite types (Row, Tuple, POJO, case class) resolved via TypeInformation
            CompositeType compositeType = (CompositeType) asTypeInfo;
            DataType[] fieldTypes =
                    Stream.iterate(0, x -> x + 1)
                            .limit(compositeType.getArity())
                            .map((Function<Integer, TypeInformation>) compositeType::getTypeAt)
                            .map(TypeConversions::fromLegacyInfoToDataType)
                            .toArray(DataType[]::new);
            if (clazz == RowData.class) {
                return new RowDataConverter(compositeType.getArity());
            } else if (clazz == Row.class) {
                return new RowConverter(fieldTypes);
            } else if (Tuple.class.isAssignableFrom(clazz)) {
                return new TupleConverter((Class<Tuple>) clazz, fieldTypes);
            } else if (CaseClassConverter.PRODUCT_CLASS != null && CaseClassConverter.PRODUCT_CLASS.isAssignableFrom(clazz)) {
                return new CaseClassConverter((TupleTypeInfoBase) compositeType, fieldTypes);
            } else if (compositeType instanceof PojoTypeInfo) {
                return new PojoConverter((PojoTypeInfo) compositeType, fieldTypes);
            } else {
                throw new IllegalStateException("Cannot find a converter for type " + compositeType + ". If the target should be a converter to scala.Product, then you might have a scala classpath issue.");
            }
        case RAW:
            if (logicalType instanceof RawType) {
                final RawType<?> rawType = (RawType<?>) logicalType;
                if (clazz == RawValueData.class) {
                    return RawValueDataConverter.INSTANCE;
                } else {
                    return new GenericConverter<>(rawType.getTypeSerializer());
                }
            }
            // legacy raw types carried as TypeInformation
            TypeInformation typeInfo =
                    logicalType instanceof LegacyTypeInformationType
                            ? ((LegacyTypeInformationType) logicalType).getTypeInformation()
                            : ((TypeInformationRawType) logicalType).getTypeInformation();
            // planner type info
            if (typeInfo instanceof StringDataTypeInfo) {
                return StringDataConverter.INSTANCE;
            } else if (typeInfo instanceof DecimalDataTypeInfo) {
                DecimalDataTypeInfo decimalType = (DecimalDataTypeInfo) typeInfo;
                return new DecimalDataConverter(decimalType.precision(), decimalType.scale());
            } else if (typeInfo instanceof BigDecimalTypeInfo) {
                BigDecimalTypeInfo decimalType = (BigDecimalTypeInfo) typeInfo;
                return new BigDecimalConverter(decimalType.precision(), decimalType.scale());
            } else if (typeInfo instanceof TimestampDataTypeInfo) {
                TimestampDataTypeInfo timestampDataTypeInfo = (TimestampDataTypeInfo) typeInfo;
                return new TimestampDataConverter(timestampDataTypeInfo.getPrecision());
            } else if (typeInfo instanceof LegacyLocalDateTimeTypeInfo) {
                LegacyLocalDateTimeTypeInfo dateTimeType = (LegacyLocalDateTimeTypeInfo) typeInfo;
                return new LocalDateTimeConverter(dateTimeType.getPrecision());
            } else if (typeInfo instanceof LegacyTimestampTypeInfo) {
                LegacyTimestampTypeInfo timestampType = (LegacyTimestampTypeInfo) typeInfo;
                return new TimestampConverter(timestampType.getPrecision());
            } else if (typeInfo instanceof LegacyInstantTypeInfo) {
                LegacyInstantTypeInfo instantTypeInfo = (LegacyInstantTypeInfo) typeInfo;
                return new InstantConverter(instantTypeInfo.getPrecision());
            }
            if (clazz == RawValueData.class) {
                return RawValueDataConverter.INSTANCE;
            }
            return new GenericConverter(typeInfo.createSerializer(new ExecutionConfig()));
        default:
            throw new RuntimeException("Not support dataType: " + dataType);
    }
}
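
To make the dispatch above concrete, here is a minimal usage sketch. The toInternal/toExternal round trip is the general DataFormatConverter contract; the unchecked cast is needed because getConverterForDataType returns the raw converter type:

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.data.StringData;
import org.apache.flink.table.data.util.DataFormatConverters;
import org.apache.flink.table.types.DataType;

// VARCHAR bridged to java.lang.String resolves to the String converter.
DataType stringType = DataTypes.STRING().bridgedTo(String.class);
@SuppressWarnings("unchecked")
DataFormatConverters.DataFormatConverter<StringData, String> converter =
        (DataFormatConverters.DataFormatConverter<StringData, String>)
                DataFormatConverters.getConverterForDataType(stringType);
StringData internal = converter.toInternal("hello");  // external -> internal format
String external = converter.toExternal(internal);     // internal -> external format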
Also used: DecimalDataTypeInfo (org.apache.flink.table.runtime.typeutils.DecimalDataTypeInfo), LogicalType (org.apache.flink.table.types.logical.LogicalType), PojoTypeInfo (org.apache.flink.api.java.typeutils.PojoTypeInfo), TypeInformation (org.apache.flink.api.common.typeinfo.TypeInformation), RawType (org.apache.flink.table.types.logical.RawType), TypeInformationRawType (org.apache.flink.table.types.logical.TypeInformationRawType), BigDecimalTypeInfo (org.apache.flink.table.runtime.typeutils.BigDecimalTypeInfo), InternalTypeInfo (org.apache.flink.table.runtime.typeutils.InternalTypeInfo), LegacyTimestampTypeInfo (org.apache.flink.table.runtime.typeutils.LegacyTimestampTypeInfo), Row (org.apache.flink.types.Row), BasicArrayTypeInfo (org.apache.flink.api.common.typeinfo.BasicArrayTypeInfo), LocalDateTime (java.time.LocalDateTime), CollectionDataType (org.apache.flink.table.types.CollectionDataType), ExecutionConfig (org.apache.flink.api.common.ExecutionConfig), Timestamp (java.sql.Timestamp), DecimalData (org.apache.flink.table.data.DecimalData), LegacyInstantTypeInfo (org.apache.flink.table.runtime.typeutils.LegacyInstantTypeInfo), GenericRowData (org.apache.flink.table.data.GenericRowData), RowData (org.apache.flink.table.data.RowData), TupleTypeInfoBase (org.apache.flink.api.java.typeutils.TupleTypeInfoBase), DataType (org.apache.flink.table.types.DataType), KeyValueDataType (org.apache.flink.table.types.KeyValueDataType), TypeConversions.fromLegacyInfoToDataType (org.apache.flink.table.types.utils.TypeConversions.fromLegacyInfoToDataType), TimestampDataTypeInfo (org.apache.flink.table.runtime.typeutils.TimestampDataTypeInfo), StringDataTypeInfo (org.apache.flink.table.runtime.typeutils.StringDataTypeInfo), LegacyTypeInformationType (org.apache.flink.table.types.logical.LegacyTypeInformationType), LegacyLocalDateTimeTypeInfo (org.apache.flink.table.runtime.typeutils.LegacyLocalDateTimeTypeInfo), StringData (org.apache.flink.table.data.StringData), Tuple (org.apache.flink.api.java.tuple.Tuple), CompositeType (org.apache.flink.api.common.typeutils.CompositeType)

Example 3 with TypeInformationRawType

Use of org.apache.flink.table.types.logical.TypeInformationRawType in project flink by apache.

From class EqualiserCodeGeneratorTest, method testRaw:

@Test
public void testRaw() {
    RecordEqualiser equaliser =
            new EqualiserCodeGenerator(new LogicalType[] { new TypeInformationRawType<>(Types.INT) })
                    .generateRecordEqualiser("RAW")
                    .newInstance(Thread.currentThread().getContextClassLoader());
    Function<RawValueData<?>, BinaryRowData> func = o -> {
        BinaryRowData row = new BinaryRowData(1);
        BinaryRowWriter writer = new BinaryRowWriter(row);
        writer.writeRawValue(0, o, new RawValueDataSerializer<>(IntSerializer.INSTANCE));
        writer.complete();
        return row;
    };
    assertBoolean(equaliser, func, RawValueData.fromObject(1), RawValueData.fromObject(1), true);
    assertBoolean(equaliser, func, RawValueData.fromObject(1), RawValueData.fromObject(2), false);
}
Also used: Types (org.apache.flink.api.common.typeinfo.Types), TypeInformationRawType (org.apache.flink.table.types.logical.TypeInformationRawType), IntStream (java.util.stream.IntStream), RecordEqualiser (org.apache.flink.table.runtime.generated.RecordEqualiser), TimestampData (org.apache.flink.table.data.TimestampData), Assert.assertTrue (org.junit.Assert.assertTrue), VarCharType (org.apache.flink.table.types.logical.VarCharType), Test (org.junit.Test), BinaryRowData (org.apache.flink.table.data.binary.BinaryRowData), Function (java.util.function.Function), TimestampData.fromEpochMillis (org.apache.flink.table.data.TimestampData.fromEpochMillis), IntSerializer (org.apache.flink.api.common.typeutils.base.IntSerializer), StringData (org.apache.flink.table.data.StringData), BinaryRowWriter (org.apache.flink.table.data.writer.BinaryRowWriter), TimestampType (org.apache.flink.table.types.logical.TimestampType), RawValueDataSerializer (org.apache.flink.table.runtime.typeutils.RawValueDataSerializer), GenericRowData (org.apache.flink.table.data.GenericRowData), LogicalType (org.apache.flink.table.types.logical.LogicalType), RawValueData (org.apache.flink.table.data.RawValueData), Assert (org.junit.Assert)

Example 4 with TypeInformationRawType

Use of org.apache.flink.table.types.logical.TypeInformationRawType in project flink by apache.

From class RawObjectConverter, method create:

// --------------------------------------------------------------------------------------------
// Factory method
// --------------------------------------------------------------------------------------------
public static RawObjectConverter<?> create(DataType dataType) {
    final LogicalType logicalType = dataType.getLogicalType();
    final TypeSerializer<?> serializer;
    if (logicalType instanceof TypeInformationRawType) {
        // legacy raw type: derive the serializer from its TypeInformation
        serializer =
                ((TypeInformationRawType<?>) logicalType)
                        .getTypeInformation()
                        .createSerializer(new ExecutionConfig());
    } else {
        serializer = ((RawType<?>) logicalType).getTypeSerializer();
    }
    return new RawObjectConverter<>(serializer);
}
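
A hedged sketch of how the TypeInformationRawType branch can be reached: DataTypes.RAW(TypeInformation) yields a DataType whose logical type is TypeInformationRawType and takes the first branch above, while DataTypes.RAW(Class, TypeSerializer) yields a RawType and takes the second. MyEvent is a hypothetical placeholder class used only for illustration.

import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.types.DataType;

// Legacy raw type backed by TypeInformation (first branch above).
// MyEvent is a hypothetical user class, not part of the original source.
DataType legacyRaw = DataTypes.RAW(Types.GENERIC(MyEvent.class));
RawObjectConverter<?> converter = RawObjectConverter.create(legacyRaw);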
Also used: TypeInformationRawType (org.apache.flink.table.types.logical.TypeInformationRawType), LogicalType (org.apache.flink.table.types.logical.LogicalType), ExecutionConfig (org.apache.flink.api.common.ExecutionConfig)

Example 5 with TypeInformationRawType

Use of org.apache.flink.table.types.logical.TypeInformationRawType in project flink by apache.

From class RawByteArrayConverter, method create:

// --------------------------------------------------------------------------------------------
// Factory method
// --------------------------------------------------------------------------------------------
public static RawByteArrayConverter<?> create(DataType dataType) {
    final LogicalType logicalType = dataType.getLogicalType();
    final TypeSerializer<?> serializer;
    if (logicalType instanceof TypeInformationRawType) {
        // legacy raw type: derive the serializer from its TypeInformation
        serializer =
                ((TypeInformationRawType<?>) logicalType)
                        .getTypeInformation()
                        .createSerializer(new ExecutionConfig());
    } else {
        serializer = ((RawType<?>) logicalType).getTypeSerializer();
    }
    return new RawByteArrayConverter<>(serializer);
}
Also used: TypeInformationRawType (org.apache.flink.table.types.logical.TypeInformationRawType), LogicalType (org.apache.flink.table.types.logical.LogicalType), ExecutionConfig (org.apache.flink.api.common.ExecutionConfig)

Aggregations

TypeInformationRawType (org.apache.flink.table.types.logical.TypeInformationRawType): 6
LogicalType (org.apache.flink.table.types.logical.LogicalType): 5
ExecutionConfig (org.apache.flink.api.common.ExecutionConfig): 3
GenericRowData (org.apache.flink.table.data.GenericRowData): 3
RawValueData (org.apache.flink.table.data.RawValueData): 2
RowData (org.apache.flink.table.data.RowData): 2
StringData (org.apache.flink.table.data.StringData): 2
BinaryRowData (org.apache.flink.table.data.binary.BinaryRowData): 2
DataType (org.apache.flink.table.types.DataType): 2
Timestamp (java.sql.Timestamp): 1
LocalDateTime (java.time.LocalDateTime): 1
ArrayList (java.util.ArrayList): 1
Function (java.util.function.Function): 1
IntStream (java.util.stream.IntStream): 1
BasicArrayTypeInfo (org.apache.flink.api.common.typeinfo.BasicArrayTypeInfo): 1
TypeInformation (org.apache.flink.api.common.typeinfo.TypeInformation): 1
Types (org.apache.flink.api.common.typeinfo.Types): 1
CompositeType (org.apache.flink.api.common.typeutils.CompositeType): 1
IntSerializer (org.apache.flink.api.common.typeutils.base.IntSerializer): 1
Tuple (org.apache.flink.api.java.tuple.Tuple): 1