Example 1 with LogicalType

Use of org.apache.flink.table.types.logical.LogicalType in project flink by apache.

The class HBaseTableSchema, method fromDataType.

/**
 * Construct a {@link HBaseTableSchema} from a {@link DataType}.
 */
public static HBaseTableSchema fromDataType(DataType physicalRowType) {
    HBaseTableSchema hbaseSchema = new HBaseTableSchema();
    RowType rowType = (RowType) physicalRowType.getLogicalType();
    for (RowType.RowField field : rowType.getFields()) {
        LogicalType fieldType = field.getType();
        if (fieldType.getTypeRoot() == LogicalTypeRoot.ROW) {
            // A nested ROW is interpreted as an HBase column family;
            // each of its fields becomes a column qualifier.
            RowType familyType = (RowType) fieldType;
            String familyName = field.getName();
            for (RowType.RowField qualifier : familyType.getFields()) {
                hbaseSchema.addColumn(familyName, qualifier.getName(), fromLogicalToDataType(qualifier.getType()));
            }
        } else if (fieldType.getChildren().isEmpty()) {
            // An atomic (leaf) field is treated as the HBase row key.
            hbaseSchema.setRowKey(field.getName(), fromLogicalToDataType(fieldType));
        } else {
            throw new IllegalArgumentException("Unsupported field type '" + fieldType + "' for HBase.");
        }
    }
    return hbaseSchema;
}
Also used: RowType (org.apache.flink.table.types.logical.RowType), LogicalType (org.apache.flink.table.types.logical.LogicalType)
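
For context, here is a minimal usage sketch of fromDataType. It assumes HBaseTableSchema and this factory method are on the classpath; the field names are invented for illustration. An atomic top-level field becomes the row key, while each ROW-typed field becomes a column family.

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.types.DataType;

// Hypothetical physical schema: one atomic row-key field plus one column family.
DataType physicalRowType = DataTypes.ROW(
        DataTypes.FIELD("rowkey", DataTypes.STRING()),
        DataTypes.FIELD("family1", DataTypes.ROW(
                DataTypes.FIELD("q1", DataTypes.INT()),
                DataTypes.FIELD("q2", DataTypes.BIGINT()))));
HBaseTableSchema schema = HBaseTableSchema.fromDataType(physicalRowType);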

Example 2 with LogicalType

Use of org.apache.flink.table.types.logical.LogicalType in project flink by apache.

The class AvroToRowDataConverters, method createMapConverter.

private static AvroToRowDataConverter createMapConverter(LogicalType type) {
    // Avro map keys are always strings, so the key converter is fixed to STRING.
    final AvroToRowDataConverter keyConverter = createConverter(DataTypes.STRING().getLogicalType());
    final AvroToRowDataConverter valueConverter = createNullableConverter(extractValueTypeToAvroMap(type));
    return avroObject -> {
        final Map<?, ?> map = (Map<?, ?>) avroObject;
        Map<Object, Object> result = new HashMap<>();
        for (Map.Entry<?, ?> entry : map.entrySet()) {
            Object key = keyConverter.convert(entry.getKey());
            Object value = valueConverter.convert(entry.getValue());
            result.put(key, value);
        }
        return new GenericMapData(result);
    };
}
Also used: ChronoField (java.time.temporal.ChronoField), Array (java.lang.reflect.Array), GenericArrayData (org.apache.flink.table.data.GenericArrayData), HashMap (java.util.HashMap), RowType (org.apache.flink.table.types.logical.RowType), ByteBuffer (java.nio.ByteBuffer), GenericRowData (org.apache.flink.table.data.GenericRowData), DecimalType (org.apache.flink.table.types.logical.DecimalType), GenericMapData (org.apache.flink.table.data.GenericMapData), Map (java.util.Map), LocalTime (java.time.LocalTime), LogicalTypeUtils (org.apache.flink.table.types.logical.utils.LogicalTypeUtils), IndexedRecord (org.apache.avro.generic.IndexedRecord), GenericRecord (org.apache.avro.generic.GenericRecord), RowData (org.apache.flink.table.data.RowData), GenericFixed (org.apache.avro.generic.GenericFixed), TimestampData (org.apache.flink.table.data.TimestampData), DataTypes (org.apache.flink.table.api.DataTypes), DecimalData (org.apache.flink.table.data.DecimalData), ArrayType (org.apache.flink.table.types.logical.ArrayType), Instant (java.time.Instant), AvroSchemaConverter.extractValueTypeToAvroMap (org.apache.flink.formats.avro.typeutils.AvroSchemaConverter.extractValueTypeToAvroMap), Serializable (java.io.Serializable), StringData (org.apache.flink.table.data.StringData), List (java.util.List), LogicalType (org.apache.flink.table.types.logical.LogicalType), LocalDate (java.time.LocalDate), Internal (org.apache.flink.annotation.Internal)
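
Because createMapConverter, createConverter, and AvroToRowDataConverter are implementation details of AvroToRowDataConverters, the following is only a self-contained sketch of the same converter-composition pattern. SimpleConverter and MapConverterSketch are made-up names, and the key/value converters are simplified stand-ins for the real ones.

import java.util.HashMap;
import java.util.Map;
import org.apache.flink.table.data.GenericMapData;
import org.apache.flink.table.data.StringData;

public class MapConverterSketch {

    // Simplified stand-in for the real AvroToRowDataConverter interface (an assumption).
    interface SimpleConverter {
        Object convert(Object avroObject);
    }

    public static void main(String[] args) {
        // Avro map keys arrive as CharSequence; convert them to Flink's StringData.
        SimpleConverter keyConverter = o -> StringData.fromString(o.toString());
        // Identity conversion, pretending the value type is INT.
        SimpleConverter valueConverter = o -> o;

        SimpleConverter mapConverter = avroObject -> {
            Map<?, ?> map = (Map<?, ?>) avroObject;
            Map<Object, Object> result = new HashMap<>();
            for (Map.Entry<?, ?> entry : map.entrySet()) {
                result.put(keyConverter.convert(entry.getKey()),
                        valueConverter.convert(entry.getValue()));
            }
            return new GenericMapData(result);
        };

        Map<String, Integer> avroMap = new HashMap<>();
        avroMap.put("a", 1);
        GenericMapData converted = (GenericMapData) mapConverter.convert(avroMap);
        System.out.println(converted.size()); // prints 1
    }
}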

Example 3 with LogicalType

Use of org.apache.flink.table.types.logical.LogicalType in project flink by apache.

The class RowDataToAvroConverters, method createMapConverter.

private static RowDataToAvroConverter createMapConverter(LogicalType type) {
    LogicalType valueType = extractValueTypeToAvroMap(type);
    final ArrayData.ElementGetter valueGetter = ArrayData.createElementGetter(valueType);
    final RowDataToAvroConverter valueConverter = createConverter(valueType);
    return new RowDataToAvroConverter() {

        private static final long serialVersionUID = 1L;

        @Override
        public Object convert(Schema schema, Object object) {
            final Schema valueSchema = schema.getValueType();
            final MapData mapData = (MapData) object;
            final ArrayData keyArray = mapData.keyArray();
            final ArrayData valueArray = mapData.valueArray();
            final Map<Object, Object> map = new HashMap<>(mapData.size());
            for (int i = 0; i < mapData.size(); ++i) {
                // Avro requires string map keys, so keys are read from the key array as strings.
                final String key = keyArray.getString(i).toString();
                final Object value = valueConverter.convert(valueSchema, valueGetter.getElementOrNull(valueArray, i));
                map.put(key, value);
            }
            return map;
        }
    };
}
Also used: HashMap (java.util.HashMap), MapData (org.apache.flink.table.data.MapData), Schema (org.apache.avro.Schema), LogicalType (org.apache.flink.table.types.logical.LogicalType), ArrayData (org.apache.flink.table.data.ArrayData)
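
The following standalone sketch mirrors the same traversal using only public table-data APIs; MapDataToJavaMapSketch is a made-up class name and the single STRING to INT entry is invented for illustration.

import java.util.HashMap;
import java.util.Map;
import org.apache.flink.table.data.ArrayData;
import org.apache.flink.table.data.GenericMapData;
import org.apache.flink.table.data.MapData;
import org.apache.flink.table.data.StringData;
import org.apache.flink.table.types.logical.IntType;

public class MapDataToJavaMapSketch {
    public static void main(String[] args) {
        // Build a MapData holding a single STRING -> INT entry.
        Map<Object, Object> entries = new HashMap<>();
        entries.put(StringData.fromString("a"), 1);
        MapData mapData = new GenericMapData(entries);

        // Same traversal as createMapConverter above, with the recursive value
        // converter replaced by a plain element getter for INT values.
        ArrayData.ElementGetter valueGetter = ArrayData.createElementGetter(new IntType());
        ArrayData keyArray = mapData.keyArray();
        ArrayData valueArray = mapData.valueArray();
        Map<Object, Object> result = new HashMap<>(mapData.size());
        for (int i = 0; i < mapData.size(); i++) {
            String key = keyArray.getString(i).toString();
            result.put(key, valueGetter.getElementOrNull(valueArray, i));
        }
        System.out.println(result); // prints {a=1}
    }
}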

Example 4 with LogicalType

Use of org.apache.flink.table.types.logical.LogicalType in project flink by apache.

The class AvroSchemaConverter, method extractValueTypeToAvroMap.

public static LogicalType extractValueTypeToAvroMap(LogicalType type) {
    LogicalType keyType;
    LogicalType valueType;
    if (type instanceof MapType) {
        MapType mapType = (MapType) type;
        keyType = mapType.getKeyType();
        valueType = mapType.getValueType();
    } else {
        // Not a MAP, so the type must be a MULTISET: Avro represents a multiset
        // as a map from element to its count, hence the INT value type.
        MultisetType multisetType = (MultisetType) type;
        keyType = multisetType.getElementType();
        valueType = new IntType();
    }
    if (!keyType.is(LogicalTypeFamily.CHARACTER_STRING)) {
        throw new UnsupportedOperationException("Avro format doesn't support non-string key types for maps. " + "The key type is: " + keyType.asSummaryString());
    }
    return valueType;
}
Also used: LogicalType (org.apache.flink.table.types.logical.LogicalType), MultisetType (org.apache.flink.table.types.logical.MultisetType), MapType (org.apache.flink.table.types.logical.MapType), IntType (org.apache.flink.table.types.logical.IntType)
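
A brief usage sketch; the concrete argument types are chosen for illustration.

import org.apache.flink.table.types.logical.IntType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.MapType;
import org.apache.flink.table.types.logical.MultisetType;
import org.apache.flink.table.types.logical.VarCharType;

// MAP<STRING, INT>: the extracted value type is INT.
LogicalType mapValue = AvroSchemaConverter.extractValueTypeToAvroMap(
        new MapType(new VarCharType(VarCharType.MAX_LENGTH), new IntType()));

// MULTISET<STRING>: modeled as MAP<STRING, INT>, so the value type is also INT.
LogicalType multisetValue = AvroSchemaConverter.extractValueTypeToAvroMap(
        new MultisetType(new VarCharType(VarCharType.MAX_LENGTH)));

// MAP<INT, INT> would throw UnsupportedOperationException: Avro map keys must be strings.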

Example 5 with LogicalType

Use of org.apache.flink.table.types.logical.LogicalType in project flink by apache.

The class AvroSchemaConverter, method convertToSchema.

/**
 * Converts Flink SQL {@link LogicalType} (can be nested) into an Avro schema.
 *
 * <p>The "{rowName}_" prefix is used for nested row type names in order to generate
 * unique record names in the schema. Nested record types that differ only in their
 * type name are still compatible.
 *
 * @param logicalType logical type
 * @param rowName the record name
 * @return Avro's {@link Schema} matching this logical type.
 */
public static Schema convertToSchema(LogicalType logicalType, String rowName) {
    int precision;
    boolean nullable = logicalType.isNullable();
    switch(logicalType.getTypeRoot()) {
        case NULL:
            return SchemaBuilder.builder().nullType();
        case BOOLEAN:
            Schema bool = SchemaBuilder.builder().booleanType();
            return nullable ? nullableSchema(bool) : bool;
        case TINYINT:
        case SMALLINT:
        case INTEGER:
            Schema integer = SchemaBuilder.builder().intType();
            return nullable ? nullableSchema(integer) : integer;
        case BIGINT:
            Schema bigint = SchemaBuilder.builder().longType();
            return nullable ? nullableSchema(bigint) : bigint;
        case FLOAT:
            Schema f = SchemaBuilder.builder().floatType();
            return nullable ? nullableSchema(f) : f;
        case DOUBLE:
            Schema d = SchemaBuilder.builder().doubleType();
            return nullable ? nullableSchema(d) : d;
        case CHAR:
        case VARCHAR:
            Schema str = SchemaBuilder.builder().stringType();
            return nullable ? nullableSchema(str) : str;
        case BINARY:
        case VARBINARY:
            Schema binary = SchemaBuilder.builder().bytesType();
            return nullable ? nullableSchema(binary) : binary;
        case TIMESTAMP_WITHOUT_TIME_ZONE:
            // use long to represent TIMESTAMP
            final TimestampType timestampType = (TimestampType) logicalType;
            precision = timestampType.getPrecision();
            org.apache.avro.LogicalType avroLogicalType;
            if (precision <= 3) {
                avroLogicalType = LogicalTypes.timestampMillis();
            } else {
                throw new IllegalArgumentException("Avro does not support TIMESTAMP type " + "with precision: " + precision + ", it only supports precision less than or equal to 3.");
            }
            }
            Schema timestamp = avroLogicalType.addToSchema(SchemaBuilder.builder().longType());
            return nullable ? nullableSchema(timestamp) : timestamp;
        case DATE:
            // use int to represent DATE
            Schema date = LogicalTypes.date().addToSchema(SchemaBuilder.builder().intType());
            return nullable ? nullableSchema(date) : date;
        case TIME_WITHOUT_TIME_ZONE:
            precision = ((TimeType) logicalType).getPrecision();
            if (precision > 3) {
                throw new IllegalArgumentException("Avro does not support TIME type with precision: " + precision + ", it only supports precision less than or equal to 3.");
            }
            // use int to represent TIME; only millisecond precision is supported during deserialization
            Schema time = LogicalTypes.timeMillis().addToSchema(SchemaBuilder.builder().intType());
            return nullable ? nullableSchema(time) : time;
        case DECIMAL:
            DecimalType decimalType = (DecimalType) logicalType;
            // store BigDecimal as byte[]
            Schema decimal = LogicalTypes.decimal(decimalType.getPrecision(), decimalType.getScale()).addToSchema(SchemaBuilder.builder().bytesType());
            return nullable ? nullableSchema(decimal) : decimal;
        case ROW:
            RowType rowType = (RowType) logicalType;
            List<String> fieldNames = rowType.getFieldNames();
            // record names must be unique within a Schema, hence the rowName prefix
            SchemaBuilder.FieldAssembler<Schema> builder = SchemaBuilder.builder().record(rowName).fields();
            for (int i = 0; i < rowType.getFieldCount(); i++) {
                String fieldName = fieldNames.get(i);
                LogicalType fieldType = rowType.getTypeAt(i);
                SchemaBuilder.GenericDefault<Schema> fieldBuilder = builder.name(fieldName).type(convertToSchema(fieldType, rowName + "_" + fieldName));
                if (fieldType.isNullable()) {
                    builder = fieldBuilder.withDefault(null);
                } else {
                    builder = fieldBuilder.noDefault();
                }
            }
            Schema record = builder.endRecord();
            return nullable ? nullableSchema(record) : record;
        case MULTISET:
        case MAP:
            Schema map = SchemaBuilder.builder().map().values(convertToSchema(extractValueTypeToAvroMap(logicalType), rowName));
            return nullable ? nullableSchema(map) : map;
        case ARRAY:
            ArrayType arrayType = (ArrayType) logicalType;
            Schema array = SchemaBuilder.builder().array().items(convertToSchema(arrayType.getElementType(), rowName));
            return nullable ? nullableSchema(array) : array;
        case RAW:
        case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
        default:
            throw new UnsupportedOperationException("Unsupported to derive Schema for type: " + logicalType);
    }
}
Also used: Schema (org.apache.avro.Schema), AvroRowDeserializationSchema (org.apache.flink.formats.avro.AvroRowDeserializationSchema), AvroRowSerializationSchema (org.apache.flink.formats.avro.AvroRowSerializationSchema), RowType (org.apache.flink.table.types.logical.RowType), LogicalType (org.apache.flink.table.types.logical.LogicalType), ArrayType (org.apache.flink.table.types.logical.ArrayType), SchemaBuilder (org.apache.avro.SchemaBuilder), TimestampType (org.apache.flink.table.types.logical.TimestampType), DecimalType (org.apache.flink.table.types.logical.DecimalType)
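
A short usage sketch; the field names are invented. Because the top-level row type is nullable by default, the resulting schema is a union of null and the record.

import org.apache.avro.Schema;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.types.logical.LogicalType;

LogicalType rowType = DataTypes.ROW(
        DataTypes.FIELD("id", DataTypes.BIGINT().notNull()),
        DataTypes.FIELD("name", DataTypes.STRING()))
        .getLogicalType();

// Nested rows would get record names like "record_<fieldName>" via the rowName prefix.
Schema schema = AvroSchemaConverter.convertToSchema(rowType, "record");
System.out.println(schema.toString(true));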

Aggregations

LogicalType (org.apache.flink.table.types.logical.LogicalType): 192
DataType (org.apache.flink.table.types.DataType): 53
RowType (org.apache.flink.table.types.logical.RowType): 53
RowData (org.apache.flink.table.data.RowData): 45
List (java.util.List): 29
ArrayList (java.util.ArrayList): 28
TableException (org.apache.flink.table.api.TableException): 25
TimestampType (org.apache.flink.table.types.logical.TimestampType): 25
Internal (org.apache.flink.annotation.Internal): 21
IntType (org.apache.flink.table.types.logical.IntType): 21
Map (java.util.Map): 20
ValidationException (org.apache.flink.table.api.ValidationException): 20
ArrayType (org.apache.flink.table.types.logical.ArrayType): 19
DecimalType (org.apache.flink.table.types.logical.DecimalType): 19
LocalZonedTimestampType (org.apache.flink.table.types.logical.LocalZonedTimestampType): 17
Test (org.junit.Test): 17
BigIntType (org.apache.flink.table.types.logical.BigIntType): 16
LegacyTypeInformationType (org.apache.flink.table.types.logical.LegacyTypeInformationType): 16
GenericRowData (org.apache.flink.table.data.GenericRowData): 15
Arrays (java.util.Arrays): 14