Example 26 with Field

Use of org.apache.kafka.connect.data.Field in project debezium by debezium.

From the class VerifyRecord, the method valuesFor:

private static Object[] valuesFor(Struct struct) {
    Object[] array = new Object[struct.schema().fields().size()];
    int index = 0;
    for (Field field : struct.schema().fields()) {
        array[index] = struct.get(field);
        ++index;
    }
    return array;
}
Also used: Field(org.apache.kafka.connect.data.Field)
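
As a quick illustration of the pattern, here is a self-contained sketch that builds a small Struct and extracts its values the same way; the schema and values are made up for the demo:

import java.util.Arrays;

import org.apache.kafka.connect.data.Field;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;

public class ValuesForDemo {
    public static void main(String[] args) {
        // Hypothetical schema and record, just to exercise the pattern
        Schema schema = SchemaBuilder.struct()
                .field("id", Schema.INT32_SCHEMA)
                .field("name", Schema.STRING_SCHEMA)
                .build();
        Struct struct = new Struct(schema).put("id", 42).put("name", "debezium");

        // Same logic as valuesFor: one array slot per schema field, in schema order
        Object[] array = new Object[struct.schema().fields().size()];
        int index = 0;
        for (Field field : struct.schema().fields()) {
            array[index++] = struct.get(field);
        }
        System.out.println(Arrays.toString(array)); // [42, debezium]
    }
}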

Example 27 with Field

Use of org.apache.kafka.connect.data.Field in project debezium by debezium.

From the class PostgresSchemaIT, the method assertSchemaContent:

private void assertSchemaContent(String[] fields, Schema[] types, Schema keySchema) {
    IntStream.range(0, fields.length).forEach(i -> {
        String fieldName = fields[i].trim();
        Field field = keySchema.field(Strings.unquoteIdentifierPart(fieldName));
        assertNotNull(fieldName + " not found in schema", field);
        assertEquals("'" + fieldName + "' has incorrect schema.", types[i], field.schema());
    });
}
Also used: Field(org.apache.kafka.connect.data.Field)
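
A hedged sketch of how such an assertion might run end to end, using a hand-built key schema in place of one captured from Postgres and inlining the loop without Debezium's Strings.unquoteIdentifierPart helper:

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;

import org.apache.kafka.connect.data.Field;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.junit.Test;

public class SchemaContentSketchTest {

    @Test
    public void keySchemaHasExpectedFieldsAndTypes() {
        // Hypothetical key schema standing in for one read from the database
        Schema keySchema = SchemaBuilder.struct()
                .field("pk", Schema.INT32_SCHEMA)
                .build();

        String[] fields = { "pk" };
        Schema[] types = { Schema.INT32_SCHEMA };
        for (int i = 0; i < fields.length; i++) {
            String fieldName = fields[i].trim();
            Field field = keySchema.field(fieldName);
            assertNotNull(fieldName + " not found in schema", field);
            assertEquals("'" + fieldName + "' has incorrect schema.", types[i], field.schema());
        }
    }
}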

Example 28 with Field

Use of org.apache.kafka.connect.data.Field in project debezium by debezium.

From the class MySqlValueConverters, the method converter:

@Override
public ValueConverter converter(Column column, Field fieldDefn) {
    // Handle a few MySQL-specific types based upon how they are handled by the MySQL binlog client ...
    String typeName = column.typeName().toUpperCase();
    if (matches(typeName, "JSON")) {
        return (data) -> convertJson(column, fieldDefn, data);
    }
    if (matches(typeName, "GEOMETRY") || matches(typeName, "LINESTRING") || matches(typeName, "POLYGON") || matches(typeName, "MULTIPOINT") || matches(typeName, "MULTILINESTRING") || matches(typeName, "MULTIPOLYGON") || matches(typeName, "GEOMETRYCOLLECTION")) {
        return (data -> convertGeometry(column, fieldDefn, data));
    }
    if (matches(typeName, "POINT")) {
        // backwards compatibility
        return (data -> convertPoint(column, fieldDefn, data));
    }
    if (matches(typeName, "YEAR")) {
        return (data) -> convertYearToInt(column, fieldDefn, data);
    }
    if (matches(typeName, "ENUM")) {
        // Build up the list of allowed ENUM options from the column's type ...
        List<String> options = extractEnumAndSetOptions(column);
        return (data) -> convertEnumToString(options, column, fieldDefn, data);
    }
    if (matches(typeName, "SET")) {
        // Build up the list of allowed SET options from the column's type ...
        List<String> options = extractEnumAndSetOptions(column);
        return (data) -> convertSetToString(options, column, fieldDefn, data);
    }
    if (matches(typeName, "TINYINT UNSIGNED") || matches(typeName, "TINYINT UNSIGNED ZEROFILL")) {
        // Convert TINYINT UNSIGNED internally from SIGNED to UNSIGNED based on the boundary settings
        return (data) -> convertUnsignedTinyint(column, fieldDefn, data);
    }
    if (matches(typeName, "SMALLINT UNSIGNED") || matches(typeName, "SMALLINT UNSIGNED ZEROFILL")) {
        // Convert SMALLINT UNSIGNED internally from SIGNED to UNSIGNED based on the boundary settings
        return (data) -> convertUnsignedSmallint(column, fieldDefn, data);
    }
    if (matches(typeName, "MEDIUMINT UNSIGNED") || matches(typeName, "MEDIUMINT UNSIGNED ZEROFILL")) {
        // Convert MEDIUMINT UNSIGNED internally from SIGNED to UNSIGNED based on the boundary settings
        return (data) -> convertUnsignedMediumint(column, fieldDefn, data);
    }
    if (matches(typeName, "INT UNSIGNED") || matches(typeName, "INT UNSIGNED ZEROFILL")) {
        // Convert INT UNSIGNED internally from SIGNED to UNSIGNED based on the boundary settings
        return (data) -> convertUnsignedInt(column, fieldDefn, data);
    }
    if (matches(typeName, "BIGINT UNSIGNED") || matches(typeName, "BIGINT UNSIGNED ZEROFILL")) {
        switch(super.bigIntUnsignedMode) {
            case LONG:
                return (data) -> convertBigInt(column, fieldDefn, data);
            case PRECISE:
                // Convert BIGINT UNSIGNED internally from SIGNED to UNSIGNED based on the boundary settings
                return (data) -> convertUnsignedBigint(column, fieldDefn, data);
        }
    }
    // We have to convert bytes encoded in the column's character set ...
    switch(column.jdbcType()) {
        case Types.CHAR: // fixed-length
        case Types.VARCHAR: // variable-length
        case Types.LONGVARCHAR: // variable-length
        case Types.CLOB: // variable-length
        case Types.NCHAR: // fixed-length
        case Types.NVARCHAR: // variable-length
        case Types.LONGNVARCHAR: // variable-length
        case Types.NCLOB: // variable-length
        case Types.DATALINK:
        case Types.SQLXML:
            Charset charset = charsetFor(column);
            if (charset != null) {
                logger.debug("Using {} charset by default for column: {}", charset, column);
                return (data) -> convertString(column, fieldDefn, charset, data);
            }
            logger.warn("Using UTF-8 charset by default for column without charset: {}", column);
            return (data) -> convertString(column, fieldDefn, StandardCharsets.UTF_8, data);
        case Types.TIME:
            if (adaptiveTimeMicrosecondsPrecisionMode)
                return data -> convertDurationToMicroseconds(column, fieldDefn, data);
        default:
            break;
    }
    // Otherwise, let the base class handle it ...
    return super.converter(column, fieldDefn);
}
Also used: ChronoField(java.time.temporal.ChronoField) Arrays(java.util.Arrays) Json(io.debezium.data.Json) ByteBuffer(java.nio.ByteBuffer) Schema(org.apache.kafka.connect.data.Schema) BigDecimal(java.math.BigDecimal) Year(io.debezium.time.Year) CharsetMapping(com.mysql.jdbc.CharsetMapping) Charset(java.nio.charset.Charset) Duration(java.time.Duration) IllegalCharsetNameException(java.nio.charset.IllegalCharsetNameException) ZoneOffset(java.time.ZoneOffset) JsonBinary(com.github.shyiko.mysql.binlog.event.deserialization.json.JsonBinary) Strings(io.debezium.util.Strings) AbstractRowsEventDataDeserializer(com.github.shyiko.mysql.binlog.event.deserialization.AbstractRowsEventDataDeserializer) Field(org.apache.kafka.connect.data.Field) TemporalPrecisionMode(io.debezium.jdbc.TemporalPrecisionMode) IOException(java.io.IOException) SourceRecord(org.apache.kafka.connect.source.SourceRecord) StandardCharsets(java.nio.charset.StandardCharsets) ByteOrder(java.nio.ByteOrder) Decimal(org.apache.kafka.connect.data.Decimal) List(java.util.List) OffsetDateTime(java.time.OffsetDateTime) ChronoUnit(java.time.temporal.ChronoUnit) Immutable(io.debezium.annotation.Immutable) Column(io.debezium.relational.Column) ConnectException(org.apache.kafka.connect.errors.ConnectException) JdbcValueConverters(io.debezium.jdbc.JdbcValueConverters) ValueConverter(io.debezium.relational.ValueConverter) SchemaBuilder(org.apache.kafka.connect.data.SchemaBuilder) Temporal(java.time.temporal.Temporal) Types(java.sql.Types)
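
The method resolves a converter once per column and then returns a lambda that is applied to every row value. A minimal standalone sketch of that dispatch pattern follows; the nested ValueConverter interface and the conversions are simplified stand-ins, not Debezium's actual implementations:

public class ConverterDispatchDemo {

    // Simplified stand-in for io.debezium.relational.ValueConverter
    interface ValueConverter {
        Object convert(Object data);
    }

    static ValueConverter converterFor(String typeName) {
        // Resolved once per column; the returned lambda runs on every row value
        switch (typeName.toUpperCase()) {
            case "YEAR":
                return data -> ((Number) data).intValue();
            case "ENUM":
                return data -> String.valueOf(data);
            default:
                return data -> data; // pass-through, mirroring the super.converter fallback
        }
    }

    public static void main(String[] args) {
        ValueConverter yearConverter = converterFor("year");
        System.out.println(yearConverter.convert((short) 2024)); // 2024
    }
}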

Example 29 with Field

Use of org.apache.kafka.connect.data.Field in project kafka by apache.

From the class Cast, the method applyWithSchema:

private R applyWithSchema(R record) {
    Schema valueSchema = operatingSchema(record);
    Schema updatedSchema = getOrBuildSchema(valueSchema);
    // Whole-record casting
    if (wholeValueCastType != null)
        return newRecord(record, updatedSchema, castValueToType(valueSchema, operatingValue(record), wholeValueCastType));
    // Casting within a struct
    final Struct value = requireStruct(operatingValue(record), PURPOSE);
    final Struct updatedValue = new Struct(updatedSchema);
    for (Field field : value.schema().fields()) {
        final Object origFieldValue = value.get(field);
        final Schema.Type targetType = casts.get(field.name());
        final Object newFieldValue = targetType != null ? castValueToType(field.schema(), origFieldValue, targetType) : origFieldValue;
        log.trace("Cast field '{}' from '{}' to '{}'", field.name(), origFieldValue, newFieldValue);
        updatedValue.put(updatedSchema.field(field.name()), newFieldValue);
    }
    return newRecord(record, updatedSchema, updatedValue);
}
Also used: Field(org.apache.kafka.connect.data.Field) Schema(org.apache.kafka.connect.data.Schema) ConnectSchema(org.apache.kafka.connect.data.ConnectSchema) Type(org.apache.kafka.connect.data.Schema.Type) Requirements.requireStruct(org.apache.kafka.connect.transforms.util.Requirements.requireStruct) Struct(org.apache.kafka.connect.data.Struct)
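
The transform can be exercised directly to watch applyWithSchema rebuild a struct. A small sketch, assuming the standard Cast.Value SMT and its "spec" configuration key; the topic name and record contents are illustrative:

import java.util.Collections;

import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.source.SourceRecord;
import org.apache.kafka.connect.transforms.Cast;

public class CastDemo {
    public static void main(String[] args) {
        Cast.Value<SourceRecord> cast = new Cast.Value<>();
        // "spec" lists field:type pairs to cast within the struct
        cast.configure(Collections.singletonMap("spec", "age:string"));

        Schema schema = SchemaBuilder.struct().field("age", Schema.INT32_SCHEMA).build();
        Struct value = new Struct(schema).put("age", 21);
        SourceRecord record = new SourceRecord(null, null, "topic", null, schema, value);

        SourceRecord transformed = cast.apply(record);
        Struct newValue = (Struct) transformed.value();
        System.out.println(newValue.get("age")); // "21", now a string

        cast.close();
    }
}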

Example 30 with Field

Use of org.apache.kafka.connect.data.Field in project kafka by apache.

From the class ExtractField, the method apply:

@Override
public R apply(R record) {
    final Schema schema = operatingSchema(record);
    if (schema == null) {
        final Map<String, Object> value = requireMapOrNull(operatingValue(record), PURPOSE);
        return newRecord(record, null, value == null ? null : value.get(fieldName));
    } else {
        final Struct value = requireStructOrNull(operatingValue(record), PURPOSE);
        Field field = schema.field(fieldName);
        if (field == null) {
            throw new IllegalArgumentException("Unknown field: " + fieldName);
        }
        return newRecord(record, field.schema(), value == null ? null : value.get(fieldName));
    }
}
Also used: Field(org.apache.kafka.connect.data.Field) Schema(org.apache.kafka.connect.data.Schema) Struct(org.apache.kafka.connect.data.Struct)
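
A matching usage sketch for the schema branch, assuming the standard ExtractField.Value SMT and its "field" configuration key; topic and values are illustrative:

import java.util.Collections;

import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.source.SourceRecord;
import org.apache.kafka.connect.transforms.ExtractField;

public class ExtractFieldDemo {
    public static void main(String[] args) {
        ExtractField.Value<SourceRecord> extract = new ExtractField.Value<>();
        // "field" names the struct field to promote to the whole record value
        extract.configure(Collections.singletonMap("field", "id"));

        Schema schema = SchemaBuilder.struct().field("id", Schema.INT64_SCHEMA).build();
        Struct value = new Struct(schema).put("id", 7L);
        SourceRecord record = new SourceRecord(null, null, "topic", null, schema, value);

        SourceRecord extracted = extract.apply(record);
        System.out.println(extracted.value());       // 7
        System.out.println(extracted.valueSchema()); // Schema{INT64}

        extract.close();
    }
}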

Aggregations (usage counts)

Field (org.apache.kafka.connect.data.Field): 82
Struct (org.apache.kafka.connect.data.Struct): 38
Schema (org.apache.kafka.connect.data.Schema): 33
SchemaBuilder (org.apache.kafka.connect.data.SchemaBuilder): 17
DataException (org.apache.kafka.connect.errors.DataException): 14
List (java.util.List): 12
ArrayList (java.util.ArrayList): 11
Requirements.requireStruct (org.apache.kafka.connect.transforms.util.Requirements.requireStruct): 11
HashMap (java.util.HashMap): 10
Map (java.util.Map): 8
Test (org.junit.Test): 8
Date (java.util.Date): 7
ConnectSchema (org.apache.kafka.connect.data.ConnectSchema): 6
KsqlException (io.confluent.ksql.util.KsqlException): 5
BigDecimal (java.math.BigDecimal): 5
ArrayNode (com.fasterxml.jackson.databind.node.ArrayNode): 4
ObjectNode (com.fasterxml.jackson.databind.node.ObjectNode): 4
SchemaKStream (io.confluent.ksql.structured.SchemaKStream): 4
ByteBuffer (java.nio.ByteBuffer): 4
JsonNode (com.fasterxml.jackson.databind.JsonNode): 3