Search in sources:

Example 71 with Field

Use of org.apache.kafka.connect.data.Field in the Apache Kafka project.

The class JsonConverter, method asJsonSchema:

/**
 * Translates a Connect {@link Schema} into the JSON envelope's schema representation.
 *
 * <p>Results are memoized in {@code fromConnectSchemaCache}, so repeated conversions of the
 * same schema instance return the cached node. Container types (array, map, struct) recurse
 * into their element/key/value/field schemas.
 *
 * @param schema the Connect schema to translate; may be {@code null}
 * @return the JSON schema node, or {@code null} when {@code schema} is {@code null}
 * @throws DataException if the schema type has no JSON representation
 */
public ObjectNode asJsonSchema(Schema schema) {
    if (schema == null)
        return null;

    // Fast path: this exact schema instance was translated before.
    ObjectNode memoized = fromConnectSchemaCache.get(schema);
    if (memoized != null)
        return memoized;

    final ObjectNode node;
    switch (schema.type()) {
        // Primitive types copy a canned template node.
        case BOOLEAN:
            node = JsonSchema.BOOLEAN_SCHEMA.deepCopy();
            break;
        case INT8:
            node = JsonSchema.INT8_SCHEMA.deepCopy();
            break;
        case INT16:
            node = JsonSchema.INT16_SCHEMA.deepCopy();
            break;
        case INT32:
            node = JsonSchema.INT32_SCHEMA.deepCopy();
            break;
        case INT64:
            node = JsonSchema.INT64_SCHEMA.deepCopy();
            break;
        case FLOAT32:
            node = JsonSchema.FLOAT_SCHEMA.deepCopy();
            break;
        case FLOAT64:
            node = JsonSchema.DOUBLE_SCHEMA.deepCopy();
            break;
        case STRING:
            node = JsonSchema.STRING_SCHEMA.deepCopy();
            break;
        case BYTES:
            node = JsonSchema.BYTES_SCHEMA.deepCopy();
            break;
        // Container types are built up recursively.
        case ARRAY:
            node = JSON_NODE_FACTORY.objectNode().put(JsonSchema.SCHEMA_TYPE_FIELD_NAME, JsonSchema.ARRAY_TYPE_NAME);
            node.set(JsonSchema.ARRAY_ITEMS_FIELD_NAME, asJsonSchema(schema.valueSchema()));
            break;
        case MAP:
            node = JSON_NODE_FACTORY.objectNode().put(JsonSchema.SCHEMA_TYPE_FIELD_NAME, JsonSchema.MAP_TYPE_NAME);
            node.set(JsonSchema.MAP_KEY_FIELD_NAME, asJsonSchema(schema.keySchema()));
            node.set(JsonSchema.MAP_VALUE_FIELD_NAME, asJsonSchema(schema.valueSchema()));
            break;
        case STRUCT:
            node = JSON_NODE_FACTORY.objectNode().put(JsonSchema.SCHEMA_TYPE_FIELD_NAME, JsonSchema.STRUCT_TYPE_NAME);
            ArrayNode fieldNodes = JSON_NODE_FACTORY.arrayNode();
            for (Field field : schema.fields()) {
                // Deep-copy so the cached child node is not mutated by the name annotation below.
                ObjectNode fieldNode = asJsonSchema(field.schema()).deepCopy();
                fieldNode.put(JsonSchema.STRUCT_FIELD_NAME_FIELD_NAME, field.name());
                fieldNodes.add(fieldNode);
            }
            node.set(JsonSchema.STRUCT_FIELDS_FIELD_NAME, fieldNodes);
            break;
        default:
            throw new DataException("Couldn't translate unsupported schema type " + schema + ".");
    }

    // Attach the common metadata (optionality, name, version, doc, parameters, default).
    node.put(JsonSchema.SCHEMA_OPTIONAL_FIELD_NAME, schema.isOptional());
    if (schema.name() != null)
        node.put(JsonSchema.SCHEMA_NAME_FIELD_NAME, schema.name());
    if (schema.version() != null)
        node.put(JsonSchema.SCHEMA_VERSION_FIELD_NAME, schema.version());
    if (schema.doc() != null)
        node.put(JsonSchema.SCHEMA_DOC_FIELD_NAME, schema.doc());
    if (schema.parameters() != null) {
        ObjectNode paramsNode = JSON_NODE_FACTORY.objectNode();
        for (Map.Entry<String, String> param : schema.parameters().entrySet())
            paramsNode.put(param.getKey(), param.getValue());
        node.set(JsonSchema.SCHEMA_PARAMETERS_FIELD_NAME, paramsNode);
    }
    if (schema.defaultValue() != null)
        node.set(JsonSchema.SCHEMA_DEFAULT_FIELD_NAME, convertToJson(schema, schema.defaultValue()));

    fromConnectSchemaCache.put(schema, node);
    return node;
}
Also used : Field(org.apache.kafka.connect.data.Field) DataException(org.apache.kafka.connect.errors.DataException) ObjectNode(com.fasterxml.jackson.databind.node.ObjectNode) ArrayNode(com.fasterxml.jackson.databind.node.ArrayNode) HashMap(java.util.HashMap) Map(java.util.Map) EnumMap(java.util.EnumMap)

Example 72 with Field

Use of org.apache.kafka.connect.data.Field in the Apache Kafka project.

The class TimestampConverter, method applyValueWithSchema:

/**
 * Copies every field of {@code value} into a new {@link Struct} built from
 * {@code updatedSchema}, converting only the configured timestamp field
 * ({@code config.field}) to the target timestamp type.
 *
 * @param value         the source struct; may be {@code null}
 * @param updatedSchema the schema for the copied struct
 * @return the converted struct, or {@code null} when {@code value} is {@code null}
 */
private Struct applyValueWithSchema(Struct value, Schema updatedSchema) {
    if (value == null) {
        return null;
    }
    final Struct result = new Struct(updatedSchema);
    for (Field field : value.schema().fields()) {
        final boolean isTargetField = field.name().equals(config.field);
        // Only the configured field is converted; all others pass through untouched.
        final Object fieldValue = isTargetField
                ? convertTimestamp(value.get(field), timestampTypeFromSchema(field.schema()))
                : value.get(field);
        result.put(field.name(), fieldValue);
    }
    return result;
}
Also used : Field(org.apache.kafka.connect.data.Field) Struct(org.apache.kafka.connect.data.Struct)

Example 73 with Field

Use of org.apache.kafka.connect.data.Field in the Apache Kafka project.

The class TimestampConverter, method applyWithSchema:

/**
 * Applies the timestamp conversion to a record that carries a schema.
 *
 * <p>Two modes: when {@code config.field} is empty, the record's whole value is treated
 * as the timestamp and converted directly; otherwise the value must be a struct and only
 * the named field is converted. Converted struct schemas are memoized in
 * {@code schemaUpdateCache} keyed by the original schema.
 *
 * @param record the record to transform
 * @return a new record with the converted schema and value
 */
private R applyWithSchema(R record) {
    final Schema schema = operatingSchema(record);

    // Whole-value mode: the value itself is the timestamp.
    if (config.field.isEmpty()) {
        final Object value = operatingValue(record);
        // The new schema is determined solely by the requested target timestamp type.
        final Schema convertedSchema = TRANSLATORS.get(config.type).typeSchema(schema.isOptional());
        return newRecord(record, convertedSchema, convertTimestamp(value, timestampTypeFromSchema(schema)));
    }

    // Field mode: convert one field inside a struct value.
    final Struct structValue = requireStructOrNull(operatingValue(record), PURPOSE);
    Schema convertedSchema = schemaUpdateCache.get(schema);
    if (convertedSchema == null) {
        final SchemaBuilder builder = SchemaUtil.copySchemaBasics(schema, SchemaBuilder.struct());
        for (Field field : schema.fields()) {
            final Schema fieldSchema = field.name().equals(config.field)
                    ? TRANSLATORS.get(config.type).typeSchema(field.schema().isOptional())
                    : field.schema();
            builder.field(field.name(), fieldSchema);
        }
        if (schema.isOptional())
            builder.optional();
        if (schema.defaultValue() != null) {
            // The default value must be converted with the new schema as well.
            builder.defaultValue(applyValueWithSchema((Struct) schema.defaultValue(), builder));
        }
        convertedSchema = builder.build();
        schemaUpdateCache.put(schema, convertedSchema);
    }
    return newRecord(record, convertedSchema, applyValueWithSchema(structValue, convertedSchema));
}
Also used : Field(org.apache.kafka.connect.data.Field) Schema(org.apache.kafka.connect.data.Schema) SchemaBuilder(org.apache.kafka.connect.data.SchemaBuilder) Struct(org.apache.kafka.connect.data.Struct)

Example 74 with Field

Use of org.apache.kafka.connect.data.Field in the Apache Kafka project.

The class ValueToKey, method applyWithSchema:

/**
 * Builds a new record key by extracting the configured {@code fields} from the
 * record's struct value.
 *
 * <p>The derived key schema is memoized in {@code valueToKeySchemaCache} keyed by the
 * value schema, so repeated records with the same value schema reuse it.
 *
 * @param record the record whose value supplies the key fields
 * @return a new record whose key is a struct of the extracted fields
 * @throws DataException if any configured field is absent from the value schema
 */
private R applyWithSchema(R record) {
    final Struct value = requireStruct(record.value(), PURPOSE);
    final Schema valueSchema = value.schema();

    Schema keySchema = valueToKeySchemaCache.get(valueSchema);
    if (keySchema == null) {
        final SchemaBuilder builder = SchemaBuilder.struct();
        for (String fieldName : fields) {
            final Field sourceField = valueSchema.field(fieldName);
            if (sourceField == null) {
                throw new DataException("Field does not exist: " + fieldName);
            }
            builder.field(fieldName, sourceField.schema());
        }
        keySchema = builder.build();
        valueToKeySchemaCache.put(valueSchema, keySchema);
    }

    final Struct key = new Struct(keySchema);
    for (String fieldName : fields) {
        key.put(fieldName, value.get(fieldName));
    }
    return record.newRecord(record.topic(), record.kafkaPartition(), keySchema, key, valueSchema, value, record.timestamp());
}
Also used : Field(org.apache.kafka.connect.data.Field) DataException(org.apache.kafka.connect.errors.DataException) Schema(org.apache.kafka.connect.data.Schema) SchemaBuilder(org.apache.kafka.connect.data.SchemaBuilder) Requirements.requireStruct(org.apache.kafka.connect.transforms.util.Requirements.requireStruct) Struct(org.apache.kafka.connect.data.Struct)

Example 75 with Field

Use of org.apache.kafka.connect.data.Field in the Debezium project.

The class PostgresValueConverter, method createArrayConverter:

/**
 * Creates a {@link ValueConverter} for a PostgreSQL array column by deriving a synthetic,
 * optional "element" column from the array's element type and delegating each element's
 * conversion to the converter for that synthetic column.
 *
 * @param column    the array column being converted
 * @param fieldDefn the Connect field definition for the array column
 * @return a converter that maps raw array data via {@code convertArray}
 */
private ValueConverter createArrayConverter(Column column, Field fieldDefn) {
    final PostgresType arrayType = typeRegistry.get(column.nativeType());
    final PostgresType elementType = arrayType.getElementType();
    final String elementColumnName = column.name() + "-element";
    // Synthesize an optional column describing a single array element, inheriting
    // scale and length from the array column itself.
    final Column elementColumn = Column.editor()
            .name(elementColumnName)
            .jdbcType(elementType.getJdbcId())
            .nativeType(elementType.getOid())
            .type(elementType.getName())
            .optional(true)
            .scale(column.scale())
            .length(column.length())
            .create();
    final Schema elementSchema = schemaBuilder(elementColumn).optional().build();
    final Field elementField = new Field(elementColumnName, 0, elementSchema);
    final ValueConverter elementConverter = converter(elementColumn, elementField);
    return data -> convertArray(column, fieldDefn, elementConverter, data);
}
Also used : PGInterval(org.postgresql.util.PGInterval) Json(io.debezium.data.Json) NumberConversions(io.debezium.util.NumberConversions) MicroDuration(io.debezium.time.MicroDuration) Date(java.util.Date) LocalDateTime(java.time.LocalDateTime) Schema(org.apache.kafka.connect.data.Schema) PGobject(org.postgresql.util.PGobject) BigDecimal(java.math.BigDecimal) SQLException(java.sql.SQLException) PgProto(io.debezium.connector.postgresql.proto.PgProto) ZonedTime(io.debezium.time.ZonedTime) LocalTime(java.time.LocalTime) VariableScaleDecimal(io.debezium.data.VariableScaleDecimal) BigInteger(java.math.BigInteger) ZoneOffset(java.time.ZoneOffset) Point(io.debezium.data.geometry.Point) Geography(io.debezium.data.geometry.Geography) ZonedTimestamp(io.debezium.time.ZonedTimestamp) OffsetTime(java.time.OffsetTime) Bits(io.debezium.data.Bits) Field(org.apache.kafka.connect.data.Field) TemporalPrecisionMode(io.debezium.jdbc.TemporalPrecisionMode) Instant(java.time.Instant) Collectors(java.util.stream.Collectors) TimeUnit(java.util.concurrent.TimeUnit) Decimal(org.apache.kafka.connect.data.Decimal) Geometry(io.debezium.data.geometry.Geometry) List(java.util.List) OffsetDateTime(java.time.OffsetDateTime) SpecialValueDecimal(io.debezium.data.SpecialValueDecimal) Column(io.debezium.relational.Column) JdbcValueConverters(io.debezium.jdbc.JdbcValueConverters) ValueConverter(io.debezium.relational.ValueConverter) Optional(java.util.Optional) SchemaBuilder(org.apache.kafka.connect.data.SchemaBuilder) Uuid(io.debezium.data.Uuid) UnsupportedEncodingException(java.io.UnsupportedEncodingException) Collections(java.util.Collections) PGpoint(org.postgresql.geometric.PGpoint) Field(org.apache.kafka.connect.data.Field) Column(io.debezium.relational.Column) Schema(org.apache.kafka.connect.data.Schema) ValueConverter(io.debezium.relational.ValueConverter)

Aggregations

Field (org.apache.kafka.connect.data.Field)82 Struct (org.apache.kafka.connect.data.Struct)38 Schema (org.apache.kafka.connect.data.Schema)33 SchemaBuilder (org.apache.kafka.connect.data.SchemaBuilder)17 DataException (org.apache.kafka.connect.errors.DataException)14 List (java.util.List)12 ArrayList (java.util.ArrayList)11 Requirements.requireStruct (org.apache.kafka.connect.transforms.util.Requirements.requireStruct)11 HashMap (java.util.HashMap)10 Map (java.util.Map)8 Test (org.junit.Test)8 Date (java.util.Date)7 ConnectSchema (org.apache.kafka.connect.data.ConnectSchema)6 KsqlException (io.confluent.ksql.util.KsqlException)5 BigDecimal (java.math.BigDecimal)5 ArrayNode (com.fasterxml.jackson.databind.node.ArrayNode)4 ObjectNode (com.fasterxml.jackson.databind.node.ObjectNode)4 SchemaKStream (io.confluent.ksql.structured.SchemaKStream)4 ByteBuffer (java.nio.ByteBuffer)4 JsonNode (com.fasterxml.jackson.databind.JsonNode)3