Search in sources:

Example 71 with SchemaBuilder

use of org.apache.kafka.connect.data.SchemaBuilder in project apache-kafka-on-k8s by banzaicloud.

From the class ValueToKey, the method applyWithSchema:

/**
 * Builds a new record whose key is a Struct assembled from the configured
 * {@code fields} of the record's Struct value. The derived key schema is
 * cached per value schema to avoid rebuilding it for every record.
 *
 * @param record the record whose value supplies the key fields; its value
 *               must be a Struct with a schema
 * @return a copy of {@code record} with the derived key and key schema
 */
private R applyWithSchema(R record) {
    final Struct value = requireStruct(record.value(), PURPOSE);
    Schema keySchema = valueToKeySchemaCache.get(value.schema());
    if (keySchema == null) {
        final SchemaBuilder keySchemaBuilder = SchemaBuilder.struct();
        for (String field : fields) {
            // Guard against a configured field that is absent from the value
            // schema: fail with a descriptive DataException instead of the
            // bare NullPointerException the unchecked dereference would throw.
            // (Fully qualified to avoid requiring a new import in this file.)
            final org.apache.kafka.connect.data.Field keyField = value.schema().field(field);
            if (keyField == null) {
                throw new org.apache.kafka.connect.errors.DataException(
                        "Field does not exist: " + field);
            }
            keySchemaBuilder.field(field, keyField.schema());
        }
        keySchema = keySchemaBuilder.build();
        valueToKeySchemaCache.put(value.schema(), keySchema);
    }
    final Struct key = new Struct(keySchema);
    for (String field : fields) {
        key.put(field, value.get(field));
    }
    // Only the key/key-schema change; topic, partition, value and timestamp
    // are carried over unchanged.
    return record.newRecord(record.topic(), record.kafkaPartition(), keySchema, key, value.schema(), value, record.timestamp());
}
Also used : Schema(org.apache.kafka.connect.data.Schema) SchemaBuilder(org.apache.kafka.connect.data.SchemaBuilder) Requirements.requireStruct(org.apache.kafka.connect.transforms.util.Requirements.requireStruct) Struct(org.apache.kafka.connect.data.Struct)

Example 72 with SchemaBuilder

use of org.apache.kafka.connect.data.SchemaBuilder in project kafka by apache.

From the class Cast, the method getOrBuildSchema:

/**
 * Returns the post-cast schema for {@code valueSchema}, computing and
 * caching it on first use.
 *
 * <p>For a whole-value cast the target primitive type replaces the schema
 * outright; otherwise the struct is rebuilt field by field, substituting the
 * configured cast type where one exists and copying optionality and default
 * values (defaults are themselves cast to the new type).
 */
private Schema getOrBuildSchema(Schema valueSchema) {
    // Fast path: schema already derived for this input schema.
    final Schema cached = schemaUpdateCache.get(valueSchema);
    if (cached != null) {
        return cached;
    }
    final SchemaBuilder builder;
    if (wholeValueCastType == null) {
        // Struct value: rebuild the struct, swapping in the cast target type
        // for every field that has a configured cast.
        builder = SchemaUtil.copySchemaBasics(valueSchema, SchemaBuilder.struct());
        for (Field field : valueSchema.fields()) {
            if (!casts.containsKey(field.name())) {
                // No cast configured for this field — keep its schema as-is.
                builder.field(field.name(), field.schema());
                continue;
            }
            final SchemaBuilder castedField = convertFieldType(casts.get(field.name()));
            final Schema originalFieldSchema = field.schema();
            if (originalFieldSchema.isOptional()) {
                castedField.optional();
            }
            if (originalFieldSchema.defaultValue() != null) {
                // The old default must be converted to the new field type.
                castedField.defaultValue(castValueToType(originalFieldSchema, originalFieldSchema.defaultValue(), castedField.type()));
            }
            builder.field(field.name(), castedField.build());
        }
    } else {
        // Whole-value cast: the entire value becomes the target type.
        builder = SchemaUtil.copySchemaBasics(valueSchema, convertFieldType(wholeValueCastType));
    }
    // Carry over top-level optionality and (cast) default value.
    if (valueSchema.isOptional()) {
        builder.optional();
    }
    if (valueSchema.defaultValue() != null) {
        builder.defaultValue(castValueToType(valueSchema, valueSchema.defaultValue(), builder.type()));
    }
    final Schema derived = builder.build();
    schemaUpdateCache.put(valueSchema, derived);
    return derived;
}
Also used : Field(org.apache.kafka.connect.data.Field) Schema(org.apache.kafka.connect.data.Schema) ConnectSchema(org.apache.kafka.connect.data.ConnectSchema) SchemaBuilder(org.apache.kafka.connect.data.SchemaBuilder)

Example 73 with SchemaBuilder

use of org.apache.kafka.connect.data.SchemaBuilder in project kafka by apache.

From the class Flatten, the method applyWithSchema:

/**
 * Flattens a schema'd (possibly null) Struct value into a record with a
 * flat schema. The flattened schema is computed once per input schema and
 * cached; null values are passed through with only the schema replaced.
 */
private R applyWithSchema(R record) {
    final Struct value = requireStructOrNull(operatingValue(record), PURPOSE);
    final Schema schema = operatingSchema(record);
    Schema flattenedSchema = schemaUpdateCache.get(schema);
    if (flattenedSchema == null) {
        // First time we see this schema: recursively flatten it and cache.
        final SchemaBuilder flattened = SchemaUtil.copySchemaBasics(schema, SchemaBuilder.struct());
        buildUpdatedSchema(schema, "", flattened, schema.isOptional(), (Struct) schema.defaultValue());
        flattenedSchema = flattened.build();
        schemaUpdateCache.put(schema, flattenedSchema);
    }
    if (value == null) {
        // Null value stays null; only the schema is swapped.
        return newRecord(record, flattenedSchema, null);
    }
    final Struct flattenedValue = new Struct(flattenedSchema);
    buildWithSchema(value, "", flattenedValue);
    return newRecord(record, flattenedSchema, flattenedValue);
}
Also used : Schema(org.apache.kafka.connect.data.Schema) ConnectSchema(org.apache.kafka.connect.data.ConnectSchema) SchemaBuilder(org.apache.kafka.connect.data.SchemaBuilder) Struct(org.apache.kafka.connect.data.Struct)

Example 74 with SchemaBuilder

use of org.apache.kafka.connect.data.SchemaBuilder in project kafka by apache.

From the class HeaderFrom, the method moveSchema:

/**
 * Returns a copy of {@code operatingSchema} with the configured
 * {@code fields} (those moved into headers) removed, caching the result
 * per input schema.
 */
private Schema moveSchema(Schema operatingSchema) {
    final Schema cached = this.moveSchemaCache.get(operatingSchema);
    if (cached != null) {
        return cached;
    }
    // Rebuild the schema, dropping every field that is being moved out.
    final SchemaBuilder remaining = SchemaUtil.copySchemaBasics(operatingSchema, SchemaBuilder.struct());
    for (Field field : operatingSchema.fields()) {
        if (!fields.contains(field.name())) {
            remaining.field(field.name(), field.schema());
        }
    }
    final Schema built = remaining.build();
    moveSchemaCache.put(operatingSchema, built);
    return built;
}
Also used : Field(org.apache.kafka.connect.data.Field) Schema(org.apache.kafka.connect.data.Schema) SchemaBuilder(org.apache.kafka.connect.data.SchemaBuilder)

Example 75 with SchemaBuilder

use of org.apache.kafka.connect.data.SchemaBuilder in project kafka by apache.

From the class JsonConverter, the method asConnectSchema:

/**
 * Converts a JSON-encoded schema node back into a Connect {@link Schema}.
 *
 * <p>The type is read from the node's {@code type} field; array/map/struct
 * types recurse into their element/key/value/field schemas. Common metadata
 * (optional, name, version, doc, parameters, default) is then applied, and
 * the resulting schema is cached keyed by the JSON node.
 *
 * @param jsonSchema the JSON schema node; a JSON null yields a null Schema
 * @return the corresponding Connect Schema, or null for a null input node
 * @throws DataException if the type field is missing/non-textual, a
 *         composite type omits a required sub-schema, a struct field is
 *         malformed, a parameter value is not a string, or the type name
 *         is unknown
 */
public Schema asConnectSchema(JsonNode jsonSchema) {
    if (jsonSchema.isNull())
        return null;
    // Schemas are immutable once built, so cache by the JSON node itself.
    Schema cached = toConnectSchemaCache.get(jsonSchema);
    if (cached != null)
        return cached;
    JsonNode schemaTypeNode = jsonSchema.get(JsonSchema.SCHEMA_TYPE_FIELD_NAME);
    if (schemaTypeNode == null || !schemaTypeNode.isTextual())
        throw new DataException("Schema must contain 'type' field");
    // Select the builder for the declared type; composite types recurse.
    final SchemaBuilder builder;
    switch(schemaTypeNode.textValue()) {
        case JsonSchema.BOOLEAN_TYPE_NAME:
            builder = SchemaBuilder.bool();
            break;
        case JsonSchema.INT8_TYPE_NAME:
            builder = SchemaBuilder.int8();
            break;
        case JsonSchema.INT16_TYPE_NAME:
            builder = SchemaBuilder.int16();
            break;
        case JsonSchema.INT32_TYPE_NAME:
            builder = SchemaBuilder.int32();
            break;
        case JsonSchema.INT64_TYPE_NAME:
            builder = SchemaBuilder.int64();
            break;
        case JsonSchema.FLOAT_TYPE_NAME:
            builder = SchemaBuilder.float32();
            break;
        case JsonSchema.DOUBLE_TYPE_NAME:
            builder = SchemaBuilder.float64();
            break;
        case JsonSchema.BYTES_TYPE_NAME:
            builder = SchemaBuilder.bytes();
            break;
        case JsonSchema.STRING_TYPE_NAME:
            builder = SchemaBuilder.string();
            break;
        case JsonSchema.ARRAY_TYPE_NAME:
            // Arrays require an element schema; recurse to build it.
            JsonNode elemSchema = jsonSchema.get(JsonSchema.ARRAY_ITEMS_FIELD_NAME);
            if (elemSchema == null || elemSchema.isNull())
                throw new DataException("Array schema did not specify the element type");
            builder = SchemaBuilder.array(asConnectSchema(elemSchema));
            break;
        case JsonSchema.MAP_TYPE_NAME:
            // Maps require both key and value schemas; recurse for each.
            JsonNode keySchema = jsonSchema.get(JsonSchema.MAP_KEY_FIELD_NAME);
            if (keySchema == null)
                throw new DataException("Map schema did not specify the key type");
            JsonNode valueSchema = jsonSchema.get(JsonSchema.MAP_VALUE_FIELD_NAME);
            if (valueSchema == null)
                throw new DataException("Map schema did not specify the value type");
            builder = SchemaBuilder.map(asConnectSchema(keySchema), asConnectSchema(valueSchema));
            break;
        case JsonSchema.STRUCT_TYPE_NAME:
            builder = SchemaBuilder.struct();
            JsonNode fields = jsonSchema.get(JsonSchema.STRUCT_FIELDS_FIELD_NAME);
            if (fields == null || !fields.isArray())
                throw new DataException("Struct schema's \"fields\" argument is not an array.");
            for (JsonNode field : fields) {
                JsonNode jsonFieldName = field.get(JsonSchema.STRUCT_FIELD_NAME_FIELD_NAME);
                if (jsonFieldName == null || !jsonFieldName.isTextual())
                    throw new DataException("Struct schema's field name not specified properly");
                // Each field node carries its own schema envelope alongside
                // the field name, so recurse on the field node itself.
                builder.field(jsonFieldName.asText(), asConnectSchema(field));
            }
            break;
        default:
            throw new DataException("Unknown schema type: " + schemaTypeNode.textValue());
    }
    // Apply shared metadata fields, each optional in the JSON envelope.
    JsonNode schemaOptionalNode = jsonSchema.get(JsonSchema.SCHEMA_OPTIONAL_FIELD_NAME);
    if (schemaOptionalNode != null && schemaOptionalNode.isBoolean() && schemaOptionalNode.booleanValue())
        builder.optional();
    else
        // Absent or non-true optional flag means the schema is required.
        builder.required();
    JsonNode schemaNameNode = jsonSchema.get(JsonSchema.SCHEMA_NAME_FIELD_NAME);
    if (schemaNameNode != null && schemaNameNode.isTextual())
        builder.name(schemaNameNode.textValue());
    JsonNode schemaVersionNode = jsonSchema.get(JsonSchema.SCHEMA_VERSION_FIELD_NAME);
    if (schemaVersionNode != null && schemaVersionNode.isIntegralNumber()) {
        builder.version(schemaVersionNode.intValue());
    }
    JsonNode schemaDocNode = jsonSchema.get(JsonSchema.SCHEMA_DOC_FIELD_NAME);
    if (schemaDocNode != null && schemaDocNode.isTextual())
        builder.doc(schemaDocNode.textValue());
    // Parameters are a string-to-string map; non-string values are rejected.
    JsonNode schemaParamsNode = jsonSchema.get(JsonSchema.SCHEMA_PARAMETERS_FIELD_NAME);
    if (schemaParamsNode != null && schemaParamsNode.isObject()) {
        Iterator<Map.Entry<String, JsonNode>> paramsIt = schemaParamsNode.fields();
        while (paramsIt.hasNext()) {
            Map.Entry<String, JsonNode> entry = paramsIt.next();
            JsonNode paramValue = entry.getValue();
            if (!paramValue.isTextual())
                throw new DataException("Schema parameters must have string values.");
            builder.parameter(entry.getKey(), paramValue.textValue());
        }
    }
    // The default value is decoded against the schema built so far.
    JsonNode schemaDefaultNode = jsonSchema.get(JsonSchema.SCHEMA_DEFAULT_FIELD_NAME);
    if (schemaDefaultNode != null)
        builder.defaultValue(convertToConnect(builder, schemaDefaultNode));
    Schema result = builder.build();
    toConnectSchemaCache.put(jsonSchema, result);
    return result;
}
Also used : DataException(org.apache.kafka.connect.errors.DataException) Schema(org.apache.kafka.connect.data.Schema) ConnectSchema(org.apache.kafka.connect.data.ConnectSchema) SchemaBuilder(org.apache.kafka.connect.data.SchemaBuilder) JsonNode(com.fasterxml.jackson.databind.JsonNode) HashMap(java.util.HashMap) Map(java.util.Map) EnumMap(java.util.EnumMap)

Aggregations

SchemaBuilder (org.apache.kafka.connect.data.SchemaBuilder)88 Schema (org.apache.kafka.connect.data.Schema)40 Test (org.junit.Test)40 Struct (org.apache.kafka.connect.data.Struct)23 Field (org.apache.kafka.connect.data.Field)13 SourceRecord (org.apache.kafka.connect.source.SourceRecord)13 Test (org.junit.jupiter.api.Test)9 ConnectSchema (org.apache.kafka.connect.data.ConnectSchema)6 BigDecimal (java.math.BigDecimal)5 Date (java.util.Date)5 Requirements.requireStruct (org.apache.kafka.connect.transforms.util.Requirements.requireStruct)5 ArrayList (java.util.ArrayList)4 HashMap (java.util.HashMap)4 DataException (org.apache.kafka.connect.errors.DataException)4 KsqlStream (io.confluent.ksql.metastore.KsqlStream)3 KsqlTopic (io.confluent.ksql.metastore.KsqlTopic)3 Expression (io.confluent.ksql.parser.tree.Expression)3 JsonNode (com.fasterxml.jackson.databind.JsonNode)2 SelectItem (io.confluent.ksql.parser.tree.SelectItem)2 SingleColumn (io.confluent.ksql.parser.tree.SingleColumn)2