Search in sources :

Example 66 with SchemaBuilder

use of org.apache.kafka.connect.data.SchemaBuilder in project connect-utils by jcustenborder.

The builder method of the class SchemaHelper.

/**
 * Derives an optional {@link SchemaBuilder} for the supplied value.
 *
 * <p>Types present in the {@code PRIMITIVES} map are built directly from the mapped
 * {@link Schema.Type}; {@link Date} values use the Timestamp logical type and
 * {@link BigDecimal} values use the Decimal logical type, preserving the value's scale.
 *
 * @param input value to derive a schema builder for; must not be null.
 * @return an optional {@link SchemaBuilder} matching the input's type.
 * @throws NullPointerException if {@code input} is null.
 * @throws UnsupportedOperationException if the input's type is not supported.
 */
public static SchemaBuilder builder(Object input) {
    Preconditions.checkNotNull(input, "input cannot be null.");
    final Class<?> inputClass = input.getClass();
    if (PRIMITIVES.containsKey(inputClass)) {
        // Plain primitive: build directly from the mapped Connect type.
        return SchemaBuilder.type(PRIMITIVES.get(inputClass)).optional();
    }
    if (input instanceof Date) {
        // java.util.Date maps to the Kafka Connect Timestamp logical type.
        return Timestamp.builder().optional();
    }
    if (input instanceof BigDecimal) {
        // Decimal logical type carries the value's scale in the schema.
        return Decimal.builder(((BigDecimal) input).scale()).optional();
    }
    throw new UnsupportedOperationException(String.format("Unsupported Type: %s", input.getClass()));
}
Also used : Schema(org.apache.kafka.connect.data.Schema) SchemaBuilder(org.apache.kafka.connect.data.SchemaBuilder) Date(java.util.Date) BigDecimal(java.math.BigDecimal)

Example 67 with SchemaBuilder

use of org.apache.kafka.connect.data.SchemaBuilder in project apache-kafka-on-k8s by banzaicloud.

The asConnectSchema method of the class JsonConverter.

/**
 * Converts a JSON envelope schema node into a Kafka Connect {@link Schema}.
 *
 * <p>Results are memoized in {@code toConnectSchemaCache} keyed by the JSON node. The schema's
 * "type" field selects the builder; array/map/struct types recurse into their element, key/value,
 * and field schemas respectively. Optionality, name, version, doc, parameters, and the default
 * value are then applied from their respective envelope fields.
 *
 * @param jsonSchema the JSON schema node; a JSON null yields a null (schemaless) result.
 * @return the resolved Connect schema, or null for a JSON null input.
 * @throws DataException if the "type" field is missing or not textual, if a composite type is
 *         missing required sub-schemas, if a struct field name is malformed, if a schema
 *         parameter value is not a string, or if the type name is unknown.
 */
public Schema asConnectSchema(JsonNode jsonSchema) {
    // JSON null means "no schema" (schemaless data).
    if (jsonSchema.isNull())
        return null;
    // Fast path: reuse a previously converted schema for an identical JSON node.
    Schema cached = toConnectSchemaCache.get(jsonSchema);
    if (cached != null)
        return cached;
    JsonNode schemaTypeNode = jsonSchema.get(JsonSchema.SCHEMA_TYPE_FIELD_NAME);
    if (schemaTypeNode == null || !schemaTypeNode.isTextual())
        throw new DataException("Schema must contain 'type' field");
    final SchemaBuilder builder;
    switch(schemaTypeNode.textValue()) {
        case JsonSchema.BOOLEAN_TYPE_NAME:
            builder = SchemaBuilder.bool();
            break;
        case JsonSchema.INT8_TYPE_NAME:
            builder = SchemaBuilder.int8();
            break;
        case JsonSchema.INT16_TYPE_NAME:
            builder = SchemaBuilder.int16();
            break;
        case JsonSchema.INT32_TYPE_NAME:
            builder = SchemaBuilder.int32();
            break;
        case JsonSchema.INT64_TYPE_NAME:
            builder = SchemaBuilder.int64();
            break;
        case JsonSchema.FLOAT_TYPE_NAME:
            builder = SchemaBuilder.float32();
            break;
        case JsonSchema.DOUBLE_TYPE_NAME:
            builder = SchemaBuilder.float64();
            break;
        case JsonSchema.BYTES_TYPE_NAME:
            builder = SchemaBuilder.bytes();
            break;
        case JsonSchema.STRING_TYPE_NAME:
            builder = SchemaBuilder.string();
            break;
        case JsonSchema.ARRAY_TYPE_NAME:
            // Arrays require an element schema; recurse to convert it.
            JsonNode elemSchema = jsonSchema.get(JsonSchema.ARRAY_ITEMS_FIELD_NAME);
            if (elemSchema == null || elemSchema.isNull())
                throw new DataException("Array schema did not specify the element type");
            builder = SchemaBuilder.array(asConnectSchema(elemSchema));
            break;
        case JsonSchema.MAP_TYPE_NAME:
            // Maps require both key and value schemas; recurse for each.
            JsonNode keySchema = jsonSchema.get(JsonSchema.MAP_KEY_FIELD_NAME);
            if (keySchema == null)
                throw new DataException("Map schema did not specify the key type");
            JsonNode valueSchema = jsonSchema.get(JsonSchema.MAP_VALUE_FIELD_NAME);
            if (valueSchema == null)
                throw new DataException("Map schema did not specify the value type");
            builder = SchemaBuilder.map(asConnectSchema(keySchema), asConnectSchema(valueSchema));
            break;
        case JsonSchema.STRUCT_TYPE_NAME:
            builder = SchemaBuilder.struct();
            JsonNode fields = jsonSchema.get(JsonSchema.STRUCT_FIELDS_FIELD_NAME);
            if (fields == null || !fields.isArray())
                throw new DataException("Struct schema's \"fields\" argument is not an array.");
            // Each element of "fields" is itself a schema envelope carrying its field name.
            for (JsonNode field : fields) {
                JsonNode jsonFieldName = field.get(JsonSchema.STRUCT_FIELD_NAME_FIELD_NAME);
                if (jsonFieldName == null || !jsonFieldName.isTextual())
                    throw new DataException("Struct schema's field name not specified properly");
                builder.field(jsonFieldName.asText(), asConnectSchema(field));
            }
            break;
        default:
            throw new DataException("Unknown schema type: " + schemaTypeNode.textValue());
    }
    // Optionality defaults to required when the field is absent or not a boolean true.
    JsonNode schemaOptionalNode = jsonSchema.get(JsonSchema.SCHEMA_OPTIONAL_FIELD_NAME);
    if (schemaOptionalNode != null && schemaOptionalNode.isBoolean() && schemaOptionalNode.booleanValue())
        builder.optional();
    else
        builder.required();
    JsonNode schemaNameNode = jsonSchema.get(JsonSchema.SCHEMA_NAME_FIELD_NAME);
    if (schemaNameNode != null && schemaNameNode.isTextual())
        builder.name(schemaNameNode.textValue());
    JsonNode schemaVersionNode = jsonSchema.get(JsonSchema.SCHEMA_VERSION_FIELD_NAME);
    if (schemaVersionNode != null && schemaVersionNode.isIntegralNumber()) {
        builder.version(schemaVersionNode.intValue());
    }
    JsonNode schemaDocNode = jsonSchema.get(JsonSchema.SCHEMA_DOC_FIELD_NAME);
    if (schemaDocNode != null && schemaDocNode.isTextual())
        builder.doc(schemaDocNode.textValue());
    // Parameters are an object of string-valued entries; anything else is rejected.
    JsonNode schemaParamsNode = jsonSchema.get(JsonSchema.SCHEMA_PARAMETERS_FIELD_NAME);
    if (schemaParamsNode != null && schemaParamsNode.isObject()) {
        Iterator<Map.Entry<String, JsonNode>> paramsIt = schemaParamsNode.fields();
        while (paramsIt.hasNext()) {
            Map.Entry<String, JsonNode> entry = paramsIt.next();
            JsonNode paramValue = entry.getValue();
            if (!paramValue.isTextual())
                throw new DataException("Schema parameters must have string values.");
            builder.parameter(entry.getKey(), paramValue.textValue());
        }
    }
    // The default value is converted with the (now fully configured) builder as its schema.
    JsonNode schemaDefaultNode = jsonSchema.get(JsonSchema.SCHEMA_DEFAULT_FIELD_NAME);
    if (schemaDefaultNode != null)
        builder.defaultValue(convertToConnect(builder, schemaDefaultNode));
    Schema result = builder.build();
    // Cache only the fully built schema so partial results are never observed.
    toConnectSchemaCache.put(jsonSchema, result);
    return result;
}
Also used : DataException(org.apache.kafka.connect.errors.DataException) Schema(org.apache.kafka.connect.data.Schema) ConnectSchema(org.apache.kafka.connect.data.ConnectSchema) SchemaBuilder(org.apache.kafka.connect.data.SchemaBuilder) JsonNode(com.fasterxml.jackson.databind.JsonNode) HashMap(java.util.HashMap) Map(java.util.Map) EnumMap(java.util.EnumMap)

Example 68 with SchemaBuilder

use of org.apache.kafka.connect.data.SchemaBuilder in project apache-kafka-on-k8s by banzaicloud.

The getOrBuildSchema method of the class Cast.

/**
 * Returns the cast-converted schema for {@code valueSchema}, computing and caching it on first use.
 *
 * <p>In whole-value mode ({@code wholeValueCastType != null}) the entire schema is converted to the
 * configured type. Otherwise a struct schema is rebuilt field by field, converting only fields
 * named in {@code casts} and carrying over each field's optionality and (cast) default value.
 *
 * @param valueSchema the record's original value schema.
 * @return the converted schema, from cache when available.
 */
private Schema getOrBuildSchema(Schema valueSchema) {
    // Fast path: reuse a previously converted schema for this input schema.
    final Schema cachedSchema = schemaUpdateCache.get(valueSchema);
    if (cachedSchema != null) {
        return cachedSchema;
    }
    final SchemaBuilder schemaBuilder;
    if (wholeValueCastType != null) {
        // Whole-value mode: the entire value is cast to a single target type.
        schemaBuilder = SchemaUtil.copySchemaBasics(valueSchema, convertFieldType(wholeValueCastType));
    } else {
        // Per-field mode: rebuild the struct, converting only the configured fields.
        schemaBuilder = SchemaUtil.copySchemaBasics(valueSchema, SchemaBuilder.struct());
        for (Field field : valueSchema.fields()) {
            final Schema fieldSchema = field.schema();
            final Schema.Type targetType;
            if (casts.containsKey(field.name())) {
                targetType = casts.get(field.name());
            } else {
                targetType = fieldSchema.type();
            }
            final SchemaBuilder fieldBuilder = convertFieldType(targetType);
            if (fieldSchema.isOptional()) {
                fieldBuilder.optional();
            }
            if (fieldSchema.defaultValue() != null) {
                // Defaults must be converted to the field's new type as well.
                fieldBuilder.defaultValue(castValueToType(fieldSchema.defaultValue(), fieldBuilder.type()));
            }
            schemaBuilder.field(field.name(), fieldBuilder.build());
        }
    }
    // Carry over optionality and the (cast) default value of the top-level schema.
    if (valueSchema.isOptional()) {
        schemaBuilder.optional();
    }
    if (valueSchema.defaultValue() != null) {
        schemaBuilder.defaultValue(castValueToType(valueSchema.defaultValue(), schemaBuilder.type()));
    }
    final Schema rebuiltSchema = schemaBuilder.build();
    schemaUpdateCache.put(valueSchema, rebuiltSchema);
    return rebuiltSchema;
}
Also used : Field(org.apache.kafka.connect.data.Field) Schema(org.apache.kafka.connect.data.Schema) ConnectSchema(org.apache.kafka.connect.data.ConnectSchema) SchemaBuilder(org.apache.kafka.connect.data.SchemaBuilder)

Example 69 with SchemaBuilder

use of org.apache.kafka.connect.data.SchemaBuilder in project apache-kafka-on-k8s by banzaicloud.

The applyWithSchema method of the class Flatten.

/**
 * Flattens a struct-valued record, producing a new record with a flattened schema and value.
 *
 * <p>The flattened schema is computed once per input schema and cached in
 * {@code schemaUpdateCache}; the value is rebuilt on every call.
 *
 * @param record the record whose operating value must be a {@link Struct}.
 * @return a new record carrying the flattened schema and struct.
 */
private R applyWithSchema(R record) {
    final Struct value = requireStruct(operatingValue(record), PURPOSE);
    final Schema originalSchema = value.schema();
    Schema flattenedSchema = schemaUpdateCache.get(originalSchema);
    if (flattenedSchema == null) {
        // First time this schema is seen: compute the flattened form and cache it.
        final SchemaBuilder schemaBuilder = SchemaUtil.copySchemaBasics(originalSchema, SchemaBuilder.struct());
        final Struct schemaDefault = (Struct) originalSchema.defaultValue();
        buildUpdatedSchema(originalSchema, "", schemaBuilder, originalSchema.isOptional(), schemaDefault);
        flattenedSchema = schemaBuilder.build();
        schemaUpdateCache.put(originalSchema, flattenedSchema);
    }
    // The value itself is rebuilt for every record against the (possibly cached) schema.
    final Struct flattenedValue = new Struct(flattenedSchema);
    buildWithSchema(value, "", flattenedValue);
    return newRecord(record, flattenedSchema, flattenedValue);
}
Also used : Schema(org.apache.kafka.connect.data.Schema) ConnectSchema(org.apache.kafka.connect.data.ConnectSchema) SchemaBuilder(org.apache.kafka.connect.data.SchemaBuilder) Requirements.requireStruct(org.apache.kafka.connect.transforms.util.Requirements.requireStruct) Struct(org.apache.kafka.connect.data.Struct)

Example 70 with SchemaBuilder

use of org.apache.kafka.connect.data.SchemaBuilder in project apache-kafka-on-k8s by banzaicloud.

The castFieldsWithSchema method of the class CastTest.

/**
 * Verifies that per-field casts with a schema convert values, defaults, and preserve
 * field optionality in the transformed record.
 */
@Test
public void castFieldsWithSchema() {
    xformValue.configure(Collections.singletonMap(Cast.SPEC_CONFIG, "int8:int16,int16:int32,int32:int64,int64:boolean,float32:float64,float64:boolean,boolean:int8,string:int32,optional:int32"));
    // Include an optional field and fields with defaults to validate their values are passed through properly.
    Schema supportedTypesSchema = SchemaBuilder.struct()
            .field("int8", Schema.INT8_SCHEMA)
            .field("int16", Schema.OPTIONAL_INT16_SCHEMA)
            .field("int32", SchemaBuilder.int32().defaultValue(2).build())
            .field("int64", Schema.INT64_SCHEMA)
            .field("float32", Schema.FLOAT32_SCHEMA)
            // Default value here ensures we correctly convert default values.
            .field("float64", SchemaBuilder.float64().defaultValue(-1.125).build())
            .field("boolean", Schema.BOOLEAN_SCHEMA)
            .field("string", Schema.STRING_SCHEMA)
            .field("optional", Schema.OPTIONAL_FLOAT32_SCHEMA)
            .build();
    // "optional" field intentionally omitted from the value.
    Struct recordValue = new Struct(supportedTypesSchema)
            .put("int8", (byte) 8)
            .put("int16", (short) 16)
            .put("int32", 32)
            .put("int64", (long) 64)
            .put("float32", 32.f)
            .put("float64", -64.)
            .put("boolean", true)
            .put("string", "42");
    SourceRecord transformed = xformValue.apply(new SourceRecord(null, null, "topic", 0, supportedTypesSchema, recordValue));
    Struct transformedValue = (Struct) transformed.value();
    assertEquals((short) 8, transformedValue.get("int8"));
    assertTrue(transformedValue.schema().field("int16").schema().isOptional());
    assertEquals(16, transformedValue.get("int16"));
    assertEquals((long) 32, transformedValue.get("int32"));
    assertEquals(2L, transformedValue.schema().field("int32").schema().defaultValue());
    assertEquals(true, transformedValue.get("int64"));
    assertEquals(32., transformedValue.get("float32"));
    assertEquals(true, transformedValue.get("float64"));
    assertEquals(true, transformedValue.schema().field("float64").schema().defaultValue());
    assertEquals((byte) 1, transformedValue.get("boolean"));
    assertEquals(42, transformedValue.get("string"));
    assertNull(transformedValue.get("optional"));
}
Also used : Schema(org.apache.kafka.connect.data.Schema) SchemaBuilder(org.apache.kafka.connect.data.SchemaBuilder) SourceRecord(org.apache.kafka.connect.source.SourceRecord) Struct(org.apache.kafka.connect.data.Struct) Test(org.junit.Test)

Aggregations

SchemaBuilder (org.apache.kafka.connect.data.SchemaBuilder)88 Schema (org.apache.kafka.connect.data.Schema)40 Test (org.junit.Test)40 Struct (org.apache.kafka.connect.data.Struct)23 Field (org.apache.kafka.connect.data.Field)13 SourceRecord (org.apache.kafka.connect.source.SourceRecord)13 Test (org.junit.jupiter.api.Test)9 ConnectSchema (org.apache.kafka.connect.data.ConnectSchema)6 BigDecimal (java.math.BigDecimal)5 Date (java.util.Date)5 Requirements.requireStruct (org.apache.kafka.connect.transforms.util.Requirements.requireStruct)5 ArrayList (java.util.ArrayList)4 HashMap (java.util.HashMap)4 DataException (org.apache.kafka.connect.errors.DataException)4 KsqlStream (io.confluent.ksql.metastore.KsqlStream)3 KsqlTopic (io.confluent.ksql.metastore.KsqlTopic)3 Expression (io.confluent.ksql.parser.tree.Expression)3 JsonNode (com.fasterxml.jackson.databind.JsonNode)2 SelectItem (io.confluent.ksql.parser.tree.SelectItem)2 SingleColumn (io.confluent.ksql.parser.tree.SingleColumn)2