Example 31 with SchemaBuilder

use of org.apache.kafka.connect.data.SchemaBuilder in project debezium by debezium.

the class UnwrapFromMongoDbEnvelope method apply.

@Override
public R apply(R r) {
    SchemaBuilder valueSchemaBuilder = SchemaBuilder.struct();
    SchemaBuilder keySchemaBuilder = SchemaBuilder.struct();
    BsonDocument valueDocument = null;
    final R afterRecord = afterExtractor.apply(r);
    final R key = keyExtractor.apply(r);
    BsonDocument keyDocument = BsonDocument.parse("{ \"id\" : " + key.key().toString() + "}");
    if (afterRecord.value() == null) {
        final R patchRecord = patchExtractor.apply(r);
        if (patchRecord.value() != null) {
            // update: take the fields of the $set document, making sure the key's id is present
            valueDocument = BsonDocument.parse(patchRecord.value().toString());
            valueDocument = valueDocument.getDocument("$set");
            if (!valueDocument.containsKey("id")) {
                valueDocument.append("id", keyDocument.get("id"));
            }
        } else {
            // delete: no value fields remain
            valueDocument = new BsonDocument();
        }
    } else {
        // insert: use the full after-image, replacing the raw _id with the extracted id
        valueDocument = BsonDocument.parse(afterRecord.value().toString());
        valueDocument.remove("_id");
        valueDocument.append("id", keyDocument.get("id"));
    }
    Set<Entry<String, BsonValue>> valuePairs = valueDocument.entrySet();
    Set<Entry<String, BsonValue>> keyPairs = keyDocument.entrySet();
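    // First pass: derive the field schemas for the value and the key from the BSON entries.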
    for (Entry<String, BsonValue> valuePairsforSchema : valuePairs) {
        if (valuePairsforSchema.getKey().equalsIgnoreCase("$set")) {
            BsonDocument val1 = BsonDocument.parse(valuePairsforSchema.getValue().toString());
            Set<Entry<String, BsonValue>> keyValuesforSetSchema = val1.entrySet();
            for (Entry<String, BsonValue> keyValuesforSetSchemaEntry : keyValuesforSetSchema) {
                MongoDataConverter.addFieldSchema(keyValuesforSetSchemaEntry, valueSchemaBuilder);
            }
        } else {
            MongoDataConverter.addFieldSchema(valuePairsforSchema, valueSchemaBuilder);
        }
    }
    for (Entry<String, BsonValue> keyPairsforSchema : keyPairs) {
        MongoDataConverter.addFieldSchema(keyPairsforSchema, keySchemaBuilder);
    }
    Schema finalValueSchema = valueSchemaBuilder.build();
    Struct finalValueStruct = new Struct(finalValueSchema);
    Schema finalKeySchema = keySchemaBuilder.build();
    Struct finalKeyStruct = new Struct(finalKeySchema);
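    // Second pass: populate the key and value structs against the schemas built above.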
    for (Entry<String, BsonValue> valuePairsforStruct : valuePairs) {
        if (valuePairsforStruct.getKey().equalsIgnoreCase("$set")) {
            BsonDocument val1 = BsonDocument.parse(valuePairsforStruct.getValue().toString());
            Set<Entry<String, BsonValue>> keyvalueforSetStruct = val1.entrySet();
            for (Entry<String, BsonValue> keyvalueforSetStructEntry : keyvalueforSetStruct) {
                MongoDataConverter.convertRecord(keyvalueforSetStructEntry, finalValueSchema, finalValueStruct);
            }
        } else {
            MongoDataConverter.convertRecord(valuePairsforStruct, finalValueSchema, finalValueStruct);
        }
    }
    for (Entry<String, BsonValue> keyPairsforStruct : keyPairs) {
        MongoDataConverter.convertRecord(keyPairsforStruct, finalKeySchema, finalKeyStruct);
    }
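    // A delete leaves no value fields behind; emit the record with a null value in that case.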
    if (finalValueSchema.fields().isEmpty()) {
        return r.newRecord(r.topic(), r.kafkaPartition(), finalKeySchema, finalKeyStruct, null, null, r.timestamp());
    } else {
        return r.newRecord(r.topic(), r.kafkaPartition(), finalKeySchema, finalKeyStruct, finalValueSchema, finalValueStruct, r.timestamp());
    }
}
Also used: Entry (java.util.Map.Entry), BsonDocument (org.bson.BsonDocument), Schema (org.apache.kafka.connect.data.Schema), SchemaBuilder (org.apache.kafka.connect.data.SchemaBuilder), BsonValue (org.bson.BsonValue), Struct (org.apache.kafka.connect.data.Struct)
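
The example relies on the usual build-then-populate pattern: every field is registered on the SchemaBuilder before build(), and the resulting immutable Schema then drives Struct validation. A minimal stand-alone sketch with illustrative field names (not taken from the Debezium converter):

import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;

SchemaBuilder builder = SchemaBuilder.struct().name("example.Value");
// Register every field before build(); a built Schema is immutable.
builder.field("id", Schema.STRING_SCHEMA);
builder.field("qty", Schema.OPTIONAL_INT32_SCHEMA);
Schema schema = builder.build();

// Struct.put validates each value against the corresponding field schema.
Struct value = new Struct(schema)
        .put("id", "abc-123")
        .put("qty", 7);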

Example 32 with SchemaBuilder

use of org.apache.kafka.connect.data.SchemaBuilder in project kafka by apache.

the class CastTest method castLogicalToString.

@Test
public void castLogicalToString() {
    Date date = new Date(MILLIS_PER_DAY);
    Date time = new Date(MILLIS_PER_HOUR);
    Date timestamp = new Date();
    xformValue.configure(Collections.singletonMap(Cast.SPEC_CONFIG, "date:string,decimal:string,time:string,timestamp:string"));
    SchemaBuilder builder = SchemaBuilder.struct();
    builder.field("date", org.apache.kafka.connect.data.Date.SCHEMA);
    builder.field("decimal", Decimal.schema(new BigDecimal(1982).scale()));
    builder.field("time", Time.SCHEMA);
    builder.field("timestamp", Timestamp.SCHEMA);
    Schema supportedTypesSchema = builder.build();
    Struct recordValue = new Struct(supportedTypesSchema);
    recordValue.put("date", date);
    recordValue.put("decimal", new BigDecimal(1982));
    recordValue.put("time", time);
    recordValue.put("timestamp", timestamp);
    SourceRecord transformed = xformValue.apply(new SourceRecord(null, null, "topic", 0, supportedTypesSchema, recordValue));
    assertEquals(Values.dateFormatFor(date).format(date), ((Struct) transformed.value()).get("date"));
    assertEquals("1982", ((Struct) transformed.value()).get("decimal"));
    assertEquals(Values.dateFormatFor(time).format(time), ((Struct) transformed.value()).get("time"));
    assertEquals(Values.dateFormatFor(timestamp).format(timestamp), ((Struct) transformed.value()).get("timestamp"));
    Schema transformedSchema = ((Struct) transformed.value()).schema();
    assertEquals(Type.STRING, transformedSchema.field("date").schema().type());
    assertEquals(Type.STRING, transformedSchema.field("decimal").schema().type());
    assertEquals(Type.STRING, transformedSchema.field("time").schema().type());
    assertEquals(Type.STRING, transformedSchema.field("timestamp").schema().type());
}
Also used: Schema (org.apache.kafka.connect.data.Schema), SchemaBuilder (org.apache.kafka.connect.data.SchemaBuilder), SourceRecord (org.apache.kafka.connect.source.SourceRecord), Date (java.util.Date), BigDecimal (java.math.BigDecimal), Struct (org.apache.kafka.connect.data.Struct), Test (org.junit.jupiter.api.Test)
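
The same transformation can be exercised stand-alone outside a test. A minimal sketch (the topic name and the "price" field are illustrative; "spec" is the config key that Cast.SPEC_CONFIG refers to):

import java.util.Collections;

import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.source.SourceRecord;
import org.apache.kafka.connect.transforms.Cast;

// Cast.Value applies the cast to the record value (Cast.Key targets the key).
Cast.Value<SourceRecord> cast = new Cast.Value<>();
cast.configure(Collections.singletonMap("spec", "price:string"));

Schema schema = SchemaBuilder.struct().field("price", Schema.FLOAT64_SCHEMA).build();
Struct value = new Struct(schema).put("price", 9.99);
SourceRecord out = cast.apply(new SourceRecord(null, null, "topic", 0, schema, value));
// The transformed "price" field is now the string "9.99".
cast.close();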

Example 33 with SchemaBuilder

use of org.apache.kafka.connect.data.SchemaBuilder in project kafka by apache.

the class FlattenTest method testOptionalFieldStruct.

@Test
public void testOptionalFieldStruct() {
    xformValue.configure(Collections.<String, String>emptyMap());
    SchemaBuilder builder = SchemaBuilder.struct();
    builder.field("opt_int32", Schema.OPTIONAL_INT32_SCHEMA);
    Schema supportedTypesSchema = builder.build();
    builder = SchemaBuilder.struct();
    builder.field("B", supportedTypesSchema);
    Schema oneLevelNestedSchema = builder.build();
    Struct supportedTypes = new Struct(supportedTypesSchema);
    supportedTypes.put("opt_int32", null);
    Struct oneLevelNestedStruct = new Struct(oneLevelNestedSchema);
    oneLevelNestedStruct.put("B", supportedTypes);
    SourceRecord transformed = xformValue.apply(new SourceRecord(null, null, "topic", 0, oneLevelNestedSchema, oneLevelNestedStruct));
    assertEquals(Schema.Type.STRUCT, transformed.valueSchema().type());
    Struct transformedStruct = (Struct) transformed.value();
    assertNull(transformedStruct.get("B.opt_int32"));
}
Also used: Schema (org.apache.kafka.connect.data.Schema), SchemaBuilder (org.apache.kafka.connect.data.SchemaBuilder), SourceRecord (org.apache.kafka.connect.source.SourceRecord), Struct (org.apache.kafka.connect.data.Struct), Test (org.junit.jupiter.api.Test)
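
The "." separator in the flattened name "B.opt_int32" is Flatten's default and is configurable. A short sketch assuming the transform's standard "delimiter" config key:

import java.util.Collections;

import org.apache.kafka.connect.source.SourceRecord;
import org.apache.kafka.connect.transforms.Flatten;

Flatten.Value<SourceRecord> flatten = new Flatten.Value<>();
flatten.configure(Collections.singletonMap("delimiter", "_"));
// With this setting, the nested field above would surface as "B_opt_int32".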

Example 34 with SchemaBuilder

use of org.apache.kafka.connect.data.SchemaBuilder in project kafka by apache.

the class FlattenTest method testNestedStruct.

@Test
public void testNestedStruct() {
    xformValue.configure(Collections.<String, String>emptyMap());
    SchemaBuilder builder = SchemaBuilder.struct();
    builder.field("int8", Schema.INT8_SCHEMA);
    builder.field("int16", Schema.INT16_SCHEMA);
    builder.field("int32", Schema.INT32_SCHEMA);
    builder.field("int64", Schema.INT64_SCHEMA);
    builder.field("float32", Schema.FLOAT32_SCHEMA);
    builder.field("float64", Schema.FLOAT64_SCHEMA);
    builder.field("boolean", Schema.BOOLEAN_SCHEMA);
    builder.field("string", Schema.STRING_SCHEMA);
    builder.field("bytes", Schema.BYTES_SCHEMA);
    Schema supportedTypesSchema = builder.build();
    builder = SchemaBuilder.struct();
    builder.field("B", supportedTypesSchema);
    Schema oneLevelNestedSchema = builder.build();
    builder = SchemaBuilder.struct();
    builder.field("A", oneLevelNestedSchema);
    Schema twoLevelNestedSchema = builder.build();
    Struct supportedTypes = new Struct(supportedTypesSchema);
    supportedTypes.put("int8", (byte) 8);
    supportedTypes.put("int16", (short) 16);
    supportedTypes.put("int32", 32);
    supportedTypes.put("int64", (long) 64);
    supportedTypes.put("float32", 32.f);
    supportedTypes.put("float64", 64.);
    supportedTypes.put("boolean", true);
    supportedTypes.put("string", "stringy");
    supportedTypes.put("bytes", "bytes".getBytes());
    Struct oneLevelNestedStruct = new Struct(oneLevelNestedSchema);
    oneLevelNestedStruct.put("B", supportedTypes);
    Struct twoLevelNestedStruct = new Struct(twoLevelNestedSchema);
    twoLevelNestedStruct.put("A", oneLevelNestedStruct);
    SourceRecord transformed = xformValue.apply(new SourceRecord(null, null, "topic", 0, twoLevelNestedSchema, twoLevelNestedStruct));
    assertEquals(Schema.Type.STRUCT, transformed.valueSchema().type());
    Struct transformedStruct = (Struct) transformed.value();
    assertEquals(9, transformedStruct.schema().fields().size());
    assertEquals(8, (byte) transformedStruct.getInt8("A.B.int8"));
    assertEquals(16, (short) transformedStruct.getInt16("A.B.int16"));
    assertEquals(32, (int) transformedStruct.getInt32("A.B.int32"));
    assertEquals(64L, (long) transformedStruct.getInt64("A.B.int64"));
    assertEquals(32.f, transformedStruct.getFloat32("A.B.float32"), 0.f);
    assertEquals(64., transformedStruct.getFloat64("A.B.float64"), 0.);
    assertEquals(true, transformedStruct.getBoolean("A.B.boolean"));
    assertEquals("stringy", transformedStruct.getString("A.B.string"));
    assertArrayEquals("bytes".getBytes(), transformedStruct.getBytes("A.B.bytes"));
}
Also used: Schema (org.apache.kafka.connect.data.Schema), SchemaBuilder (org.apache.kafka.connect.data.SchemaBuilder), SourceRecord (org.apache.kafka.connect.source.SourceRecord), Struct (org.apache.kafka.connect.data.Struct), Test (org.junit.jupiter.api.Test)

Example 35 with SchemaBuilder

use of org.apache.kafka.connect.data.SchemaBuilder in project kafka by apache.

the class FlattenTest method testOptionalAndDefaultValuesNested.

@Test
public void testOptionalAndDefaultValuesNested() {
    // If we have a nested structure where an entire sub-Struct is optional, all flattened fields generated from its
    // children should also be optional. Similarly, if the parent Struct has a default value, the default value for
    // each flattened field should be taken from the parent's default where it supplies one.
    xformValue.configure(Collections.<String, String>emptyMap());
    SchemaBuilder builder = SchemaBuilder.struct().optional();
    builder.field("req_field", Schema.STRING_SCHEMA);
    builder.field("opt_field", SchemaBuilder.string().optional().defaultValue("child_default").build());
    Struct childDefaultValue = new Struct(builder);
    childDefaultValue.put("req_field", "req_default");
    builder.defaultValue(childDefaultValue);
    Schema schema = builder.build();
    // Intentionally leave this entire value empty since it is optional
    Struct value = new Struct(schema);
    SourceRecord transformed = xformValue.apply(new SourceRecord(null, null, "topic", 0, schema, value));
    assertNotNull(transformed);
    Schema transformedSchema = transformed.valueSchema();
    assertEquals(Schema.Type.STRUCT, transformedSchema.type());
    assertEquals(2, transformedSchema.fields().size());
    // Required field should pick up both being optional and the default value from the parent
    Schema transformedReqFieldSchema = SchemaBuilder.string().optional().defaultValue("req_default").build();
    assertEquals(transformedReqFieldSchema, transformedSchema.field("req_field").schema());
    // The optional field should still be optional but should have picked up the default value. However, since
    // the parent didn't specify the default explicitly, we should still be using the field's normal default
    Schema transformedOptFieldSchema = SchemaBuilder.string().optional().defaultValue("child_default").build();
    assertEquals(transformedOptFieldSchema, transformedSchema.field("opt_field").schema());
}
Also used: Schema (org.apache.kafka.connect.data.Schema), SchemaBuilder (org.apache.kafka.connect.data.SchemaBuilder), SourceRecord (org.apache.kafka.connect.source.SourceRecord), Struct (org.apache.kafka.connect.data.Struct), Test (org.junit.jupiter.api.Test)

Aggregations

SchemaBuilder (org.apache.kafka.connect.data.SchemaBuilder): 84 uses
Test (org.junit.Test): 40 uses
Schema (org.apache.kafka.connect.data.Schema): 36 uses
Struct (org.apache.kafka.connect.data.Struct): 21 uses
Field (org.apache.kafka.connect.data.Field): 13 uses
SourceRecord (org.apache.kafka.connect.source.SourceRecord): 13 uses
Test (org.junit.jupiter.api.Test): 8 uses
ConnectSchema (org.apache.kafka.connect.data.ConnectSchema): 6 uses
BigDecimal (java.math.BigDecimal): 5 uses
Date (java.util.Date): 5 uses
Requirements.requireStruct (org.apache.kafka.connect.transforms.util.Requirements.requireStruct): 5 uses
ArrayList (java.util.ArrayList): 4 uses
HashMap (java.util.HashMap): 4 uses
DataException (org.apache.kafka.connect.errors.DataException): 4 uses
KsqlStream (io.confluent.ksql.metastore.KsqlStream): 3 uses
KsqlTopic (io.confluent.ksql.metastore.KsqlTopic): 3 uses
Expression (io.confluent.ksql.parser.tree.Expression): 3 uses
JsonNode (com.fasterxml.jackson.databind.JsonNode): 2 uses
SelectItem (io.confluent.ksql.parser.tree.SelectItem): 2 uses
SingleColumn (io.confluent.ksql.parser.tree.SingleColumn): 2 uses