use of org.apache.kafka.connect.data.SchemaBuilder in project apache-kafka-on-k8s by banzaicloud.
the class TimestampConverter method applyWithSchema.
private R applyWithSchema(R record) {
    final Schema schema = operatingSchema(record);
    if (config.field.isEmpty()) {
        Object value = operatingValue(record);
        // New schema is determined by the requested target timestamp type
        Schema updatedSchema = TRANSLATORS.get(config.type).typeSchema();
        return newRecord(record, updatedSchema, convertTimestamp(value, timestampTypeFromSchema(schema)));
    } else {
        final Struct value = requireStruct(operatingValue(record), PURPOSE);
        Schema updatedSchema = schemaUpdateCache.get(schema);
        if (updatedSchema == null) {
            SchemaBuilder builder = SchemaUtil.copySchemaBasics(schema, SchemaBuilder.struct());
            for (Field field : schema.fields()) {
                if (field.name().equals(config.field)) {
                    // Swap in the schema for the requested target timestamp type
                    builder.field(field.name(), TRANSLATORS.get(config.type).typeSchema());
                } else {
                    builder.field(field.name(), field.schema());
                }
            }
            if (schema.isOptional())
                builder.optional();
            if (schema.defaultValue() != null) {
                Struct updatedDefaultValue = applyValueWithSchema((Struct) schema.defaultValue(), builder);
                builder.defaultValue(updatedDefaultValue);
            }
            updatedSchema = builder.build();
            // Key the cache on the incoming schema, matching the lookup above
            schemaUpdateCache.put(schema, updatedSchema);
        }
        Struct updatedValue = applyValueWithSchema(value, updatedSchema);
        return newRecord(record, updatedSchema, updatedValue);
    }
}
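For context, a minimal usage sketch (not part of the source above) showing how the Struct branch of applyWithSchema gets exercised; the field and topic names are illustrative, and the usual org.apache.kafka.connect imports are assumed. FIELD_CONFIG, TARGET_TYPE_CONFIG, and FORMAT_CONFIG are the transform's public config constants.

Map<String, String> props = new HashMap<>();
props.put(TimestampConverter.FIELD_CONFIG, "event_time");      // non-empty field selects the Struct branch above
props.put(TimestampConverter.TARGET_TYPE_CONFIG, "string");
props.put(TimestampConverter.FORMAT_CONFIG, "yyyy-MM-dd");

TimestampConverter.Value<SourceRecord> xform = new TimestampConverter.Value<>();
xform.configure(props);

Schema schema = SchemaBuilder.struct().field("event_time", Timestamp.SCHEMA).build();
Struct value = new Struct(schema).put("event_time", new java.util.Date(0L));
SourceRecord converted = xform.apply(new SourceRecord(null, null, "topic", 0, schema, value));
// converted.valueSchema().field("event_time").schema() is now a string schema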
use of org.apache.kafka.connect.data.SchemaBuilder in project apache-kafka-on-k8s by banzaicloud.
the class FlattenTest method testOptionalAndDefaultValuesNested.
@Test
public void testOptionalAndDefaultValuesNested() {
    // If we have a nested structure where an entire sub-Struct is optional, all flattened fields generated from its
    // children should also be optional. Similarly, if the parent Struct has a default value, the default value for
    // the flattened field should be taken from the parent's default.
    xformValue.configure(Collections.<String, String>emptyMap());

    SchemaBuilder builder = SchemaBuilder.struct().optional();
    builder.field("req_field", Schema.STRING_SCHEMA);
    builder.field("opt_field", SchemaBuilder.string().optional().defaultValue("child_default").build());
    Struct childDefaultValue = new Struct(builder);
    childDefaultValue.put("req_field", "req_default");
    builder.defaultValue(childDefaultValue);
    Schema schema = builder.build();

    // Intentionally leave this entire value empty since it is optional
    Struct value = new Struct(schema);

    SourceRecord transformed = xformValue.apply(new SourceRecord(null, null, "topic", 0, schema, value));

    assertNotNull(transformed);
    Schema transformedSchema = transformed.valueSchema();
    assertEquals(Schema.Type.STRUCT, transformedSchema.type());
    assertEquals(2, transformedSchema.fields().size());
    // Required field should pick up both being optional and the default value from the parent
    Schema transformedReqFieldSchema = SchemaBuilder.string().optional().defaultValue("req_default").build();
    assertEquals(transformedReqFieldSchema, transformedSchema.field("req_field").schema());
    // The optional field should still be optional but should have picked up the default value. However, since
    // the parent didn't specify the default explicitly, we should still be using the field's normal default
    Schema transformedOptFieldSchema = SchemaBuilder.string().optional().defaultValue("child_default").build();
    assertEquals(transformedOptFieldSchema, transformedSchema.field("opt_field").schema());
}
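The FlattenTest excerpts on this page reference an xformValue fixture that is not shown; in Kafka's FlattenTest it is the Flatten SMT applied to record values, roughly:

// Assumed test fixture for the FlattenTest excerpts (not shown on this page)
private final Flatten<SourceRecord> xformValue = new Flatten.Value<>();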
use of org.apache.kafka.connect.data.SchemaBuilder in project apache-kafka-on-k8s by banzaicloud.
the class FlattenTest method testNestedStruct.
@Test
public void testNestedStruct() {
    xformValue.configure(Collections.<String, String>emptyMap());

    SchemaBuilder builder = SchemaBuilder.struct();
    builder.field("int8", Schema.INT8_SCHEMA);
    builder.field("int16", Schema.INT16_SCHEMA);
    builder.field("int32", Schema.INT32_SCHEMA);
    builder.field("int64", Schema.INT64_SCHEMA);
    builder.field("float32", Schema.FLOAT32_SCHEMA);
    builder.field("float64", Schema.FLOAT64_SCHEMA);
    builder.field("boolean", Schema.BOOLEAN_SCHEMA);
    builder.field("string", Schema.STRING_SCHEMA);
    builder.field("bytes", Schema.BYTES_SCHEMA);
    Schema supportedTypesSchema = builder.build();

    builder = SchemaBuilder.struct();
    builder.field("B", supportedTypesSchema);
    Schema oneLevelNestedSchema = builder.build();

    builder = SchemaBuilder.struct();
    builder.field("A", oneLevelNestedSchema);
    Schema twoLevelNestedSchema = builder.build();

    Struct supportedTypes = new Struct(supportedTypesSchema);
    supportedTypes.put("int8", (byte) 8);
    supportedTypes.put("int16", (short) 16);
    supportedTypes.put("int32", 32);
    supportedTypes.put("int64", (long) 64);
    supportedTypes.put("float32", 32.f);
    supportedTypes.put("float64", 64.);
    supportedTypes.put("boolean", true);
    supportedTypes.put("string", "stringy");
    supportedTypes.put("bytes", "bytes".getBytes());

    Struct oneLevelNestedStruct = new Struct(oneLevelNestedSchema);
    oneLevelNestedStruct.put("B", supportedTypes);
    Struct twoLevelNestedStruct = new Struct(twoLevelNestedSchema);
    twoLevelNestedStruct.put("A", oneLevelNestedStruct);

    SourceRecord transformed = xformValue.apply(new SourceRecord(null, null, "topic", 0, twoLevelNestedSchema, twoLevelNestedStruct));

    assertEquals(Schema.Type.STRUCT, transformed.valueSchema().type());
    Struct transformedStruct = (Struct) transformed.value();
    assertEquals(9, transformedStruct.schema().fields().size());
    assertEquals(8, (byte) transformedStruct.getInt8("A.B.int8"));
    assertEquals(16, (short) transformedStruct.getInt16("A.B.int16"));
    assertEquals(32, (int) transformedStruct.getInt32("A.B.int32"));
    assertEquals(64L, (long) transformedStruct.getInt64("A.B.int64"));
    assertEquals(32.f, transformedStruct.getFloat32("A.B.float32"), 0.f);
    assertEquals(64., transformedStruct.getFloat64("A.B.float64"), 0.);
    assertEquals(true, transformedStruct.getBoolean("A.B.boolean"));
    assertEquals("stringy", transformedStruct.getString("A.B.string"));
    assertArrayEquals("bytes".getBytes(), transformedStruct.getBytes("A.B.bytes"));
}
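The "A.B.int8"-style names come from Flatten joining the field path with its delimiter, which defaults to "." and is configurable. A minimal sketch, assuming the transform's standard "delimiter" config key:

// Same transform with an underscore delimiter, so fields flatten to "A_B_int8" etc.
xformValue.configure(Collections.singletonMap("delimiter", "_"));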
use of org.apache.kafka.connect.data.SchemaBuilder in project apache-kafka-on-k8s by banzaicloud.
the class FlattenTest method testOptionalFieldStruct.
@Test
public void testOptionalFieldStruct() {
    xformValue.configure(Collections.<String, String>emptyMap());

    SchemaBuilder builder = SchemaBuilder.struct();
    builder.field("opt_int32", Schema.OPTIONAL_INT32_SCHEMA);
    Schema supportedTypesSchema = builder.build();

    builder = SchemaBuilder.struct();
    builder.field("B", supportedTypesSchema);
    Schema oneLevelNestedSchema = builder.build();

    Struct supportedTypes = new Struct(supportedTypesSchema);
    supportedTypes.put("opt_int32", null);
    Struct oneLevelNestedStruct = new Struct(oneLevelNestedSchema);
    oneLevelNestedStruct.put("B", supportedTypes);

    SourceRecord transformed = xformValue.apply(new SourceRecord(null, null, "topic", 0, oneLevelNestedSchema, oneLevelNestedStruct));

    assertEquals(Schema.Type.STRUCT, transformed.valueSchema().type());
    Struct transformedStruct = (Struct) transformed.value();
    assertNull(transformedStruct.get("B.opt_int32"));
}
use of org.apache.kafka.connect.data.SchemaBuilder in project kafka-connect-storage-cloud by confluentinc.
the class DataWriterAvroTest method createEnumSchema.
public Schema createEnumSchema() {
    // Enums are converted to strings; the original enum symbols are preserved in schema parameters
    SchemaBuilder builder = SchemaBuilder.string().name("TestEnum");
    builder.parameter(CONNECT_ENUM_DOC_PROP, null);
    builder.parameter(AVRO_TYPE_ENUM, "TestEnum");
    for (String enumSymbol : new String[]{"foo", "bar", "baz"}) {
        builder.parameter(AVRO_TYPE_ENUM + "." + enumSymbol, enumSymbol);
    }
    return builder.build();
}
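CONNECT_ENUM_DOC_PROP and AVRO_TYPE_ENUM are constants from Confluent's AvroData, which uses these schema parameters to remember that the string originated from an Avro enum. A sketch of the round trip, under the assumption that fromConnectSchema honors the parameters set above:

// Converting the Connect schema back to Avro should recover the enum
AvroData avroData = new AvroData(10);  // constructor takes a schema cache size
org.apache.avro.Schema avroSchema = avroData.fromConnectSchema(createEnumSchema());
// expected: avroSchema.getType() == org.apache.avro.Schema.Type.ENUM with symbols "foo", "bar", "baz"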