Use of org.apache.kafka.connect.data.SchemaBuilder in project kafka by apache.
The class TimestampConverter, method applyWithSchema:
private R applyWithSchema(R record) {
    final Schema schema = operatingSchema(record);
    if (config.field.isEmpty()) {
        Object value = operatingValue(record);
        // New schema is determined by the requested target timestamp type
        Schema updatedSchema = TRANSLATORS.get(config.type).typeSchema(schema.isOptional());
        return newRecord(record, updatedSchema, convertTimestamp(value, timestampTypeFromSchema(schema)));
    } else {
        final Struct value = requireStructOrNull(operatingValue(record), PURPOSE);
        Schema updatedSchema = schemaUpdateCache.get(schema);
        if (updatedSchema == null) {
            SchemaBuilder builder = SchemaUtil.copySchemaBasics(schema, SchemaBuilder.struct());
            for (Field field : schema.fields()) {
                if (field.name().equals(config.field)) {
                    builder.field(field.name(), TRANSLATORS.get(config.type).typeSchema(field.schema().isOptional()));
                } else {
                    builder.field(field.name(), field.schema());
                }
            }
            if (schema.isOptional())
                builder.optional();
            if (schema.defaultValue() != null) {
                Struct updatedDefaultValue = applyValueWithSchema((Struct) schema.defaultValue(), builder);
                builder.defaultValue(updatedDefaultValue);
            }
            updatedSchema = builder.build();
            schemaUpdateCache.put(schema, updatedSchema);
        }
        Struct updatedValue = applyValueWithSchema(value, updatedSchema);
        return newRecord(record, updatedSchema, updatedValue);
    }
}
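For orientation, here is a minimal sketch of driving this transform directly; it assumes the Connect API and transforms artifacts are on the classpath, and the topic name, field name, and values are illustrative.

import java.util.Date;
import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.data.Timestamp;
import org.apache.kafka.connect.source.SourceRecord;
import org.apache.kafka.connect.transforms.TimestampConverter;

public class TimestampConverterUsage {
    public static void main(String[] args) {
        try (TimestampConverter.Value<SourceRecord> xform = new TimestampConverter.Value<>()) {
            Map<String, String> config = new HashMap<>();
            config.put(TimestampConverter.TARGET_TYPE_CONFIG, "string");
            config.put(TimestampConverter.FIELD_CONFIG, "ts");
            config.put(TimestampConverter.FORMAT_CONFIG, "yyyy-MM-dd");
            xform.configure(config);

            // A struct with a Timestamp field; applyWithSchema rebuilds the schema,
            // swapping the "ts" field's type for the requested target type.
            Schema schema = SchemaBuilder.struct().field("ts", Timestamp.SCHEMA).build();
            Struct value = new Struct(schema).put("ts", new Date(0L));
            SourceRecord record = new SourceRecord(null, null, "topic", 0, schema, value);

            SourceRecord transformed = xform.apply(record);
            System.out.println(transformed.value()); // e.g. Struct{ts=1970-01-01}
        }
    }
}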
Use of org.apache.kafka.connect.data.SchemaBuilder in project kafka by apache.
The class ValueToKey, method applyWithSchema:
private R applyWithSchema(R record) {
    final Struct value = requireStruct(record.value(), PURPOSE);
    Schema keySchema = valueToKeySchemaCache.get(value.schema());
    if (keySchema == null) {
        final SchemaBuilder keySchemaBuilder = SchemaBuilder.struct();
        for (String field : fields) {
            final Field fieldFromValue = value.schema().field(field);
            if (fieldFromValue == null) {
                throw new DataException("Field does not exist: " + field);
            }
            keySchemaBuilder.field(field, fieldFromValue.schema());
        }
        keySchema = keySchemaBuilder.build();
        valueToKeySchemaCache.put(value.schema(), keySchema);
    }
    final Struct key = new Struct(keySchema);
    for (String field : fields) {
        key.put(field, value.get(field));
    }
    return record.newRecord(record.topic(), record.kafkaPartition(), keySchema, key, value.schema(), value, record.timestamp());
}
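A minimal sketch of exercising this transform, assuming the Connect transforms artifact is on the classpath; the field names and record contents are illustrative.

import java.util.Collections;

import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.source.SourceRecord;
import org.apache.kafka.connect.transforms.ValueToKey;

public class ValueToKeyUsage {
    public static void main(String[] args) {
        try (ValueToKey<SourceRecord> xform = new ValueToKey<>()) {
            xform.configure(Collections.singletonMap(ValueToKey.FIELDS_CONFIG, "id"));

            Schema valueSchema = SchemaBuilder.struct()
                    .field("id", Schema.INT64_SCHEMA)
                    .field("name", Schema.STRING_SCHEMA)
                    .build();
            Struct value = new Struct(valueSchema).put("id", 42L).put("name", "example");

            SourceRecord record = new SourceRecord(null, null, "topic", 0, null, null, valueSchema, value);
            SourceRecord transformed = xform.apply(record);
            // The key is now Struct{id=42}, with a key schema built (and cached) as above.
            System.out.println(transformed.key());
        }
    }
}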
Use of org.apache.kafka.connect.data.SchemaBuilder in project kafka by apache.
The class HeaderFromTest, method data:
public static List<Arguments> data() {
    List<Arguments> result = new ArrayList<>();
    for (Boolean testKeyTransform : asList(true, false)) {
        result.add(Arguments.of("basic copy", testKeyTransform, new RecordBuilder().withField("field1", STRING_SCHEMA, "field1-value").withField("field2", STRING_SCHEMA, "field2-value").addHeader("header1", STRING_SCHEMA, "existing-value"), singletonList("field1"), singletonList("inserted1"), HeaderFrom.Operation.COPY, new RecordBuilder().withField("field1", STRING_SCHEMA, "field1-value").withField("field2", STRING_SCHEMA, "field2-value").addHeader("header1", STRING_SCHEMA, "existing-value").addHeader("inserted1", STRING_SCHEMA, "field1-value")));
        result.add(Arguments.of("basic move", testKeyTransform, new RecordBuilder().withField("field1", STRING_SCHEMA, "field1-value").withField("field2", STRING_SCHEMA, "field2-value").addHeader("header1", STRING_SCHEMA, "existing-value"), singletonList("field1"), singletonList("inserted1"), HeaderFrom.Operation.MOVE, new RecordBuilder().withField("field2", STRING_SCHEMA, "field2-value").addHeader("header1", STRING_SCHEMA, "existing-value").addHeader("inserted1", STRING_SCHEMA, "field1-value")));
        result.add(Arguments.of("copy with preexisting header", testKeyTransform, new RecordBuilder().withField("field1", STRING_SCHEMA, "field1-value").withField("field2", STRING_SCHEMA, "field2-value").addHeader("inserted1", STRING_SCHEMA, "existing-value"), singletonList("field1"), singletonList("inserted1"), HeaderFrom.Operation.COPY, new RecordBuilder().withField("field1", STRING_SCHEMA, "field1-value").withField("field2", STRING_SCHEMA, "field2-value").addHeader("inserted1", STRING_SCHEMA, "existing-value").addHeader("inserted1", STRING_SCHEMA, "field1-value")));
        result.add(Arguments.of("move with preexisting header", testKeyTransform, new RecordBuilder().withField("field1", STRING_SCHEMA, "field1-value").withField("field2", STRING_SCHEMA, "field2-value").addHeader("inserted1", STRING_SCHEMA, "existing-value"), singletonList("field1"), singletonList("inserted1"), HeaderFrom.Operation.MOVE, new RecordBuilder().withField("field2", STRING_SCHEMA, "field2-value").addHeader("inserted1", STRING_SCHEMA, "existing-value").addHeader("inserted1", STRING_SCHEMA, "field1-value")));
        Schema schema = new SchemaBuilder(Schema.Type.STRUCT).field("foo", STRING_SCHEMA).build();
        Struct struct = new Struct(schema).put("foo", "foo-value");
        result.add(Arguments.of("copy with struct value", testKeyTransform, new RecordBuilder().withField("field1", schema, struct).withField("field2", STRING_SCHEMA, "field2-value").addHeader("header1", STRING_SCHEMA, "existing-value"), singletonList("field1"), singletonList("inserted1"), HeaderFrom.Operation.COPY, new RecordBuilder().withField("field1", schema, struct).withField("field2", STRING_SCHEMA, "field2-value").addHeader("header1", STRING_SCHEMA, "existing-value").addHeader("inserted1", schema, struct)));
        result.add(Arguments.of("move with struct value", testKeyTransform, new RecordBuilder().withField("field1", schema, struct).withField("field2", STRING_SCHEMA, "field2-value").addHeader("header1", STRING_SCHEMA, "existing-value"), singletonList("field1"), singletonList("inserted1"), HeaderFrom.Operation.MOVE, new RecordBuilder().withField("field2", STRING_SCHEMA, "field2-value").addHeader("header1", STRING_SCHEMA, "existing-value").addHeader("inserted1", schema, struct)));
        result.add(Arguments.of("two headers from same field", testKeyTransform, new RecordBuilder().withField("field1", STRING_SCHEMA, "field1-value").withField("field2", STRING_SCHEMA, "field2-value").addHeader("header1", STRING_SCHEMA, "existing-value"), // two headers from the same field
                asList("field1", "field1"), asList("inserted1", "inserted2"), HeaderFrom.Operation.MOVE, new RecordBuilder().withField("field2", STRING_SCHEMA, "field2-value").addHeader("header1", STRING_SCHEMA, "existing-value").addHeader("inserted1", STRING_SCHEMA, "field1-value").addHeader("inserted2", STRING_SCHEMA, "field1-value")));
        result.add(Arguments.of("two fields to same header", testKeyTransform, new RecordBuilder().withField("field1", STRING_SCHEMA, "field1-value").withField("field2", STRING_SCHEMA, "field2-value").addHeader("header1", STRING_SCHEMA, "existing-value"), // two fields moved into the same header
                asList("field1", "field2"), asList("inserted1", "inserted1"), HeaderFrom.Operation.MOVE, new RecordBuilder().addHeader("header1", STRING_SCHEMA, "existing-value").addHeader("inserted1", STRING_SCHEMA, "field1-value").addHeader("inserted1", STRING_SCHEMA, "field2-value")));
    }
    return result;
}
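As a companion to these cases, here is a minimal sketch of configuring and applying the HeaderFrom transform itself, matching the "basic copy" case; the config keys ("fields", "headers", "operation") are the transform's documented names, and the record contents are illustrative.

import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.source.SourceRecord;
import org.apache.kafka.connect.transforms.HeaderFrom;

public class HeaderFromUsage {
    public static void main(String[] args) {
        try (HeaderFrom.Value<SourceRecord> xform = new HeaderFrom.Value<>()) {
            Map<String, Object> config = new HashMap<>();
            config.put("fields", "field1");     // LIST-typed config; comma-separated
            config.put("headers", "inserted1");
            config.put("operation", "copy");
            xform.configure(config);

            Schema schema = SchemaBuilder.struct().field("field1", Schema.STRING_SCHEMA).build();
            Struct value = new Struct(schema).put("field1", "field1-value");
            SourceRecord record = new SourceRecord(null, null, "topic", 0, schema, value);

            SourceRecord transformed = xform.apply(record);
            // The header "inserted1" now carries the value copied from "field1".
            System.out.println(transformed.headers());
        }
    }
}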
Use of org.apache.kafka.connect.data.SchemaBuilder in project kafka by apache.
The class TimestampConverterTest, method testWithSchemaNullFieldConversion:
private void testWithSchemaNullFieldConversion(String targetType, Schema originalSchema, Schema expectedSchema) {
    Map<String, String> config = new HashMap<>();
    config.put(TimestampConverter.TARGET_TYPE_CONFIG, targetType);
    config.put(TimestampConverter.FORMAT_CONFIG, STRING_DATE_FMT);
    config.put(TimestampConverter.FIELD_CONFIG, "ts");
    xformValue.configure(config);
    SchemaBuilder structSchema = SchemaBuilder.struct().field("ts", originalSchema).field("other", Schema.STRING_SCHEMA);
    SchemaBuilder expectedStructSchema = SchemaBuilder.struct().field("ts", expectedSchema).field("other", Schema.STRING_SCHEMA);
    Struct original = new Struct(structSchema);
    original.put("ts", null);
    original.put("other", "test");
    // Struct field is null
    SourceRecord transformed = xformValue.apply(createRecordWithSchema(structSchema.build(), original));
    assertEquals(expectedStructSchema.build(), transformed.valueSchema());
    assertNull(requireStruct(transformed.value(), "").get("ts"));
    // entire Struct is null
    transformed = xformValue.apply(createRecordWithSchema(structSchema.optional().build(), null));
    assertEquals(expectedStructSchema.optional().build(), transformed.valueSchema());
    assertNull(transformed.value());
}
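A hypothetical invocation of this helper for a Timestamp-to-string conversion; the schema arguments are illustrative assumptions, not taken from the actual test class.

// Illustrative call: converting an optional Timestamp field "ts" to a formatted
// string should produce an optional string schema while preserving the null value.
testWithSchemaNullFieldConversion(
        "string",
        Timestamp.builder().optional().build(),
        SchemaBuilder.string().optional().build());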
Use of org.apache.kafka.connect.data.SchemaBuilder in project kafka by apache.
The class CastTest, method castFieldsWithSchema:
@Test
public void castFieldsWithSchema() {
    Date day = new Date(MILLIS_PER_DAY);
    byte[] byteArray = new byte[] { (byte) 0xFE, (byte) 0xDC, (byte) 0xBA, (byte) 0x98, 0x76, 0x54, 0x32, 0x10 };
    ByteBuffer byteBuffer = ByteBuffer.wrap(Arrays.copyOf(byteArray, byteArray.length));
    xformValue.configure(Collections.singletonMap(Cast.SPEC_CONFIG, "int8:int16,int16:int32,int32:int64,int64:boolean,float32:float64,float64:boolean,boolean:int8,string:int32,bigdecimal:string,date:string,optional:int32,bytes:string,byteArray:string"));
    // Include optional fields and fields with defaults to validate that their values are passed through properly
    SchemaBuilder builder = SchemaBuilder.struct();
    builder.field("int8", Schema.INT8_SCHEMA);
    builder.field("int16", Schema.OPTIONAL_INT16_SCHEMA);
    builder.field("int32", SchemaBuilder.int32().defaultValue(2).build());
    builder.field("int64", Schema.INT64_SCHEMA);
    builder.field("float32", Schema.FLOAT32_SCHEMA);
    // Default value here ensures we correctly convert default values
    builder.field("float64", SchemaBuilder.float64().defaultValue(-1.125).build());
    builder.field("boolean", Schema.BOOLEAN_SCHEMA);
    builder.field("string", Schema.STRING_SCHEMA);
    builder.field("bigdecimal", Decimal.schema(new BigDecimal(42).scale()));
    builder.field("date", org.apache.kafka.connect.data.Date.SCHEMA);
    builder.field("optional", Schema.OPTIONAL_FLOAT32_SCHEMA);
    builder.field("timestamp", Timestamp.SCHEMA);
    builder.field("bytes", Schema.BYTES_SCHEMA);
    builder.field("byteArray", Schema.BYTES_SCHEMA);
    Schema supportedTypesSchema = builder.build();
    Struct recordValue = new Struct(supportedTypesSchema);
    recordValue.put("int8", (byte) 8);
    recordValue.put("int16", (short) 16);
    recordValue.put("int32", 32);
    recordValue.put("int64", (long) 64);
    recordValue.put("float32", 32.f);
    recordValue.put("float64", -64.);
    recordValue.put("boolean", true);
    recordValue.put("bigdecimal", new BigDecimal(42));
    recordValue.put("date", day);
    recordValue.put("string", "42");
    recordValue.put("timestamp", new Date(0));
    recordValue.put("bytes", byteBuffer);
    recordValue.put("byteArray", byteArray);
    // optional field intentionally omitted
    SourceRecord transformed = xformValue.apply(new SourceRecord(null, null, "topic", 0, supportedTypesSchema, recordValue));
    assertEquals((short) 8, ((Struct) transformed.value()).get("int8"));
    assertTrue(((Struct) transformed.value()).schema().field("int16").schema().isOptional());
    assertEquals(16, ((Struct) transformed.value()).get("int16"));
    assertEquals((long) 32, ((Struct) transformed.value()).get("int32"));
    assertEquals(2L, ((Struct) transformed.value()).schema().field("int32").schema().defaultValue());
    assertEquals(true, ((Struct) transformed.value()).get("int64"));
    assertEquals(32., ((Struct) transformed.value()).get("float32"));
    assertEquals(true, ((Struct) transformed.value()).get("float64"));
    assertEquals(true, ((Struct) transformed.value()).schema().field("float64").schema().defaultValue());
    assertEquals((byte) 1, ((Struct) transformed.value()).get("boolean"));
    assertEquals(42, ((Struct) transformed.value()).get("string"));
    assertEquals("42", ((Struct) transformed.value()).get("bigdecimal"));
    assertEquals(Values.dateFormatFor(day).format(day), ((Struct) transformed.value()).get("date"));
    assertEquals(new Date(0), ((Struct) transformed.value()).get("timestamp"));
    assertEquals("/ty6mHZUMhA=", ((Struct) transformed.value()).get("bytes"));
    assertEquals("/ty6mHZUMhA=", ((Struct) transformed.value()).get("byteArray"));
    assertNull(((Struct) transformed.value()).get("optional"));
    Schema transformedSchema = ((Struct) transformed.value()).schema();
    assertEquals(Schema.INT16_SCHEMA.type(), transformedSchema.field("int8").schema().type());
    assertEquals(Schema.OPTIONAL_INT32_SCHEMA.type(), transformedSchema.field("int16").schema().type());
    assertEquals(Schema.INT64_SCHEMA.type(), transformedSchema.field("int32").schema().type());
    assertEquals(Schema.BOOLEAN_SCHEMA.type(), transformedSchema.field("int64").schema().type());
    assertEquals(Schema.FLOAT64_SCHEMA.type(), transformedSchema.field("float32").schema().type());
    assertEquals(Schema.BOOLEAN_SCHEMA.type(), transformedSchema.field("float64").schema().type());
    assertEquals(Schema.INT8_SCHEMA.type(), transformedSchema.field("boolean").schema().type());
    assertEquals(Schema.INT32_SCHEMA.type(), transformedSchema.field("string").schema().type());
    assertEquals(Schema.STRING_SCHEMA.type(), transformedSchema.field("bigdecimal").schema().type());
    assertEquals(Schema.STRING_SCHEMA.type(), transformedSchema.field("date").schema().type());
    assertEquals(Schema.OPTIONAL_INT32_SCHEMA.type(), transformedSchema.field("optional").schema().type());
    assertEquals(Schema.STRING_SCHEMA.type(), transformedSchema.field("bytes").schema().type());
    assertEquals(Schema.STRING_SCHEMA.type(), transformedSchema.field("byteArray").schema().type());
    // The timestamp field is not in the spec, so it is passed through unchanged
    assertEquals(Timestamp.SCHEMA.type(), transformedSchema.field("timestamp").schema().type());
}
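To tie the spec syntax above back to a simpler case, here is a minimal sketch of a single-field cast; the field name, value, and topic are illustrative.

import java.util.Collections;

import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.source.SourceRecord;
import org.apache.kafka.connect.transforms.Cast;

public class CastUsage {
    public static void main(String[] args) {
        try (Cast.Value<SourceRecord> xform = new Cast.Value<>()) {
            xform.configure(Collections.singletonMap(Cast.SPEC_CONFIG, "age:string"));

            Schema schema = SchemaBuilder.struct().field("age", Schema.INT32_SCHEMA).build();
            Struct value = new Struct(schema).put("age", 42);

            SourceRecord transformed = xform.apply(
                    new SourceRecord(null, null, "topic", 0, schema, value));
            // The "age" field was cast from int32 to string.
            System.out.println(((Struct) transformed.value()).get("age")); // "42"
        }
    }
}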