Use of org.apache.kafka.connect.data.SchemaBuilder in project kafka by apache.
From class CastTest, method castLogicalToPrimitive:
@Test
public void castLogicalToPrimitive() {
    List<String> specParts = Arrays.asList(
        "date_to_int32:int32", // Cast to underlying representation
        "timestamp_to_int64:int64", // Cast to underlying representation
        "time_to_int64:int64", // Cast to wider datatype than underlying representation
        "decimal_to_int32:int32", // Cast to narrower datatype with data loss
        "timestamp_to_float64:float64", // Loss of precision casting to double
        "null_timestamp_to_int32:int32");
    Date day = new Date(MILLIS_PER_DAY);
    xformValue.configure(Collections.singletonMap(Cast.SPEC_CONFIG, String.join(",", specParts)));

    SchemaBuilder builder = SchemaBuilder.struct();
    builder.field("date_to_int32", org.apache.kafka.connect.data.Date.SCHEMA);
    builder.field("timestamp_to_int64", Timestamp.SCHEMA);
    builder.field("time_to_int64", Time.SCHEMA);
    builder.field("decimal_to_int32", Decimal.schema(new BigDecimal((long) Integer.MAX_VALUE + 1).scale()));
    builder.field("timestamp_to_float64", Timestamp.SCHEMA);
    builder.field("null_timestamp_to_int32", Timestamp.builder().optional().build());
    Schema supportedTypesSchema = builder.build();

    Struct recordValue = new Struct(supportedTypesSchema);
    recordValue.put("date_to_int32", day);
    recordValue.put("timestamp_to_int64", new Date(0));
    recordValue.put("time_to_int64", new Date(1));
    recordValue.put("decimal_to_int32", new BigDecimal((long) Integer.MAX_VALUE + 1));
    recordValue.put("timestamp_to_float64", new Date(Long.MAX_VALUE));
    recordValue.put("null_timestamp_to_int32", null);

    SourceRecord transformed = xformValue.apply(new SourceRecord(null, null, "topic", 0, supportedTypesSchema, recordValue));

    assertEquals(1, ((Struct) transformed.value()).get("date_to_int32"));
    assertEquals(0L, ((Struct) transformed.value()).get("timestamp_to_int64"));
    assertEquals(1L, ((Struct) transformed.value()).get("time_to_int64"));
    assertEquals(Integer.MIN_VALUE, ((Struct) transformed.value()).get("decimal_to_int32"));
    assertEquals(9.223372036854776E18, ((Struct) transformed.value()).get("timestamp_to_float64"));
    assertNull(((Struct) transformed.value()).get("null_timestamp_to_int32"));

    Schema transformedSchema = ((Struct) transformed.value()).schema();
    assertEquals(Type.INT32, transformedSchema.field("date_to_int32").schema().type());
    assertEquals(Type.INT64, transformedSchema.field("timestamp_to_int64").schema().type());
    assertEquals(Type.INT64, transformedSchema.field("time_to_int64").schema().type());
    assertEquals(Type.INT32, transformedSchema.field("decimal_to_int32").schema().type());
    assertEquals(Type.FLOAT64, transformedSchema.field("timestamp_to_float64").schema().type());
    assertEquals(Type.INT32, transformedSchema.field("null_timestamp_to_int32").schema().type());
}
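For orientation, the logical types exercised by this test (Date, Time, Timestamp, Decimal) are thin wrappers over primitive Connect types, which is why Cast can map them to int32/int64/float64. Below is a minimal, self-contained sketch of building such a schema with SchemaBuilder; the class and field names are illustrative, not taken from the test.

import java.math.BigDecimal;
import org.apache.kafka.connect.data.Date;
import org.apache.kafka.connect.data.Decimal;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.data.Timestamp;

public class LogicalTypesExample {
    public static void main(String[] args) {
        // Each logical type is a primitive plus a schema name: Date is INT32,
        // Timestamp is INT64, and Decimal is BYTES parameterized by a scale.
        Schema schema = SchemaBuilder.struct()
                .field("day", Date.SCHEMA)
                .field("ts", Timestamp.SCHEMA)
                .field("amount", Decimal.schema(2))
                .build();

        Struct value = new Struct(schema)
                .put("day", new java.util.Date(0))
                .put("ts", new java.util.Date(0))
                .put("amount", new BigDecimal("12.34"));

        System.out.println(schema.field("day").schema().type());    // INT32
        System.out.println(schema.field("amount").schema().type()); // BYTES
        System.out.println(value.get("amount"));                    // 12.34
    }
}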
Use of org.apache.kafka.connect.data.SchemaBuilder in project kafka by apache.
From class FlattenTest, method testOptionalStruct:
@Test
public void testOptionalStruct() {
    xformValue.configure(Collections.<String, String>emptyMap());

    SchemaBuilder builder = SchemaBuilder.struct().optional();
    builder.field("opt_int32", Schema.OPTIONAL_INT32_SCHEMA);
    Schema schema = builder.build();

    SourceRecord transformed = xformValue.apply(new SourceRecord(null, null, "topic", 0, schema, null));

    assertEquals(Schema.Type.STRUCT, transformed.valueSchema().type());
    assertNull(transformed.value());
}
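The key detail above is that the top-level struct schema is itself optional, so a null record value is legal and Flatten passes it through unchanged. A standalone sketch of such a schema (names are illustrative):

import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;

public class OptionalStructExample {
    public static void main(String[] args) {
        // optional() marks the struct schema itself as nullable, independent of its fields.
        Schema schema = SchemaBuilder.struct()
                .optional()
                .field("opt_int32", Schema.OPTIONAL_INT32_SCHEMA)
                .build();

        System.out.println(schema.type());       // STRUCT
        System.out.println(schema.isOptional()); // true
    }
}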
Use of org.apache.kafka.connect.data.SchemaBuilder in project kafka by apache.
From class FlattenTest, method testOptionalNestedStruct:
@Test
public void testOptionalNestedStruct() {
    xformValue.configure(Collections.<String, String>emptyMap());

    SchemaBuilder builder = SchemaBuilder.struct().optional();
    builder.field("opt_int32", Schema.OPTIONAL_INT32_SCHEMA);
    Schema supportedTypesSchema = builder.build();

    builder = SchemaBuilder.struct();
    builder.field("B", supportedTypesSchema);
    Schema oneLevelNestedSchema = builder.build();

    Struct oneLevelNestedStruct = new Struct(oneLevelNestedSchema);
    oneLevelNestedStruct.put("B", null);

    SourceRecord transformed = xformValue.apply(new SourceRecord(null, null, "topic", 0, oneLevelNestedSchema, oneLevelNestedStruct));

    assertEquals(Schema.Type.STRUCT, transformed.valueSchema().type());
    Struct transformedStruct = (Struct) transformed.value();
    assertNull(transformedStruct.get("B.opt_int32"));
}
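The xformValue used in these tests is presumably a Flatten.Value instance created in the test's setup, which is not shown here. A minimal standalone sketch under that assumption, showing where the flattened field name B.opt_int32 comes from (the default "." delimiter joins parent and child field names):

import java.util.Collections;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.source.SourceRecord;
import org.apache.kafka.connect.transforms.Flatten;

public class FlattenSketch {
    public static void main(String[] args) {
        try (Flatten.Value<SourceRecord> flatten = new Flatten.Value<>()) {
            flatten.configure(Collections.emptyMap());

            // One level of nesting: outer struct "B" wraps an optional inner struct.
            Schema inner = SchemaBuilder.struct().optional()
                    .field("opt_int32", Schema.OPTIONAL_INT32_SCHEMA)
                    .build();
            Schema outer = SchemaBuilder.struct().field("B", inner).build();
            Struct value = new Struct(outer).put("B", new Struct(inner).put("opt_int32", 42));

            SourceRecord flattened = flatten.apply(
                    new SourceRecord(null, null, "topic", 0, outer, value));

            // The nested field is promoted to the top level as "B.opt_int32".
            System.out.println(((Struct) flattened.value()).get("B.opt_int32")); // 42
        }
    }
}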
Use of org.apache.kafka.connect.data.SchemaBuilder in project debezium by debezium.
From class PostgresValueConverter, method createArrayConverter:
private ValueConverter createArrayConverter(Column column, Field fieldDefn) {
    PostgresType arrayType = typeRegistry.get(column.nativeType());
    PostgresType elementType = arrayType.getElementType();
    final String elementTypeName = elementType.getName();
    final String elementColumnName = column.name() + "-element";
    final Column elementColumn = Column.editor()
            .name(elementColumnName)
            .jdbcType(elementType.getJdbcId())
            .nativeType(elementType.getOid())
            .type(elementTypeName)
            .optional(true)
            .scale(column.scale())
            .length(column.length())
            .create();
    Schema elementSchema = schemaBuilder(elementColumn).optional().build();
    final Field elementField = new Field(elementColumnName, 0, elementSchema);
    final ValueConverter elementConverter = converter(elementColumn, elementField);
    return data -> convertArray(column, fieldDefn, elementConverter, data);
}
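Here schemaBuilder(elementColumn) resolves the Connect schema for a single array element; the array schema for the column itself is presumably built elsewhere with SchemaBuilder.array. A minimal sketch of that general pattern using plain Connect APIs only (the string element type is just an example, not Debezium's actual mapping):

import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;

public class ArraySchemaExample {
    public static void main(String[] args) {
        // The element schema is optional so individual entries may be null;
        // the array schema is optional so the whole column may be null.
        Schema elementSchema = SchemaBuilder.string().optional().build();
        Schema arraySchema = SchemaBuilder.array(elementSchema).optional().build();

        System.out.println(arraySchema.type());                     // ARRAY
        System.out.println(arraySchema.valueSchema().isOptional()); // true
    }
}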
Use of org.apache.kafka.connect.data.SchemaBuilder in project debezium by debezium.
From class MongoDataConverter, method addFieldSchema:
public static void addFieldSchema(Entry<String, BsonValue> keyValuesforSchema, SchemaBuilder builder) {
    String key = keyValuesforSchema.getKey();
    BsonType type = keyValuesforSchema.getValue().getBsonType();
    switch (type) {
        case NULL:
            LOG.warn("Data type {} not currently supported", type);
            break;
        // String-like BSON types map to optional Connect strings.
        case STRING:
        case JAVASCRIPT:
        case OBJECT_ID:
        case DECIMAL128:
            builder.field(key, Schema.OPTIONAL_STRING_SCHEMA);
            break;
        case DOUBLE:
            builder.field(key, Schema.OPTIONAL_FLOAT64_SCHEMA);
            break;
        case BINARY:
            builder.field(key, Schema.OPTIONAL_BYTES_SCHEMA);
            break;
        case INT32:
        case TIMESTAMP:
            builder.field(key, Schema.OPTIONAL_INT32_SCHEMA);
            break;
        case INT64:
        case DATE_TIME:
            builder.field(key, Schema.OPTIONAL_INT64_SCHEMA);
            break;
        case BOOLEAN:
            builder.field(key, Schema.OPTIONAL_BOOLEAN_SCHEMA);
            break;
        // JavaScript-with-scope becomes a struct with the code string plus a nested struct for the scope.
        case JAVASCRIPT_WITH_SCOPE:
            SchemaBuilder jswithscope = SchemaBuilder.struct();
            jswithscope.field("code", Schema.OPTIONAL_STRING_SCHEMA);
            SchemaBuilder scope = SchemaBuilder.struct();
            BsonDocument jwsDocument = keyValuesforSchema.getValue().asJavaScriptWithScope().getScope().asDocument();
            for (Entry<String, BsonValue> jwsDocumentKey : jwsDocument.entrySet()) {
                addFieldSchema(jwsDocumentKey, scope);
            }
            Schema scopeBuild = scope.build();
            jswithscope.field("scope", scopeBuild).build();
            builder.field(key, jswithscope);
            break;
        case REGULAR_EXPRESSION:
            SchemaBuilder regexwop = SchemaBuilder.struct();
            regexwop.field("regex", Schema.OPTIONAL_STRING_SCHEMA);
            regexwop.field("options", Schema.OPTIONAL_STRING_SCHEMA);
            builder.field(key, regexwop.build());
            break;
        // Embedded documents recurse into this method to build a nested struct schema.
        case DOCUMENT:
            SchemaBuilder builderDoc = SchemaBuilder.struct();
            BsonDocument docs = keyValuesforSchema.getValue().asDocument();
            for (Entry<String, BsonValue> doc : docs.entrySet()) {
                addFieldSchema(doc, builderDoc);
            }
            builder.field(key, builderDoc.build());
            break;
        // Arrays take their element schema from the first element; empty arrays default to string elements.
        case ARRAY:
            if (keyValuesforSchema.getValue().asArray().isEmpty()) {
                builder.field(key, SchemaBuilder.array(Schema.OPTIONAL_STRING_SCHEMA).build());
                break;
            } else {
                BsonType valueType = keyValuesforSchema.getValue().asArray().get(0).getBsonType();
                switch (valueType) {
                    case NULL:
                        LOG.warn("Data type {} not currently supported", valueType);
                        break;
                    case STRING:
                    case JAVASCRIPT:
                    case OBJECT_ID:
                    case DECIMAL128:
                        builder.field(key, SchemaBuilder.array(Schema.OPTIONAL_STRING_SCHEMA).build());
                        break;
                    case DOUBLE:
                        builder.field(key, SchemaBuilder.array(Schema.OPTIONAL_FLOAT64_SCHEMA).build());
                        break;
                    case BINARY:
                        builder.field(key, SchemaBuilder.array(Schema.OPTIONAL_BYTES_SCHEMA).build());
                        break;
                    case INT32:
                    case TIMESTAMP:
                        builder.field(key, SchemaBuilder.array(Schema.OPTIONAL_INT32_SCHEMA).build());
                        break;
                    case INT64:
                    case DATE_TIME:
                        builder.field(key, SchemaBuilder.array(Schema.OPTIONAL_INT64_SCHEMA).build());
                        break;
                    case BOOLEAN:
                        builder.field(key, SchemaBuilder.array(Schema.OPTIONAL_BOOLEAN_SCHEMA).build());
                        break;
                    case DOCUMENT:
                        SchemaBuilder documentSchemaBuilder = SchemaBuilder.struct();
                        BsonDocument arrayDocs = keyValuesforSchema.getValue().asArray().get(0).asDocument();
                        for (Entry<String, BsonValue> arrayDoc : arrayDocs.entrySet()) {
                            addFieldSchema(arrayDoc, documentSchemaBuilder);
                        }
                        Schema build = documentSchemaBuilder.build();
                        builder.field(key, SchemaBuilder.array(build).build());
                        break;
                    default:
                        break;
                }
                break;
            }
        default:
            break;
    }
}
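To see how a builder like this is typically driven, the sketch below mirrors the mapping above for just three BSON types (STRING, INT32, DOCUMENT) and walks a parsed document. It is a simplified stand-in for illustration, not the Debezium method itself.

import java.util.Map.Entry;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.bson.BsonDocument;
import org.bson.BsonValue;

public class BsonToConnectSchemaSketch {
    // A condensed mirror of addFieldSchema covering only a few BSON types.
    static void addField(Entry<String, BsonValue> entry, SchemaBuilder builder) {
        switch (entry.getValue().getBsonType()) {
            case STRING:
                builder.field(entry.getKey(), Schema.OPTIONAL_STRING_SCHEMA);
                break;
            case INT32:
                builder.field(entry.getKey(), Schema.OPTIONAL_INT32_SCHEMA);
                break;
            case DOCUMENT:
                // Embedded documents recurse into a nested struct schema.
                SchemaBuilder nested = SchemaBuilder.struct();
                for (Entry<String, BsonValue> e : entry.getValue().asDocument().entrySet()) {
                    addField(e, nested);
                }
                builder.field(entry.getKey(), nested.build());
                break;
            default:
                break;
        }
    }

    public static void main(String[] args) {
        BsonDocument doc = BsonDocument.parse(
                "{\"name\": \"kafka\", \"count\": 42, \"nested\": {\"city\": \"x\"}}");
        SchemaBuilder builder = SchemaBuilder.struct();
        for (Entry<String, BsonValue> entry : doc.entrySet()) {
            addField(entry, builder);
        }
        Schema schema = builder.build();

        System.out.println(schema.field("name").schema().type());   // STRING
        System.out.println(schema.field("count").schema().type());  // INT32
        System.out.println(schema.field("nested").schema().type()); // STRUCT
    }
}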