Search in sources :

Example 81 with SchemaBuilder

Use of org.apache.kafka.connect.data.SchemaBuilder in project kafka by apache.

From class CastTest, method castLogicalToPrimitive.

@Test
public void castLogicalToPrimitive() {
    List<String> specParts = Arrays.asList(
            "date_to_int32:int32", // Cast to underlying representation
            "timestamp_to_int64:int64", // Cast to underlying representation
            "time_to_int64:int64", // Cast to wider datatype than underlying representation
            "decimal_to_int32:int32", // Cast to narrower datatype with data loss
            "timestamp_to_float64:float64", // loss of precision casting to double
            "null_timestamp_to_int32:int32");
    Date day = new Date(MILLIS_PER_DAY);
    xformValue.configure(Collections.singletonMap(Cast.SPEC_CONFIG, String.join(",", specParts)));
    SchemaBuilder builder = SchemaBuilder.struct();
    builder.field("date_to_int32", org.apache.kafka.connect.data.Date.SCHEMA);
    builder.field("timestamp_to_int64", Timestamp.SCHEMA);
    builder.field("time_to_int64", Time.SCHEMA);
    builder.field("decimal_to_int32", Decimal.schema(new BigDecimal((long) Integer.MAX_VALUE + 1).scale()));
    builder.field("timestamp_to_float64", Timestamp.SCHEMA);
    builder.field("null_timestamp_to_int32", Timestamp.builder().optional().build());
    Schema supportedTypesSchema = builder.build();
    Struct recordValue = new Struct(supportedTypesSchema);
    recordValue.put("date_to_int32", day);
    recordValue.put("timestamp_to_int64", new Date(0));
    recordValue.put("time_to_int64", new Date(1));
    recordValue.put("decimal_to_int32", new BigDecimal((long) Integer.MAX_VALUE + 1));
    recordValue.put("timestamp_to_float64", new Date(Long.MAX_VALUE));
    recordValue.put("null_timestamp_to_int32", null);
    SourceRecord transformed = xformValue.apply(new SourceRecord(null, null, "topic", 0, supportedTypesSchema, recordValue));
    assertEquals(1, ((Struct) transformed.value()).get("date_to_int32"));
    assertEquals(0L, ((Struct) transformed.value()).get("timestamp_to_int64"));
    assertEquals(1L, ((Struct) transformed.value()).get("time_to_int64"));
    assertEquals(Integer.MIN_VALUE, ((Struct) transformed.value()).get("decimal_to_int32"));
    assertEquals(9.223372036854776E18, ((Struct) transformed.value()).get("timestamp_to_float64"));
    assertNull(((Struct) transformed.value()).get("null_timestamp_to_int32"));
    Schema transformedSchema = ((Struct) transformed.value()).schema();
    assertEquals(Type.INT32, transformedSchema.field("date_to_int32").schema().type());
    assertEquals(Type.INT64, transformedSchema.field("timestamp_to_int64").schema().type());
    assertEquals(Type.INT64, transformedSchema.field("time_to_int64").schema().type());
    assertEquals(Type.INT32, transformedSchema.field("decimal_to_int32").schema().type());
    assertEquals(Type.FLOAT64, transformedSchema.field("timestamp_to_float64").schema().type());
    assertEquals(Type.INT32, transformedSchema.field("null_timestamp_to_int32").schema().type());
}
Also used : Schema(org.apache.kafka.connect.data.Schema) SchemaBuilder(org.apache.kafka.connect.data.SchemaBuilder) SourceRecord(org.apache.kafka.connect.source.SourceRecord) Date(java.util.Date) BigDecimal(java.math.BigDecimal) Struct(org.apache.kafka.connect.data.Struct) Test(org.junit.jupiter.api.Test)
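
Each logical type in this test is a named schema over a primitive representation, which is what makes these casts well-defined. A minimal fragment printing the underlying types (assuming only the standard Connect data classes):

import org.apache.kafka.connect.data.Date;
import org.apache.kafka.connect.data.Decimal;
import org.apache.kafka.connect.data.Time;
import org.apache.kafka.connect.data.Timestamp;

// Each logical type is a named schema wrapping a primitive type.
System.out.println(Date.SCHEMA.type());       // INT32: days since the epoch
System.out.println(Time.SCHEMA.type());       // INT32: milliseconds since midnight
System.out.println(Timestamp.SCHEMA.type());  // INT64: milliseconds since the epoch
System.out.println(Decimal.schema(2).type()); // BYTES: unscaled two's-complement value

This is why time_to_int64 is a widening cast (INT32-backed Time into int64), while decimal_to_int32 can overflow, as the Integer.MIN_VALUE assertion above shows.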

Example 82 with SchemaBuilder

Use of org.apache.kafka.connect.data.SchemaBuilder in project kafka by apache.

From class FlattenTest, method testOptionalStruct.

@Test
public void testOptionalStruct() {
    xformValue.configure(Collections.<String, String>emptyMap());
    SchemaBuilder builder = SchemaBuilder.struct().optional();
    builder.field("opt_int32", Schema.OPTIONAL_INT32_SCHEMA);
    Schema schema = builder.build();
    SourceRecord transformed = xformValue.apply(new SourceRecord(null, null, "topic", 0, schema, null));
    assertEquals(Schema.Type.STRUCT, transformed.valueSchema().type());
    assertNull(transformed.value());
}
Also used : Schema(org.apache.kafka.connect.data.Schema) SchemaBuilder(org.apache.kafka.connect.data.SchemaBuilder) SourceRecord(org.apache.kafka.connect.source.SourceRecord) Test(org.junit.jupiter.api.Test)
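
The .optional() call on the struct builder is what makes the null record value legal here: Connect validates values against their schemas, and null only passes validation for an optional schema. A minimal fragment illustrating the difference:

import org.apache.kafka.connect.data.ConnectSchema;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;

Schema optionalStruct = SchemaBuilder.struct().optional()
        .field("opt_int32", Schema.OPTIONAL_INT32_SCHEMA)
        .build();
ConnectSchema.validateValue(optionalStruct, null); // accepted

Schema requiredStruct = SchemaBuilder.struct()
        .field("opt_int32", Schema.OPTIONAL_INT32_SCHEMA)
        .build();
ConnectSchema.validateValue(requiredStruct, null); // throws DataException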

Example 83 with SchemaBuilder

Use of org.apache.kafka.connect.data.SchemaBuilder in project kafka by apache.

From class FlattenTest, method testOptionalNestedStruct.

@Test
public void testOptionalNestedStruct() {
    xformValue.configure(Collections.<String, String>emptyMap());
    SchemaBuilder builder = SchemaBuilder.struct().optional();
    builder.field("opt_int32", Schema.OPTIONAL_INT32_SCHEMA);
    Schema supportedTypesSchema = builder.build();
    builder = SchemaBuilder.struct();
    builder.field("B", supportedTypesSchema);
    Schema oneLevelNestedSchema = builder.build();
    Struct oneLevelNestedStruct = new Struct(oneLevelNestedSchema);
    oneLevelNestedStruct.put("B", null);
    SourceRecord transformed = xformValue.apply(new SourceRecord(null, null, "topic", 0, oneLevelNestedSchema, oneLevelNestedStruct));
    assertEquals(Schema.Type.STRUCT, transformed.valueSchema().type());
    Struct transformedStruct = (Struct) transformed.value();
    assertNull(transformedStruct.get("B.opt_int32"));
}
Also used : Schema(org.apache.kafka.connect.data.Schema) SchemaBuilder(org.apache.kafka.connect.data.SchemaBuilder) SourceRecord(org.apache.kafka.connect.source.SourceRecord) Struct(org.apache.kafka.connect.data.Struct) Test(org.junit.jupiter.api.Test)
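
The flattened field name "B.opt_int32" joins the parent and child names with the transform's delimiter. A minimal sketch of overriding it (this assumes Flatten's "delimiter" config key; check the transform's ConfigDef for your Kafka version):

import java.util.Collections;
import org.apache.kafka.connect.source.SourceRecord;
import org.apache.kafka.connect.transforms.Flatten;

// With an underscore delimiter, the nested field above would flatten to "B_opt_int32".
try (Flatten.Value<SourceRecord> flatten = new Flatten.Value<>()) {
    flatten.configure(Collections.singletonMap("delimiter", "_"));
    // flatten.apply(record) now joins nested names with "_" instead of ".".
}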

Example 84 with SchemaBuilder

Use of org.apache.kafka.connect.data.SchemaBuilder in project debezium by debezium.

From class PostgresValueConverter, method createArrayConverter.

private ValueConverter createArrayConverter(Column column, Field fieldDefn) {
    PostgresType arrayType = typeRegistry.get(column.nativeType());
    PostgresType elementType = arrayType.getElementType();
    final String elementTypeName = elementType.getName();
    final String elementColumnName = column.name() + "-element";
    final Column elementColumn = Column.editor()
            .name(elementColumnName)
            .jdbcType(elementType.getJdbcId())
            .nativeType(elementType.getOid())
            .type(elementTypeName)
            .optional(true)
            .scale(column.scale())
            .length(column.length())
            .create();
    Schema elementSchema = schemaBuilder(elementColumn).optional().build();
    final Field elementField = new Field(elementColumnName, 0, elementSchema);
    final ValueConverter elementConverter = converter(elementColumn, elementField);
    return data -> convertArray(column, fieldDefn, elementConverter, data);
}
Also used : PGInterval(org.postgresql.util.PGInterval) Json(io.debezium.data.Json) NumberConversions(io.debezium.util.NumberConversions) MicroDuration(io.debezium.time.MicroDuration) Date(java.util.Date) LocalDateTime(java.time.LocalDateTime) Schema(org.apache.kafka.connect.data.Schema) PGobject(org.postgresql.util.PGobject) BigDecimal(java.math.BigDecimal) SQLException(java.sql.SQLException) PgProto(io.debezium.connector.postgresql.proto.PgProto) ZonedTime(io.debezium.time.ZonedTime) LocalTime(java.time.LocalTime) VariableScaleDecimal(io.debezium.data.VariableScaleDecimal) BigInteger(java.math.BigInteger) ZoneOffset(java.time.ZoneOffset) Point(io.debezium.data.geometry.Point) Geography(io.debezium.data.geometry.Geography) ZonedTimestamp(io.debezium.time.ZonedTimestamp) OffsetTime(java.time.OffsetTime) Bits(io.debezium.data.Bits) Field(org.apache.kafka.connect.data.Field) TemporalPrecisionMode(io.debezium.jdbc.TemporalPrecisionMode) Instant(java.time.Instant) Collectors(java.util.stream.Collectors) TimeUnit(java.util.concurrent.TimeUnit) Decimal(org.apache.kafka.connect.data.Decimal) Geometry(io.debezium.data.geometry.Geometry) List(java.util.List) OffsetDateTime(java.time.OffsetDateTime) SpecialValueDecimal(io.debezium.data.SpecialValueDecimal) Column(io.debezium.relational.Column) JdbcValueConverters(io.debezium.jdbc.JdbcValueConverters) ValueConverter(io.debezium.relational.ValueConverter) Optional(java.util.Optional) SchemaBuilder(org.apache.kafka.connect.data.SchemaBuilder) Uuid(io.debezium.data.Uuid) UnsupportedEncodingException(java.io.UnsupportedEncodingException) Collections(java.util.Collections) PGpoint(org.postgresql.geometric.PGpoint)
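
The shape of the method generalizes: build the element schema once, then return a converter that maps a per-element conversion over the whole array. A minimal, self-contained sketch of the same pattern using plain java.util.function types rather than Debezium's ValueConverter:

import java.util.List;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;

// The element schema is derived once and reused for every element of the array.
Schema elementSchema = SchemaBuilder.string().optional().build();
Schema arraySchema = SchemaBuilder.array(elementSchema).optional().build();

// A per-element converter lifted to whole arrays, mirroring createArrayConverter.
Function<Object, Object> elementConverter = v -> v == null ? null : v.toString();
Function<List<?>, List<Object>> arrayConverter =
        list -> list.stream().map(elementConverter).collect(Collectors.toList());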

Example 85 with SchemaBuilder

Use of org.apache.kafka.connect.data.SchemaBuilder in project debezium by debezium.

From class MongoDataConverter, method addFieldSchema.

public static void addFieldSchema(Entry<String, BsonValue> keyValuesforSchema, SchemaBuilder builder) {
    String key = keyValuesforSchema.getKey();
    BsonType type = keyValuesforSchema.getValue().getBsonType();
    switch(type) {
        case NULL:
            LOG.warn("Data type {} not currently supported", type);
            break;
        case STRING:
        case JAVASCRIPT:
        case OBJECT_ID:
        case DECIMAL128:
            builder.field(key, Schema.OPTIONAL_STRING_SCHEMA);
            break;
        case DOUBLE:
            builder.field(key, Schema.OPTIONAL_FLOAT64_SCHEMA);
            break;
        case BINARY:
            builder.field(key, Schema.OPTIONAL_BYTES_SCHEMA);
            break;
        case INT32:
        case TIMESTAMP:
            builder.field(key, Schema.OPTIONAL_INT32_SCHEMA);
            break;
        case INT64:
        case DATE_TIME:
            builder.field(key, Schema.OPTIONAL_INT64_SCHEMA);
            break;
        case BOOLEAN:
            builder.field(key, Schema.OPTIONAL_BOOLEAN_SCHEMA);
            break;
        case JAVASCRIPT_WITH_SCOPE:
            SchemaBuilder jswithscope = SchemaBuilder.struct();
            jswithscope.field("code", Schema.OPTIONAL_STRING_SCHEMA);
            SchemaBuilder scope = SchemaBuilder.struct();
            BsonDocument jwsDocument = keyValuesforSchema.getValue().asJavaScriptWithScope().getScope().asDocument();
            for (Entry<String, BsonValue> jwsDocumentKey : jwsDocument.entrySet()) {
                addFieldSchema(jwsDocumentKey, scope);
            }
            Schema scopeBuild = scope.build();
            jswithscope.field("scope", scopeBuild).build();
            builder.field(key, jswithscope);
            break;
        case REGULAR_EXPRESSION:
            SchemaBuilder regexwop = SchemaBuilder.struct();
            regexwop.field("regex", Schema.OPTIONAL_STRING_SCHEMA);
            regexwop.field("options", Schema.OPTIONAL_STRING_SCHEMA);
            builder.field(key, regexwop.build());
            break;
        case DOCUMENT:
            SchemaBuilder builderDoc = SchemaBuilder.struct();
            BsonDocument docs = keyValuesforSchema.getValue().asDocument();
            for (Entry<String, BsonValue> doc : docs.entrySet()) {
                addFieldSchema(doc, builderDoc);
            }
            builder.field(key, builderDoc.build());
            break;
        case ARRAY:
            if (keyValuesforSchema.getValue().asArray().isEmpty()) {
                builder.field(key, SchemaBuilder.array(Schema.OPTIONAL_STRING_SCHEMA).build());
                break;
            } else {
                BsonType valueType = keyValuesforSchema.getValue().asArray().get(0).getBsonType();
                switch(valueType) {
                    case NULL:
                        LOG.warn("Data type {} not currently supported", valueType);
                        break;
                    case STRING:
                    case JAVASCRIPT:
                    case OBJECT_ID:
                    case DECIMAL128:
                        builder.field(key, SchemaBuilder.array(Schema.OPTIONAL_STRING_SCHEMA).build());
                        break;
                    case DOUBLE:
                        builder.field(key, SchemaBuilder.array(Schema.OPTIONAL_FLOAT64_SCHEMA).build());
                        break;
                    case BINARY:
                        builder.field(key, SchemaBuilder.array(Schema.OPTIONAL_BYTES_SCHEMA).build());
                        break;
                    case INT32:
                    case TIMESTAMP:
                        builder.field(key, SchemaBuilder.array(Schema.OPTIONAL_INT32_SCHEMA).build());
                        break;
                    case INT64:
                    case DATE_TIME:
                        builder.field(key, SchemaBuilder.array(Schema.OPTIONAL_INT64_SCHEMA).build());
                        break;
                    case BOOLEAN:
                        builder.field(key, SchemaBuilder.array(Schema.OPTIONAL_BOOLEAN_SCHEMA).build());
                        break;
                    case DOCUMENT:
                        SchemaBuilder documentSchemaBuilder = SchemaBuilder.struct();
                        BsonDocument arrayDocs = keyValuesforSchema.getValue().asArray().get(0).asDocument();
                        for (Entry<String, BsonValue> arrayDoc : arrayDocs.entrySet()) {
                            addFieldSchema(arrayDoc, documentSchemaBuilder);
                        }
                        Schema build = documentSchemaBuilder.build();
                        builder.field(key, SchemaBuilder.array(build).build());
                        break;
                    default:
                        break;
                }
                break;
            }
        default:
            break;
    }
}
Also used : BsonType(org.bson.BsonType) BsonDocument(org.bson.BsonDocument) Schema(org.apache.kafka.connect.data.Schema) SchemaBuilder(org.apache.kafka.connect.data.SchemaBuilder) BsonValue(org.bson.BsonValue)
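
Because addFieldSchema recurses into embedded documents and arrays, a schema for a whole document falls out of walking its top-level entries. A minimal driver sketch (the document contents and schema name are illustrative):

import java.util.Map.Entry;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.bson.BsonDocument;
import org.bson.BsonValue;

BsonDocument document = BsonDocument.parse("{\"name\": \"a\", \"nested\": {\"count\": 1}}");
SchemaBuilder builder = SchemaBuilder.struct().name("mongo.Document");
for (Entry<String, BsonValue> entry : document.entrySet()) {
    // NULL-typed fields are skipped with a warning; everything else becomes an optional field.
    MongoDataConverter.addFieldSchema(entry, builder);
}
Schema documentSchema = builder.build();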

Aggregations

SchemaBuilder (org.apache.kafka.connect.data.SchemaBuilder) 88
Schema (org.apache.kafka.connect.data.Schema) 40
Test (org.junit.Test) 40
Struct (org.apache.kafka.connect.data.Struct) 23
Field (org.apache.kafka.connect.data.Field) 13
SourceRecord (org.apache.kafka.connect.source.SourceRecord) 13
Test (org.junit.jupiter.api.Test) 9
ConnectSchema (org.apache.kafka.connect.data.ConnectSchema) 6
BigDecimal (java.math.BigDecimal) 5
Date (java.util.Date) 5
Requirements.requireStruct (org.apache.kafka.connect.transforms.util.Requirements.requireStruct) 5
ArrayList (java.util.ArrayList) 4
HashMap (java.util.HashMap) 4
DataException (org.apache.kafka.connect.errors.DataException) 4
KsqlStream (io.confluent.ksql.metastore.KsqlStream) 3
KsqlTopic (io.confluent.ksql.metastore.KsqlTopic) 3
Expression (io.confluent.ksql.parser.tree.Expression) 3
JsonNode (com.fasterxml.jackson.databind.JsonNode) 2
SelectItem (io.confluent.ksql.parser.tree.SelectItem) 2
SingleColumn (io.confluent.ksql.parser.tree.SingleColumn) 2