Search in sources :

Example 6 with Envelope

use of io.debezium.data.Envelope in project debezium by debezium.

In class UnwrapFromEnvelopeTest, method createCreateRecord:

/**
 * Builds a source record carrying a Debezium "create" (insert) envelope whose
 * value schema has a single byte-typed {@code id} field set to 1.
 *
 * @return a record with empty source partition/offset maps, topic "dummy",
 *         the envelope schema, and a create payload; never null
 */
private SourceRecord createCreateRecord() {
    final Schema recordSchema = SchemaBuilder.struct().field("id", SchemaBuilder.int8()).build();
    final Envelope envelope = Envelope.defineSchema()
            .withName("dummy.Envelope")
            .withRecord(recordSchema)
            .withSource(SchemaBuilder.struct().build())
            .build();
    // Struct.put returns this, so the row can be built fluently.
    final Struct row = new Struct(recordSchema).put("id", (byte) 1);
    final Struct value = envelope.create(row, null, System.nanoTime());
    return new SourceRecord(new HashMap<>(), new HashMap<>(), "dummy", envelope.schema(), value);
}
Also used : Schema(org.apache.kafka.connect.data.Schema) Envelope(io.debezium.data.Envelope) SourceRecord(org.apache.kafka.connect.source.SourceRecord) Struct(org.apache.kafka.connect.data.Struct)

Example 7 with Envelope

use of io.debezium.data.Envelope in project debezium by debezium.

In class UnwrapFromEnvelopeTest, method createDeleteRecord:

/**
 * Builds a source record carrying a Debezium "delete" envelope whose value
 * schema has a single byte-typed {@code id} field set to 1.
 *
 * @return a record with empty source partition/offset maps, topic "dummy",
 *         the envelope schema, and a delete payload; never null
 */
private SourceRecord createDeleteRecord() {
    final Schema recordSchema = SchemaBuilder.struct().field("id", SchemaBuilder.int8()).build();
    final Envelope envelope = Envelope.defineSchema()
            .withName("dummy.Envelope")
            .withRecord(recordSchema)
            .withSource(SchemaBuilder.struct().build())
            .build();
    // Struct.put returns this, so the row can be built fluently.
    final Struct row = new Struct(recordSchema).put("id", (byte) 1);
    final Struct value = envelope.delete(row, null, System.nanoTime());
    return new SourceRecord(new HashMap<>(), new HashMap<>(), "dummy", envelope.schema(), value);
}
Also used : Schema(org.apache.kafka.connect.data.Schema) Envelope(io.debezium.data.Envelope) SourceRecord(org.apache.kafka.connect.source.SourceRecord) Struct(org.apache.kafka.connect.data.Struct)

Example 8 with Envelope

use of io.debezium.data.Envelope in project debezium by debezium.

In class TableSchemaBuilder, method create:

/**
 * Create a {@link TableSchema} from the given {@link Table table definition}. The resulting TableSchema will have a
 * {@link TableSchema#keySchema() key schema} that contains all of the columns that make up the table's primary key,
 * and a {@link TableSchema#valueSchema() value schema} that contains every column accepted by the filter —
 * including primary-key columns, which therefore appear in both schemas.
 * <p>
 * NOTE(review): an earlier revision of this doc claimed the value schema excluded PK columns and that this was
 * "equivalent to calling {@code create(table,false)}"; neither matches the code below — no such overload is
 * visible and PK columns are added to the value schema whenever the filter accepts them.
 *
 * @param schemaPrefix the prefix added to the table identifier to construct the schema names; may be null if there is no
 *            prefix
 * @param envelopSchemaName the name of the schema of the built table's envelope
 * @param table the table definition; may not be null
 * @param filter the filter that specifies whether columns in the table should be included; may be null if all columns
 *            are to be included
 * @param mappers the mapping functions for columns; may be null if none of the columns are to be mapped to different values
 * @return the table schema that can be used for sending rows of data for this table to Kafka Connect; never null
 */
public TableSchema create(String schemaPrefix, String envelopSchemaName, Table table, Predicate<ColumnId> filter, ColumnMappers mappers) {
    if (schemaPrefix == null)
        schemaPrefix = "";
    // Build the schemas ...
    final TableId tableId = table.id();
    final String tableIdStr = tableId.toString();
    final String schemaNamePrefix = schemaPrefix + tableIdStr;
    LOGGER.debug("Mapping table '{}' to schemas under '{}'", tableId, schemaNamePrefix);
    // Schema names are adjusted to satisfy Avro/Connect naming rules before use.
    SchemaBuilder valSchemaBuilder = SchemaBuilder.struct().name(schemaNameAdjuster.adjust(schemaNamePrefix + ".Value"));
    SchemaBuilder keySchemaBuilder = SchemaBuilder.struct().name(schemaNameAdjuster.adjust(schemaNamePrefix + ".Key"));
    // AtomicBoolean because the flag is set inside the forEach lambda (effectively-final capture).
    AtomicBoolean hasPrimaryKey = new AtomicBoolean(false);
    table.columns().forEach(column -> {
        if (table.isPrimaryKeyColumn(column.name())) {
            // The column is part of the primary key, so ALWAYS add it to the PK schema ...
            addField(keySchemaBuilder, column, null);
            hasPrimaryKey.set(true);
        }
        if (filter == null || filter.test(new ColumnId(tableId, column.name()))) {
            // Add the column to the value schema only if the column has not been filtered ...
            // (a PK column that passes the filter lands in BOTH key and value schemas)
            ColumnMapper mapper = mappers == null ? null : mappers.mapperFor(tableId, column);
            addField(valSchemaBuilder, column, mapper);
        }
    });
    // Value schema is optional so a null value (e.g. a delete's "after") is representable;
    // a table with no primary key yields a null key schema.
    Schema valSchema = valSchemaBuilder.optional().build();
    Schema keySchema = hasPrimaryKey.get() ? keySchemaBuilder.build() : null;
    if (LOGGER.isDebugEnabled()) {
        LOGGER.debug("Mapped primary key for table '{}' to schema: {}", tableId, SchemaUtil.asDetailedString(keySchema));
        LOGGER.debug("Mapped columns for table '{}' to schema: {}", tableId, SchemaUtil.asDetailedString(valSchema));
    }
    Envelope envelope = Envelope.defineSchema().withName(schemaNameAdjuster.adjust(envelopSchemaName)).withRecord(valSchema).withSource(sourceInfoSchema).build();
    // Create the generators ...
    Function<Object[], Object> keyGenerator = createKeyGenerator(keySchema, tableId, table.primaryKeyColumns());
    Function<Object[], Struct> valueGenerator = createValueGenerator(valSchema, tableId, table.columns(), filter, mappers);
    // And the table schema ...
    return new TableSchema(keySchema, keyGenerator, envelope, valSchema, valueGenerator);
}
Also used : Schema(org.apache.kafka.connect.data.Schema) Envelope(io.debezium.data.Envelope) Struct(org.apache.kafka.connect.data.Struct) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) SchemaBuilder(org.apache.kafka.connect.data.SchemaBuilder) ColumnMapper(io.debezium.relational.mapping.ColumnMapper)

Aggregations

Envelope (io.debezium.data.Envelope)8 Schema (org.apache.kafka.connect.data.Schema)8 Struct (org.apache.kafka.connect.data.Struct)8 SourceRecord (org.apache.kafka.connect.source.SourceRecord)7 TableSchema (io.debezium.relational.TableSchema)5 BlockingConsumer (io.debezium.function.BlockingConsumer)1 ColumnMapper (io.debezium.relational.mapping.ColumnMapper)1 BitSet (java.util.BitSet)1 AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean)1 AtomicInteger (java.util.concurrent.atomic.AtomicInteger)1 SchemaBuilder (org.apache.kafka.connect.data.SchemaBuilder)1