Search in sources:

Example 16 with Field

use of org.apache.kafka.connect.data.Field in project apache-kafka-on-k8s by banzaicloud.

From the class MaskField, method applyWithSchema:

/**
 * Masks the configured fields of a schema-bearing record value.
 * Builds a new {@link Struct} with the same schema, copying each field and
 * substituting a masked value for fields listed in {@code maskedFields}.
 */
private R applyWithSchema(R record) {
    final Struct original = requireStruct(operatingValue(record), PURPOSE);
    final Struct updated = new Struct(original.schema());
    for (Field field : original.schema().fields()) {
        final Object raw = original.get(field);
        final boolean shouldMask = maskedFields.contains(field.name());
        updated.put(field, shouldMask ? masked(raw) : raw);
    }
    return newRecord(record, updated);
}
Also used : Field(org.apache.kafka.connect.data.Field) Requirements.requireStruct(org.apache.kafka.connect.transforms.util.Requirements.requireStruct) Struct(org.apache.kafka.connect.data.Struct)

Example 17 with Field

use of org.apache.kafka.connect.data.Field in project apache-kafka-on-k8s by banzaicloud.

From the class ReplaceField, method applyWithSchema:

/**
 * Applies field filtering/renaming to a schema-bearing record value.
 * The derived schema is memoized in {@code schemaUpdateCache}, keyed by the
 * original schema, since Connect reuses schema instances across records.
 */
private R applyWithSchema(R record) {
    final Struct value = requireStruct(operatingValue(record), PURPOSE);
    final Schema originalSchema = value.schema();
    Schema updatedSchema = schemaUpdateCache.get(originalSchema);
    if (updatedSchema == null) {
        updatedSchema = makeUpdatedSchema(originalSchema);
        schemaUpdateCache.put(originalSchema, updatedSchema);
    }
    final Struct updatedValue = new Struct(updatedSchema);
    for (Field field : updatedSchema.fields()) {
        // Look up each output field's value under its original (pre-rename) name.
        final String sourceName = reverseRenamed(field.name());
        updatedValue.put(field.name(), value.get(sourceName));
    }
    return newRecord(record, updatedSchema, updatedValue);
}
Also used : Field(org.apache.kafka.connect.data.Field) Schema(org.apache.kafka.connect.data.Schema) Requirements.requireStruct(org.apache.kafka.connect.transforms.util.Requirements.requireStruct) Struct(org.apache.kafka.connect.data.Struct)

Example 18 with Field

use of org.apache.kafka.connect.data.Field in project apache-kafka-on-k8s by banzaicloud.

From the class SetSchemaMetadata, method updateSchemaIn:

/**
 * Rebuilds the supplied key or value against {@code updatedSchema} if it
 * references the old {@link Schema}; otherwise returns it unchanged.
 * <p>
 * The new schema may differ in name and/or version, but is assumed to have
 * fields identical to the old schema's. Only {@link Struct} values carry a
 * schema reference, so anything else passes through untouched.
 *
 * @param keyOrValue    the key or value object; may be null
 * @param updatedSchema the updated schema that has been potentially renamed
 * @return the original object when it holds no schema reference, or a copy
 *         of the {@link Struct} rebuilt against the new schema
 */
protected static Object updateSchemaIn(Object keyOrValue, Schema updatedSchema) {
    if (!(keyOrValue instanceof Struct)) {
        // Non-Struct keys/values carry no schema reference; nothing to rewrite.
        return keyOrValue;
    }
    final Struct origStruct = (Struct) keyOrValue;
    final Struct newStruct = new Struct(updatedSchema);
    // Both schemas are assumed to have exactly the same fields, so a
    // field-by-field copy is sufficient.
    for (Field field : updatedSchema.fields()) {
        newStruct.put(field, origStruct.get(field));
    }
    return newStruct;
}
Also used : Field(org.apache.kafka.connect.data.Field) Struct(org.apache.kafka.connect.data.Struct)

Example 19 with Field

use of org.apache.kafka.connect.data.Field in project apache-kafka-on-k8s by banzaicloud.

From the class TimestampConverter, method applyWithSchema:

// Converts the configured timestamp field (or the whole value, when no field
// is configured) of a schema-bearing record to the target timestamp type.
private R applyWithSchema(R record) {
    final Schema schema = operatingSchema(record);
    if (config.field.isEmpty()) {
        // Whole-value mode: the record value itself is the timestamp.
        Object value = operatingValue(record);
        // New schema is determined by the requested target timestamp type
        Schema updatedSchema = TRANSLATORS.get(config.type).typeSchema();
        return newRecord(record, updatedSchema, convertTimestamp(value, timestampTypeFromSchema(schema)));
    } else {
        // Field mode: the value must be a Struct containing the timestamp field.
        final Struct value = requireStruct(operatingValue(record), PURPOSE);
        // Derived schemas are memoized; Connect reuses schema instances across records.
        Schema updatedSchema = schemaUpdateCache.get(value.schema());
        if (updatedSchema == null) {
            SchemaBuilder builder = SchemaUtil.copySchemaBasics(schema, SchemaBuilder.struct());
            for (Field field : schema.fields()) {
                if (field.name().equals(config.field)) {
                    // Replace only the target field's schema with the converted type.
                    builder.field(field.name(), TRANSLATORS.get(config.type).typeSchema());
                } else {
                    builder.field(field.name(), field.schema());
                }
            }
            // Preserve optionality and the (converted) default value of the original schema.
            if (schema.isOptional())
                builder.optional();
            if (schema.defaultValue() != null) {
                Struct updatedDefaultValue = applyValueWithSchema((Struct) schema.defaultValue(), builder);
                builder.defaultValue(updatedDefaultValue);
            }
            updatedSchema = builder.build();
            schemaUpdateCache.put(schema, updatedSchema);
        }
        Struct updatedValue = applyValueWithSchema(value, updatedSchema);
        return newRecord(record, updatedSchema, updatedValue);
    }
}
Also used : Field(org.apache.kafka.connect.data.Field) Schema(org.apache.kafka.connect.data.Schema) SchemaBuilder(org.apache.kafka.connect.data.SchemaBuilder) Requirements.requireStruct(org.apache.kafka.connect.transforms.util.Requirements.requireStruct) Struct(org.apache.kafka.connect.data.Struct)

Example 20 with Field

use of org.apache.kafka.connect.data.Field in project apache-kafka-on-k8s by banzaicloud.

From the class MaskFieldTest, method withSchema:

@Test
public void withSchema() {
    // Build a schema covering every maskable Connect type, plus a "magic"
    // field that is deliberately left out of the mask list as a control.
    Schema schema = SchemaBuilder.struct().field("magic", Schema.INT32_SCHEMA).field("bool", Schema.BOOLEAN_SCHEMA).field("byte", Schema.INT8_SCHEMA).field("short", Schema.INT16_SCHEMA).field("int", Schema.INT32_SCHEMA).field("long", Schema.INT64_SCHEMA).field("float", Schema.FLOAT32_SCHEMA).field("double", Schema.FLOAT64_SCHEMA).field("string", Schema.STRING_SCHEMA).field("date", org.apache.kafka.connect.data.Date.SCHEMA).field("time", Time.SCHEMA).field("timestamp", Timestamp.SCHEMA).field("decimal", Decimal.schema(0)).field("array", SchemaBuilder.array(Schema.INT32_SCHEMA)).field("map", SchemaBuilder.map(Schema.STRING_SCHEMA, Schema.STRING_SCHEMA)).build();
    // Populate every field with a non-default value so masking is observable.
    final Struct value = new Struct(schema);
    value.put("magic", 42);
    value.put("bool", true);
    value.put("byte", (byte) 42);
    value.put("short", (short) 42);
    value.put("int", 42);
    value.put("long", 42L);
    value.put("float", 42f);
    value.put("double", 42d);
    value.put("string", "hmm");
    value.put("date", new Date());
    value.put("time", new Date());
    value.put("timestamp", new Date());
    value.put("decimal", new BigDecimal(42));
    value.put("array", Arrays.asList(1, 2, 3));
    value.put("map", Collections.singletonMap("what", "what"));
    // Mask every field except "magic".
    final List<String> maskFields = new ArrayList<>(schema.fields().size());
    for (Field field : schema.fields()) {
        if (!field.name().equals("magic")) {
            maskFields.add(field.name());
        }
    }
    final Struct updatedValue = (Struct) transform(maskFields).apply(record(schema, value)).value();
    // The unmasked control field survives; every masked field is replaced by
    // its type's zero/empty equivalent.
    assertEquals(42, updatedValue.get("magic"));
    assertEquals(false, updatedValue.get("bool"));
    assertEquals((byte) 0, updatedValue.get("byte"));
    assertEquals((short) 0, updatedValue.get("short"));
    assertEquals(0, updatedValue.get("int"));
    assertEquals(0L, updatedValue.get("long"));
    assertEquals(0f, updatedValue.get("float"));
    assertEquals(0d, updatedValue.get("double"));
    assertEquals("", updatedValue.get("string"));
    assertEquals(new Date(0), updatedValue.get("date"));
    assertEquals(new Date(0), updatedValue.get("time"));
    assertEquals(new Date(0), updatedValue.get("timestamp"));
    assertEquals(BigDecimal.ZERO, updatedValue.get("decimal"));
    assertEquals(Collections.emptyList(), updatedValue.get("array"));
    assertEquals(Collections.emptyMap(), updatedValue.get("map"));
}
Also used : Field(org.apache.kafka.connect.data.Field) Schema(org.apache.kafka.connect.data.Schema) ArrayList(java.util.ArrayList) Date(java.util.Date) BigDecimal(java.math.BigDecimal) Struct(org.apache.kafka.connect.data.Struct) Test(org.junit.Test)

Aggregations

Field (org.apache.kafka.connect.data.Field)82 Struct (org.apache.kafka.connect.data.Struct)38 Schema (org.apache.kafka.connect.data.Schema)33 SchemaBuilder (org.apache.kafka.connect.data.SchemaBuilder)17 DataException (org.apache.kafka.connect.errors.DataException)14 List (java.util.List)12 ArrayList (java.util.ArrayList)11 Requirements.requireStruct (org.apache.kafka.connect.transforms.util.Requirements.requireStruct)11 HashMap (java.util.HashMap)10 Map (java.util.Map)8 Test (org.junit.Test)8 Date (java.util.Date)7 ConnectSchema (org.apache.kafka.connect.data.ConnectSchema)6 KsqlException (io.confluent.ksql.util.KsqlException)5 BigDecimal (java.math.BigDecimal)5 ArrayNode (com.fasterxml.jackson.databind.node.ArrayNode)4 ObjectNode (com.fasterxml.jackson.databind.node.ObjectNode)4 SchemaKStream (io.confluent.ksql.structured.SchemaKStream)4 ByteBuffer (java.nio.ByteBuffer)4 JsonNode (com.fasterxml.jackson.databind.JsonNode)3