Search in sources:

Example 31 with Field

use of org.apache.kafka.connect.data.Field in project kafka by apache.

From the class Flatten, method buildUpdatedSchema.

/**
 * Recursively walks {@code schema} and registers every leaf field on the flattened
 * {@code newSchema} under its delimiter-joined path name. A flattened field becomes
 * optional if any ancestor schema was optional (even when the field itself is
 * required), and inherits a default value from the nearest ancestor default Struct
 * when it has none of its own.
 * @param schema the (possibly nested) schema to translate
 * @param fieldNamePrefix delimiter-joined names of all ancestor fields, used as the prefix for field names
 * @param newSchema the flattened schema under construction
 * @param optional true if this schema or any ancestor schema is optional
 * @param defaultFromParent default Struct supplied by the parent/ancestor schemas, or null if none
 */
private void buildUpdatedSchema(Schema schema, String fieldNamePrefix, SchemaBuilder newSchema, boolean optional, Struct defaultFromParent) {
    for (Field field : schema.fields()) {
        final Schema fieldSchema = field.schema();
        final String flattenedName = fieldName(fieldNamePrefix, field.name());
        final boolean flattenedOptional = optional || fieldSchema.isOptional();
        // The field's own default wins; otherwise fall back to the value carried
        // in the parent's default Struct, if the parent supplied one.
        final Object resolvedDefault;
        if (fieldSchema.defaultValue() != null) {
            resolvedDefault = fieldSchema.defaultValue();
        } else {
            resolvedDefault = (defaultFromParent == null) ? null : defaultFromParent.get(field);
        }
        switch (fieldSchema.type()) {
            case STRUCT:
                // Recurse: nested struct fields are hoisted onto the same flat schema.
                buildUpdatedSchema(fieldSchema, flattenedName, newSchema, flattenedOptional, (Struct) resolvedDefault);
                break;
            case INT8:
            case INT16:
            case INT32:
            case INT64:
            case FLOAT32:
            case FLOAT64:
            case BOOLEAN:
            case STRING:
            case BYTES:
            case ARRAY:
                newSchema.field(flattenedName, convertFieldSchema(fieldSchema, flattenedOptional, resolvedDefault));
                break;
            default:
                // e.g. MAP is not flattenable.
                throw new DataException("Flatten transformation does not support " + fieldSchema.type() + " for record with schemas (for field " + flattenedName + ").");
        }
    }
}
Also used : Field(org.apache.kafka.connect.data.Field) DataException(org.apache.kafka.connect.errors.DataException)

Example 32 with Field

use of org.apache.kafka.connect.data.Field in project kafka by apache.

From the class MaskFieldTest, method testWithSchema.

@Test
public void testWithSchema() {
    // Mask every field in the schema except "magic", which must pass through untouched.
    final List<String> fieldsToMask = new ArrayList<>(SCHEMA.fields().size());
    for (Field schemaField : SCHEMA.fields()) {
        final String name = schemaField.name();
        if (!"magic".equals(name)) {
            fieldsToMask.add(name);
        }
    }
    final Struct masked = (Struct) transform(fieldsToMask, null).apply(record(SCHEMA, VALUES_WITH_SCHEMA)).value();
    // The excluded field keeps its original value ...
    assertEquals(42, masked.get("magic"));
    // ... while every masked field is replaced by its type's zero/empty value.
    assertEquals(false, masked.get("bool"));
    assertEquals((byte) 0, masked.get("byte"));
    assertEquals((short) 0, masked.get("short"));
    assertEquals(0, masked.get("int"));
    assertEquals(0L, masked.get("long"));
    assertEquals(0f, masked.get("float"));
    assertEquals(0d, masked.get("double"));
    assertEquals("", masked.get("string"));
    assertEquals(new Date(0), masked.get("date"));
    assertEquals(new Date(0), masked.get("time"));
    assertEquals(new Date(0), masked.get("timestamp"));
    assertEquals(BigDecimal.ZERO, masked.get("decimal"));
    assertEquals(Collections.emptyList(), masked.get("array"));
    assertEquals(Collections.emptyMap(), masked.get("map"));
}
Also used : Field(org.apache.kafka.connect.data.Field) ArrayList(java.util.ArrayList) Date(java.util.Date) Struct(org.apache.kafka.connect.data.Struct) Test(org.junit.jupiter.api.Test)

Example 33 with Field

use of org.apache.kafka.connect.data.Field in project kafka by apache.

From the class ReplaceField, method applyWithSchema.

// Applies the rename/filter transformation to a record whose value carries a schema.
// The translated schema is memoized in schemaUpdateCache so repeated records with the
// same schema skip the rebuild.
private R applyWithSchema(R record) {
    final Struct originalValue = requireStruct(operatingValue(record), PURPOSE);
    final Schema originalSchema = originalValue.schema();
    Schema newSchema = schemaUpdateCache.get(originalSchema);
    if (newSchema == null) {
        newSchema = makeUpdatedSchema(originalSchema);
        schemaUpdateCache.put(originalSchema, newSchema);
    }
    final Struct newValue = new Struct(newSchema);
    // Each output field pulls its value from the original struct via the pre-rename name.
    for (Field outputField : newSchema.fields()) {
        newValue.put(outputField.name(), originalValue.get(reverseRenamed(outputField.name())));
    }
    return newRecord(record, newSchema, newValue);
}
Also used : Field(org.apache.kafka.connect.data.Field) Schema(org.apache.kafka.connect.data.Schema) Requirements.requireStruct(org.apache.kafka.connect.transforms.util.Requirements.requireStruct) Struct(org.apache.kafka.connect.data.Struct)

Example 34 with Field

use of org.apache.kafka.connect.data.Field in project kafka by apache.

From the class SetSchemaMetadata, method updateSchemaIn.

/**
 * Rebinds the supplied key or value to {@code updatedSchema} when it references a Schema,
 * returning it unchanged otherwise.
 * <p>
 * The new Schema may differ in name and/or version, but is assumed to have fields that
 * exactly match those of the old Schema. Currently only {@link Struct} objects carry a
 * reference to their {@link Schema}, so anything else is passed through as-is.
 *
 * @param keyOrValue    the key or value object; may be null
 * @param updatedSchema the updated schema that has been potentially renamed
 * @return the original key or value object if it does not reference the old schema, or
 * a copy of the key or value object with updated references to the new schema.
 */
protected static Object updateSchemaIn(Object keyOrValue, Schema updatedSchema) {
    if (!(keyOrValue instanceof Struct)) {
        // Non-Struct keys/values hold no schema reference; nothing to rewrite.
        return keyOrValue;
    }
    final Struct original = (Struct) keyOrValue;
    final Struct rebound = new Struct(updatedSchema);
    // Copy field-by-field; both schemas are assumed to declare identical fields.
    for (Field field : updatedSchema.fields()) {
        rebound.put(field, original.get(field));
    }
    return rebound;
}
Also used : Field(org.apache.kafka.connect.data.Field) Struct(org.apache.kafka.connect.data.Struct)

Example 35 with Field

use of org.apache.kafka.connect.data.Field in project kafka by apache.

From the class SetSchemaMetadataTest, method assertMatchingSchema.

// Asserts that the struct's schema is the exact same instance as the expected schema
// and that every field matches by name, index, and field schema.
protected void assertMatchingSchema(Struct value, Schema schema) {
    final Schema actualSchema = value.schema();
    assertSame(schema, actualSchema);
    assertEquals(schema.name(), actualSchema.name());
    for (Field field : schema.fields()) {
        final String name = field.name();
        final Field expected = schema.field(name);
        final Field actual = actualSchema.field(name);
        assertEquals(expected.name(), actual.name());
        assertEquals(expected.index(), actual.index());
        assertSame(expected.schema(), actual.schema());
    }
}
Also used : Field(org.apache.kafka.connect.data.Field)

Aggregations

Field (org.apache.kafka.connect.data.Field)82 Struct (org.apache.kafka.connect.data.Struct)38 Schema (org.apache.kafka.connect.data.Schema)33 SchemaBuilder (org.apache.kafka.connect.data.SchemaBuilder)17 DataException (org.apache.kafka.connect.errors.DataException)14 List (java.util.List)12 ArrayList (java.util.ArrayList)11 Requirements.requireStruct (org.apache.kafka.connect.transforms.util.Requirements.requireStruct)11 HashMap (java.util.HashMap)10 Map (java.util.Map)8 Test (org.junit.Test)8 Date (java.util.Date)7 ConnectSchema (org.apache.kafka.connect.data.ConnectSchema)6 KsqlException (io.confluent.ksql.util.KsqlException)5 BigDecimal (java.math.BigDecimal)5 ArrayNode (com.fasterxml.jackson.databind.node.ArrayNode)4 ObjectNode (com.fasterxml.jackson.databind.node.ObjectNode)4 SchemaKStream (io.confluent.ksql.structured.SchemaKStream)4 ByteBuffer (java.nio.ByteBuffer)4 JsonNode (com.fasterxml.jackson.databind.JsonNode)3