Example 56 with Field

Use of org.apache.kafka.connect.data.Field in project connect-utils by jcustenborder.

The class BaseDocumentationTest, method buildSchemaInput:

Plugin.SchemaInput buildSchemaInput(Schema schema, String fieldName) {
    // Capture the schema's descriptive metadata on the builder.
    ImmutableSchemaInput.Builder schemaInput = ImmutableSchemaInput.builder()
            .name(schema.name())
            .doc(schema.doc())
            .type(schema.type())
            .fieldName(fieldName)
            .isOptional(schema.isOptional());
    if (Schema.Type.STRUCT == schema.type()) {
        // Recurse into each struct field, carrying its field name along.
        for (Field field : schema.fields()) {
            Plugin.SchemaInput fieldSchema = buildSchemaInput(field.schema(), field.name());
            schemaInput.addFields(fieldSchema);
        }
    } else if (Schema.Type.MAP == schema.type()) {
        // Maps document both key and value schemas; these single-argument calls
        // presumably go through an overload that supplies no field name.
        schemaInput.key(buildSchemaInput(schema.keySchema()));
        schemaInput.value(buildSchemaInput(schema.valueSchema()));
    } else if (Schema.Type.ARRAY == schema.type()) {
        // Arrays only document their element (value) schema.
        schemaInput.value(buildSchemaInput(schema.valueSchema()));
    }
    return schemaInput.build();
}
Also used : ImmutableSchemaInput(com.github.jcustenborder.kafka.connect.utils.templates.ImmutableSchemaInput) Field(org.apache.kafka.connect.data.Field) Plugin(com.github.jcustenborder.kafka.connect.utils.templates.Plugin)
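
The single-argument buildSchemaInput calls in the MAP and ARRAY branches suggest a convenience overload; a minimal sketch of what it likely looks like, assuming a null field name is acceptable for key, value, and element schemas:

// Hypothetical overload (not shown on this page): delegates to the two-argument
// form with no field name, for map key/value and array element schemas.
Plugin.SchemaInput buildSchemaInput(Schema schema) {
    return buildSchemaInput(schema, null);
}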

Example 57 with Field

Use of org.apache.kafka.connect.data.Field in project connect-utils by jcustenborder.

The class AbstractConverter, method convertStruct:

void convertStruct(final T result, Struct struct) {
    final Schema schema = struct.schema();
    for (final Field field : schema.fields()) {
        final String fieldName = field.name();
        log.trace("convertStruct() - Processing '{}'", field.name());
        final Object fieldValue = struct.get(field);
        try {
            if (null == fieldValue) {
                log.trace("convertStruct() - Setting '{}' to null.", fieldName);
                setNullField(result, fieldName);
                continue;
            }
            log.trace("convertStruct() - Field '{}'.field().schema().type() = '{}'", fieldName, field.schema().type());
            // Dispatch on the field's schema type. Logical types (Date, Time,
            // Timestamp, Decimal) are recognized by schema name before falling
            // back to the underlying primitive type.
            switch (field.schema().type()) {
                case STRING:
                    log.trace("convertStruct() - Processing '{}' as string.", fieldName);
                    setStringField(result, fieldName, (String) fieldValue);
                    break;
                case INT8:
                    log.trace("convertStruct() - Processing '{}' as int8.", fieldName);
                    setInt8Field(result, fieldName, (Byte) fieldValue);
                    break;
                case INT16:
                    log.trace("convertStruct() - Processing '{}' as int16.", fieldName);
                    setInt16Field(result, fieldName, (Short) fieldValue);
                    break;
                case INT32:
                    if (org.apache.kafka.connect.data.Date.LOGICAL_NAME.equals(field.schema().name())) {
                        log.trace("convertStruct() - Processing '{}' as date.", fieldName);
                        setDateField(result, fieldName, (Date) fieldValue);
                    } else if (org.apache.kafka.connect.data.Time.LOGICAL_NAME.equals(field.schema().name())) {
                        log.trace("convertStruct() - Processing '{}' as time.", fieldName);
                        setTimeField(result, fieldName, (Date) fieldValue);
                    } else {
                        Integer int32Value = (Integer) fieldValue;
                        log.trace("convertStruct() - Processing '{}' as int32.", fieldName);
                        setInt32Field(result, fieldName, int32Value);
                    }
                    break;
                case INT64:
                    if (Timestamp.LOGICAL_NAME.equals(field.schema().name())) {
                        log.trace("convertStruct() - Processing '{}' as timestamp.", fieldName);
                        setTimestampField(result, fieldName, (Date) fieldValue);
                    } else {
                        Long int64Value = (Long) fieldValue;
                        log.trace("convertStruct() - Processing '{}' as int64.", fieldName);
                        setInt64Field(result, fieldName, int64Value);
                    }
                    break;
                case BYTES:
                    if (Decimal.LOGICAL_NAME.equals(field.schema().name())) {
                        log.trace("convertStruct() - Processing '{}' as decimal.", fieldName);
                        setDecimalField(result, fieldName, (BigDecimal) fieldValue);
                    } else {
                        byte[] bytes = (byte[]) fieldValue;
                        log.trace("convertStruct() - Processing '{}' as bytes.", fieldName);
                        setBytesField(result, fieldName, bytes);
                    }
                    break;
                case FLOAT32:
                    log.trace("convertStruct() - Processing '{}' as float32.", fieldName);
                    setFloat32Field(result, fieldName, (Float) fieldValue);
                    break;
                case FLOAT64:
                    log.trace("convertStruct() - Processing '{}' as float64.", fieldName);
                    setFloat64Field(result, fieldName, (Double) fieldValue);
                    break;
                case BOOLEAN:
                    log.trace("convertStruct() - Processing '{}' as boolean.", fieldName);
                    setBooleanField(result, fieldName, (Boolean) fieldValue);
                    break;
                case STRUCT:
                    log.trace("convertStruct() - Processing '{}' as struct.", fieldName);
                    setStructField(result, fieldName, (Struct) fieldValue);
                    break;
                case ARRAY:
                    log.trace("convertStruct() - Processing '{}' as array.", fieldName);
                    // Note: the enclosing struct's schema is passed here rather than field.schema().
                    setArray(result, fieldName, schema, (List) fieldValue);
                    break;
                case MAP:
                    log.trace("convertStruct() - Processing '{}' as map.", fieldName);
                    // Note: the enclosing struct's schema is passed here rather than field.schema().
                    setMap(result, fieldName, schema, (Map) fieldValue);
                    break;
                default:
                    // Report the field's type; schema.type() would always be STRUCT here.
                    throw new DataException("Unsupported schema.type(): " + field.schema().type());
            }
        } catch (Exception ex) {
            // Wrap any per-field failure with the field name for easier debugging.
            throw new DataException(String.format("Exception thrown while processing field '%s'", fieldName), ex);
        }
    }
}
Also used : BigInteger(java.math.BigInteger) Field(org.apache.kafka.connect.data.Field) DataException(org.apache.kafka.connect.errors.DataException) Schema(org.apache.kafka.connect.data.Schema) Date(java.util.Date)
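
To make the template-method structure concrete, here is a hedged sketch of a Map-backed subclass; AbstractConverter's actual hook signatures are inferred from the calls in convertStruct and may differ from the project's API:

// Hypothetical subclass; hook names and shapes are inferred from convertStruct above.
public class MapOutputConverter extends AbstractConverter<Map<String, Object>> {
    @Override
    protected void setNullField(Map<String, Object> result, String fieldName) {
        result.put(fieldName, null);
    }

    @Override
    protected void setStringField(Map<String, Object> result, String fieldName, String value) {
        result.put(fieldName, value);
    }

    @Override
    protected void setInt32Field(Map<String, Object> result, String fieldName, Integer value) {
        result.put(fieldName, value);
    }

    // ...the remaining setXField hooks follow the same put-by-name pattern.
}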

Example 58 with Field

Use of org.apache.kafka.connect.data.Field in project connect-utils by jcustenborder.

The class StructHelperTest, method assertFoo:

void assertFoo(int count, Struct struct) {
    // Check for null before dereferencing struct.schema().
    assertNotNull(struct, "struct should not be null");
    assertEquals(SCHEMA_NAME, struct.schema().name(), "struct.schema().name() does not match.");
    assertEquals(count, struct.schema().fields().size(), "struct.schema().fields().size() does not match.");
    for (int i = 1; i <= count; i++) {
        final String fieldName = String.format("f%s", i);
        Field field = struct.schema().field(fieldName);
        assertNotNull(field, "schema should have field " + fieldName);
        assertEquals(Type.INT32, field.schema().type(), "schema().type() for " + fieldName + " does not match.");
        final Integer expectedValue = i;
        final Integer actualValue = struct.getInt32(fieldName);
        assertEquals(expectedValue, actualValue, String.format("value for field %s does not match", fieldName));
    }
}
Also used : Field(org.apache.kafka.connect.data.Field)
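
A minimal sketch of a struct that would satisfy assertFoo(2, struct), assuming SCHEMA_NAME holds the name given to the schema builder:

// Illustrative setup for the assertions above; field names follow the f1..fN pattern.
Schema schema = SchemaBuilder.struct().name(SCHEMA_NAME)
        .field("f1", Schema.INT32_SCHEMA)
        .field("f2", Schema.INT32_SCHEMA)
        .build();
Struct struct = new Struct(schema)
        .put("f1", 1)
        .put("f2", 2);
assertFoo(2, struct);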

Example 59 with Field

Use of org.apache.kafka.connect.data.Field in project connect-utils by jcustenborder.

The class StructHelper, method asMap:

public static Map<String, Object> asMap(Struct struct) {
    Preconditions.checkNotNull(struct, "struct cannot be null.");
    // Preserve field order with a LinkedHashMap sized to the field count.
    Map<String, Object> result = new LinkedHashMap<>(struct.schema().fields().size());
    for (Field field : struct.schema().fields()) {
        final Object value;
        if (Schema.Type.STRUCT == field.schema().type()) {
            // Recurse into nested structs so the result is a plain map of maps.
            // Note: a null value for an optional struct field would trip the
            // precondition in the recursive call.
            Struct s = struct.getStruct(field.name());
            value = asMap(s);
        } else {
            value = struct.get(field);
        }
        result.put(field.name(), value);
    }
    return result;
}
Also used : Field(org.apache.kafka.connect.data.Field) LinkedHashMap(java.util.LinkedHashMap) Struct(org.apache.kafka.connect.data.Struct)
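
A short usage sketch showing the recursive flattening on a nested struct (the schemas and values here are invented for illustration):

// Hypothetical schemas and values, for illustration only.
Schema addressSchema = SchemaBuilder.struct().name("Address")
        .field("city", Schema.STRING_SCHEMA)
        .build();
Schema personSchema = SchemaBuilder.struct().name("Person")
        .field("name", Schema.STRING_SCHEMA)
        .field("address", addressSchema)
        .build();
Struct person = new Struct(personSchema)
        .put("name", "Ada")
        .put("address", new Struct(addressSchema).put("city", "London"));

Map<String, Object> map = StructHelper.asMap(person);
// map -> {name=Ada, address={city=London}}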

Example 60 with Field

Use of org.apache.kafka.connect.data.Field in project apache-kafka-on-k8s by banzaicloud.

The class JsonConverter, method asJsonSchema:

public ObjectNode asJsonSchema(Schema schema) {
    if (schema == null)
        return null;
    // Serve repeated translations from the cache.
    ObjectNode cached = fromConnectSchemaCache.get(schema);
    if (cached != null)
        return cached;
    final ObjectNode jsonSchema;
    switch (schema.type()) {
        // Primitive types copy pre-built prototype nodes; deepCopy() keeps the
        // shared prototypes immutable.
        case BOOLEAN:
            jsonSchema = JsonSchema.BOOLEAN_SCHEMA.deepCopy();
            break;
        case BYTES:
            jsonSchema = JsonSchema.BYTES_SCHEMA.deepCopy();
            break;
        case FLOAT64:
            jsonSchema = JsonSchema.DOUBLE_SCHEMA.deepCopy();
            break;
        case FLOAT32:
            jsonSchema = JsonSchema.FLOAT_SCHEMA.deepCopy();
            break;
        case INT8:
            jsonSchema = JsonSchema.INT8_SCHEMA.deepCopy();
            break;
        case INT16:
            jsonSchema = JsonSchema.INT16_SCHEMA.deepCopy();
            break;
        case INT32:
            jsonSchema = JsonSchema.INT32_SCHEMA.deepCopy();
            break;
        case INT64:
            jsonSchema = JsonSchema.INT64_SCHEMA.deepCopy();
            break;
        case STRING:
            jsonSchema = JsonSchema.STRING_SCHEMA.deepCopy();
            break;
        case ARRAY:
            // Composite types are built recursively from their nested schemas.
            jsonSchema = JsonNodeFactory.instance.objectNode().put(JsonSchema.SCHEMA_TYPE_FIELD_NAME, JsonSchema.ARRAY_TYPE_NAME);
            jsonSchema.set(JsonSchema.ARRAY_ITEMS_FIELD_NAME, asJsonSchema(schema.valueSchema()));
            break;
        case MAP:
            jsonSchema = JsonNodeFactory.instance.objectNode().put(JsonSchema.SCHEMA_TYPE_FIELD_NAME, JsonSchema.MAP_TYPE_NAME);
            jsonSchema.set(JsonSchema.MAP_KEY_FIELD_NAME, asJsonSchema(schema.keySchema()));
            jsonSchema.set(JsonSchema.MAP_VALUE_FIELD_NAME, asJsonSchema(schema.valueSchema()));
            break;
        case STRUCT:
            jsonSchema = JsonNodeFactory.instance.objectNode().put(JsonSchema.SCHEMA_TYPE_FIELD_NAME, JsonSchema.STRUCT_TYPE_NAME);
            ArrayNode fields = JsonNodeFactory.instance.arrayNode();
            for (Field field : schema.fields()) {
                // Each field entry is the JSON schema of its value plus the field name.
                ObjectNode fieldJsonSchema = asJsonSchema(field.schema()).deepCopy();
                fieldJsonSchema.put(JsonSchema.STRUCT_FIELD_NAME_FIELD_NAME, field.name());
                fields.add(fieldJsonSchema);
            }
            jsonSchema.set(JsonSchema.STRUCT_FIELDS_FIELD_NAME, fields);
            break;
        default:
            throw new DataException("Couldn't translate unsupported schema type " + schema + ".");
    }
    // Attach common metadata: optionality, name, version, doc, parameters, and default value.
    jsonSchema.put(JsonSchema.SCHEMA_OPTIONAL_FIELD_NAME, schema.isOptional());
    if (schema.name() != null)
        jsonSchema.put(JsonSchema.SCHEMA_NAME_FIELD_NAME, schema.name());
    if (schema.version() != null)
        jsonSchema.put(JsonSchema.SCHEMA_VERSION_FIELD_NAME, schema.version());
    if (schema.doc() != null)
        jsonSchema.put(JsonSchema.SCHEMA_DOC_FIELD_NAME, schema.doc());
    if (schema.parameters() != null) {
        ObjectNode jsonSchemaParams = JsonNodeFactory.instance.objectNode();
        for (Map.Entry<String, String> prop : schema.parameters().entrySet()) {
            jsonSchemaParams.put(prop.getKey(), prop.getValue());
        }
        jsonSchema.set(JsonSchema.SCHEMA_PARAMETERS_FIELD_NAME, jsonSchemaParams);
    }
    if (schema.defaultValue() != null)
        jsonSchema.set(JsonSchema.SCHEMA_DEFAULT_FIELD_NAME, convertToJson(schema, schema.defaultValue()));
    fromConnectSchemaCache.put(schema, jsonSchema);
    return jsonSchema;
}
Also used : Field(org.apache.kafka.connect.data.Field) DataException(org.apache.kafka.connect.errors.DataException) ObjectNode(com.fasterxml.jackson.databind.node.ObjectNode) ArrayNode(com.fasterxml.jackson.databind.node.ArrayNode) HashMap(java.util.HashMap) Map(java.util.Map) EnumMap(java.util.EnumMap)
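
A brief usage sketch, assuming a JsonConverter instance from this codebase where asJsonSchema is public; the schemas.enable key is the converter's standard configuration, everything else is illustrative:

// Illustrative only: translate a Connect struct schema into its JSON envelope form.
JsonConverter converter = new JsonConverter();
converter.configure(Collections.singletonMap("schemas.enable", "true"), false);

Schema schema = SchemaBuilder.struct().name("com.example.User")
        .field("id", Schema.INT64_SCHEMA)
        .field("email", Schema.OPTIONAL_STRING_SCHEMA)
        .build();

ObjectNode jsonSchema = converter.asJsonSchema(schema);
// Produces something like:
// {"type":"struct","fields":[{"type":"int64","optional":false,"field":"id"},
//  {"type":"string","optional":true,"field":"email"}],"optional":false,"name":"com.example.User"}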

Aggregations

Types most frequently used together with Field (org.apache.kafka.connect.data.Field) across the indexed examples, with usage counts:

Field (org.apache.kafka.connect.data.Field): 82
Struct (org.apache.kafka.connect.data.Struct): 38
Schema (org.apache.kafka.connect.data.Schema): 33
SchemaBuilder (org.apache.kafka.connect.data.SchemaBuilder): 17
DataException (org.apache.kafka.connect.errors.DataException): 14
List (java.util.List): 12
ArrayList (java.util.ArrayList): 11
Requirements.requireStruct (org.apache.kafka.connect.transforms.util.Requirements.requireStruct): 11
HashMap (java.util.HashMap): 10
Map (java.util.Map): 8
Test (org.junit.Test): 8
Date (java.util.Date): 7
ConnectSchema (org.apache.kafka.connect.data.ConnectSchema): 6
KsqlException (io.confluent.ksql.util.KsqlException): 5
BigDecimal (java.math.BigDecimal): 5
ArrayNode (com.fasterxml.jackson.databind.node.ArrayNode): 4
ObjectNode (com.fasterxml.jackson.databind.node.ObjectNode): 4
SchemaKStream (io.confluent.ksql.structured.SchemaKStream): 4
ByteBuffer (java.nio.ByteBuffer): 4
JsonNode (com.fasterxml.jackson.databind.JsonNode): 3