Search in sources :

Example 6 with DataException

use of org.apache.kafka.connect.errors.DataException in project kafka by apache.

The class JsonConverter, method asConnectSchema:

/**
 * Translates a JSON envelope schema node (the format produced by {@code asJsonSchema}) into a
 * Connect {@link Schema}.
 *
 * @param jsonSchema the JSON representation of the schema; a JSON null yields a null schema
 * @return the corresponding Connect schema, possibly served from {@code toConnectSchemaCache}
 * @throws DataException if the "type" field is missing or unknown, or a nested schema is malformed
 */
public Schema asConnectSchema(JsonNode jsonSchema) {
    if (jsonSchema.isNull())
        return null;
    // Memoized: an identical JSON schema node always resolves to the same Connect schema instance.
    Schema cached = toConnectSchemaCache.get(jsonSchema);
    if (cached != null)
        return cached;
    JsonNode schemaTypeNode = jsonSchema.get(JsonSchema.SCHEMA_TYPE_FIELD_NAME);
    if (schemaTypeNode == null || !schemaTypeNode.isTextual())
        throw new DataException("Schema must contain 'type' field");
    final SchemaBuilder builder;
    switch(schemaTypeNode.textValue()) {
        case JsonSchema.BOOLEAN_TYPE_NAME:
            builder = SchemaBuilder.bool();
            break;
        case JsonSchema.INT8_TYPE_NAME:
            builder = SchemaBuilder.int8();
            break;
        case JsonSchema.INT16_TYPE_NAME:
            builder = SchemaBuilder.int16();
            break;
        case JsonSchema.INT32_TYPE_NAME:
            builder = SchemaBuilder.int32();
            break;
        case JsonSchema.INT64_TYPE_NAME:
            builder = SchemaBuilder.int64();
            break;
        case JsonSchema.FLOAT_TYPE_NAME:
            builder = SchemaBuilder.float32();
            break;
        case JsonSchema.DOUBLE_TYPE_NAME:
            builder = SchemaBuilder.float64();
            break;
        case JsonSchema.BYTES_TYPE_NAME:
            builder = SchemaBuilder.bytes();
            break;
        case JsonSchema.STRING_TYPE_NAME:
            builder = SchemaBuilder.string();
            break;
        case JsonSchema.ARRAY_TYPE_NAME:
            // Element type is described by a nested schema node and decoded recursively.
            JsonNode elemSchema = jsonSchema.get(JsonSchema.ARRAY_ITEMS_FIELD_NAME);
            if (elemSchema == null)
                throw new DataException("Array schema did not specify the element type");
            builder = SchemaBuilder.array(asConnectSchema(elemSchema));
            break;
        case JsonSchema.MAP_TYPE_NAME:
            JsonNode keySchema = jsonSchema.get(JsonSchema.MAP_KEY_FIELD_NAME);
            if (keySchema == null)
                throw new DataException("Map schema did not specify the key type");
            JsonNode valueSchema = jsonSchema.get(JsonSchema.MAP_VALUE_FIELD_NAME);
            if (valueSchema == null)
                throw new DataException("Map schema did not specify the value type");
            builder = SchemaBuilder.map(asConnectSchema(keySchema), asConnectSchema(valueSchema));
            break;
        case JsonSchema.STRUCT_TYPE_NAME:
            builder = SchemaBuilder.struct();
            JsonNode fields = jsonSchema.get(JsonSchema.STRUCT_FIELDS_FIELD_NAME);
            if (fields == null || !fields.isArray())
                throw new DataException("Struct schema's \"fields\" argument is not an array.");
            for (JsonNode field : fields) {
                JsonNode jsonFieldName = field.get(JsonSchema.STRUCT_FIELD_NAME_FIELD_NAME);
                if (jsonFieldName == null || !jsonFieldName.isTextual())
                    throw new DataException("Struct schema's field name not specified properly");
                // The whole field node (name entry plus schema entries) is passed to the recursive
                // call; this method only reads the keys it knows, so the name entry is ignored.
                builder.field(jsonFieldName.asText(), asConnectSchema(field));
            }
            break;
        default:
            throw new DataException("Unknown schema type: " + schemaTypeNode.textValue());
    }
    // An absent or non-boolean "optional" entry defaults the schema to required.
    JsonNode schemaOptionalNode = jsonSchema.get(JsonSchema.SCHEMA_OPTIONAL_FIELD_NAME);
    if (schemaOptionalNode != null && schemaOptionalNode.isBoolean() && schemaOptionalNode.booleanValue())
        builder.optional();
    else
        builder.required();
    JsonNode schemaNameNode = jsonSchema.get(JsonSchema.SCHEMA_NAME_FIELD_NAME);
    if (schemaNameNode != null && schemaNameNode.isTextual())
        builder.name(schemaNameNode.textValue());
    // Only integral JSON numbers are accepted as a version; other node types are silently ignored.
    JsonNode schemaVersionNode = jsonSchema.get(JsonSchema.SCHEMA_VERSION_FIELD_NAME);
    if (schemaVersionNode != null && schemaVersionNode.isIntegralNumber()) {
        builder.version(schemaVersionNode.intValue());
    }
    JsonNode schemaDocNode = jsonSchema.get(JsonSchema.SCHEMA_DOC_FIELD_NAME);
    if (schemaDocNode != null && schemaDocNode.isTextual())
        builder.doc(schemaDocNode.textValue());
    JsonNode schemaParamsNode = jsonSchema.get(JsonSchema.SCHEMA_PARAMETERS_FIELD_NAME);
    if (schemaParamsNode != null && schemaParamsNode.isObject()) {
        Iterator<Map.Entry<String, JsonNode>> paramsIt = schemaParamsNode.fields();
        while (paramsIt.hasNext()) {
            Map.Entry<String, JsonNode> entry = paramsIt.next();
            JsonNode paramValue = entry.getValue();
            if (!paramValue.isTextual())
                throw new DataException("Schema parameters must have string values.");
            builder.parameter(entry.getKey(), paramValue.textValue());
        }
    }
    // The default value is decoded against the schema assembled so far (the builder is passed
    // where a Schema is expected, so SchemaBuilder must implement Schema).
    JsonNode schemaDefaultNode = jsonSchema.get(JsonSchema.SCHEMA_DEFAULT_FIELD_NAME);
    if (schemaDefaultNode != null)
        builder.defaultValue(convertToConnect(builder, schemaDefaultNode));
    Schema result = builder.build();
    toConnectSchemaCache.put(jsonSchema, result);
    return result;
}
Also used : DataException(org.apache.kafka.connect.errors.DataException) Schema(org.apache.kafka.connect.data.Schema) ConnectSchema(org.apache.kafka.connect.data.ConnectSchema) SchemaBuilder(org.apache.kafka.connect.data.SchemaBuilder) JsonNode(com.fasterxml.jackson.databind.JsonNode) HashMap(java.util.HashMap) Map(java.util.Map)

Example 7 with DataException

use of org.apache.kafka.connect.errors.DataException in project kafka by apache.

The class JsonConverter, method toConnectData:

/**
 * Deserializes a serialized JSON payload from the given topic into a Connect
 * {@link SchemaAndValue}.
 *
 * <p>With {@code schemas.enable} the payload must be an envelope object holding exactly a
 * "schema" and a "payload" field; without it, the raw JSON is wrapped in a synthetic envelope
 * with a null schema so both modes share the same downstream conversion path.
 *
 * @throws DataException if deserialization fails or the envelope shape is invalid
 */
@Override
public SchemaAndValue toConnectData(String topic, byte[] value) {
    final JsonNode parsed;
    try {
        parsed = deserializer.deserialize(topic, value);
    } catch (SerializationException e) {
        throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e);
    }

    if (enableSchemas) {
        // Envelope mode: exactly two fields, "schema" and "payload", nothing else.
        boolean validEnvelope = parsed != null && parsed.isObject() && parsed.size() == 2 && parsed.has("schema") && parsed.has("payload");
        if (!validEnvelope)
            throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration.");
        return jsonToConnect(parsed);
    }

    // Schemaless mode: the schema was stripped during serialization, so fill in a null schema.
    ObjectNode envelope = JsonNodeFactory.instance.objectNode();
    envelope.set("schema", null);
    envelope.set("payload", parsed);
    return jsonToConnect(envelope);
}
Also used : DataException(org.apache.kafka.connect.errors.DataException) SerializationException(org.apache.kafka.common.errors.SerializationException) ObjectNode(com.fasterxml.jackson.databind.node.ObjectNode) JsonNode(com.fasterxml.jackson.databind.JsonNode)

Example 8 with DataException

use of org.apache.kafka.connect.errors.DataException in project kafka by apache.

The class JsonConverter, method asJsonSchema:

/**
 * Translates a Connect {@link Schema} into its JSON envelope-schema representation.
 *
 * @param schema the Connect schema to encode; null maps to null
 * @return an ObjectNode describing the schema, possibly served from {@code fromConnectSchemaCache}
 * @throws DataException if the schema type has no JSON representation
 */
public ObjectNode asJsonSchema(Schema schema) {
    if (schema == null)
        return null;
    // NOTE(review): the cached node is returned directly, so callers share one mutable
    // ObjectNode per schema — verify callers never modify the returned node.
    ObjectNode cached = fromConnectSchemaCache.get(schema);
    if (cached != null)
        return cached;
    final ObjectNode jsonSchema;
    switch(schema.type()) {
        // Primitive types deep-copy a shared template node so the per-schema metadata
        // added below does not leak into the template.
        case BOOLEAN:
            jsonSchema = JsonSchema.BOOLEAN_SCHEMA.deepCopy();
            break;
        case BYTES:
            jsonSchema = JsonSchema.BYTES_SCHEMA.deepCopy();
            break;
        case FLOAT64:
            jsonSchema = JsonSchema.DOUBLE_SCHEMA.deepCopy();
            break;
        case FLOAT32:
            jsonSchema = JsonSchema.FLOAT_SCHEMA.deepCopy();
            break;
        case INT8:
            jsonSchema = JsonSchema.INT8_SCHEMA.deepCopy();
            break;
        case INT16:
            jsonSchema = JsonSchema.INT16_SCHEMA.deepCopy();
            break;
        case INT32:
            jsonSchema = JsonSchema.INT32_SCHEMA.deepCopy();
            break;
        case INT64:
            jsonSchema = JsonSchema.INT64_SCHEMA.deepCopy();
            break;
        case STRING:
            jsonSchema = JsonSchema.STRING_SCHEMA.deepCopy();
            break;
        case ARRAY:
            // NOTE(review): the recursive result may be a cached node and is attached
            // without deepCopy (unlike STRUCT fields below) — confirm this sharing is intentional.
            jsonSchema = JsonNodeFactory.instance.objectNode().put(JsonSchema.SCHEMA_TYPE_FIELD_NAME, JsonSchema.ARRAY_TYPE_NAME);
            jsonSchema.set(JsonSchema.ARRAY_ITEMS_FIELD_NAME, asJsonSchema(schema.valueSchema()));
            break;
        case MAP:
            jsonSchema = JsonNodeFactory.instance.objectNode().put(JsonSchema.SCHEMA_TYPE_FIELD_NAME, JsonSchema.MAP_TYPE_NAME);
            jsonSchema.set(JsonSchema.MAP_KEY_FIELD_NAME, asJsonSchema(schema.keySchema()));
            jsonSchema.set(JsonSchema.MAP_VALUE_FIELD_NAME, asJsonSchema(schema.valueSchema()));
            break;
        case STRUCT:
            jsonSchema = JsonNodeFactory.instance.objectNode().put(JsonSchema.SCHEMA_TYPE_FIELD_NAME, JsonSchema.STRUCT_TYPE_NAME);
            ArrayNode fields = JsonNodeFactory.instance.arrayNode();
            for (Field field : schema.fields()) {
                // Deep-copy the (possibly cached) field schema before injecting the field name,
                // so the cached node for that schema is not mutated.
                ObjectNode fieldJsonSchema = asJsonSchema(field.schema()).deepCopy();
                fieldJsonSchema.put(JsonSchema.STRUCT_FIELD_NAME_FIELD_NAME, field.name());
                fields.add(fieldJsonSchema);
            }
            jsonSchema.set(JsonSchema.STRUCT_FIELDS_FIELD_NAME, fields);
            break;
        default:
            throw new DataException("Couldn't translate unsupported schema type " + schema + ".");
    }
    // Common metadata shared by every schema type.
    jsonSchema.put(JsonSchema.SCHEMA_OPTIONAL_FIELD_NAME, schema.isOptional());
    if (schema.name() != null)
        jsonSchema.put(JsonSchema.SCHEMA_NAME_FIELD_NAME, schema.name());
    if (schema.version() != null)
        jsonSchema.put(JsonSchema.SCHEMA_VERSION_FIELD_NAME, schema.version());
    if (schema.doc() != null)
        jsonSchema.put(JsonSchema.SCHEMA_DOC_FIELD_NAME, schema.doc());
    if (schema.parameters() != null) {
        ObjectNode jsonSchemaParams = JsonNodeFactory.instance.objectNode();
        for (Map.Entry<String, String> prop : schema.parameters().entrySet()) jsonSchemaParams.put(prop.getKey(), prop.getValue());
        jsonSchema.set(JsonSchema.SCHEMA_PARAMETERS_FIELD_NAME, jsonSchemaParams);
    }
    if (schema.defaultValue() != null)
        jsonSchema.set(JsonSchema.SCHEMA_DEFAULT_FIELD_NAME, convertToJson(schema, schema.defaultValue()));
    fromConnectSchemaCache.put(schema, jsonSchema);
    return jsonSchema;
}
Also used : Field(org.apache.kafka.connect.data.Field) DataException(org.apache.kafka.connect.errors.DataException) ObjectNode(com.fasterxml.jackson.databind.node.ObjectNode) ArrayNode(com.fasterxml.jackson.databind.node.ArrayNode) HashMap(java.util.HashMap) Map(java.util.Map)

Example 9 with DataException

use of org.apache.kafka.connect.errors.DataException in project kafka by apache.

The class JsonConverter, method convertToJson:

/**
     * Convert this object, in the org.apache.kafka.connect.data format, into a JSON object, returning both the schema
     * and the converted object.
     */
/**
 * Converts a value in the org.apache.kafka.connect.data format into its JSON representation.
 *
 * <p>Logical types (identified by {@code schema.name()}) are first mapped to their serialized
 * primitive form via {@code TO_JSON_LOGICAL_CONVERTERS}. When {@code schema} is null the
 * Connect type is inferred from the Java class of the value.
 *
 * @param schema       the Connect schema for the value, or null for schemaless data
 * @param logicalValue the value to convert; may be null
 * @return the JSON node for the value, or null when both schema and value are null
 * @throws DataException if the value cannot be represented under the (inferred) schema type
 */
private static JsonNode convertToJson(Schema schema, Object logicalValue) {
    if (logicalValue == null) {
        // Any schema is valid and we don't have a default, so treat this as an optional schema
        if (schema == null)
            return null;
        if (schema.defaultValue() != null)
            return convertToJson(schema, schema.defaultValue());
        if (schema.isOptional())
            return JsonNodeFactory.instance.nullNode();
        throw new DataException("Conversion error: null value for field that is required and has no default value");
    }
    // Replace logical values (e.g. Decimal, Timestamp) with their underlying primitive form.
    Object value = logicalValue;
    if (schema != null && schema.name() != null) {
        LogicalTypeConverter logicalConverter = TO_JSON_LOGICAL_CONVERTERS.get(schema.name());
        if (logicalConverter != null)
            value = logicalConverter.convert(schema, logicalValue);
    }
    try {
        final Schema.Type schemaType;
        if (schema == null) {
            schemaType = ConnectSchema.schemaType(value.getClass());
            if (schemaType == null)
                throw new DataException("Java class " + value.getClass() + " does not have corresponding schema type.");
        } else {
            schemaType = schema.type();
        }
        switch(schemaType) {
            case INT8:
                return JsonNodeFactory.instance.numberNode((Byte) value);
            case INT16:
                return JsonNodeFactory.instance.numberNode((Short) value);
            case INT32:
                return JsonNodeFactory.instance.numberNode((Integer) value);
            case INT64:
                return JsonNodeFactory.instance.numberNode((Long) value);
            case FLOAT32:
                return JsonNodeFactory.instance.numberNode((Float) value);
            case FLOAT64:
                return JsonNodeFactory.instance.numberNode((Double) value);
            case BOOLEAN:
                return JsonNodeFactory.instance.booleanNode((Boolean) value);
            case STRING:
                CharSequence charSeq = (CharSequence) value;
                return JsonNodeFactory.instance.textNode(charSeq.toString());
            case BYTES:
                if (value instanceof byte[])
                    return JsonNodeFactory.instance.binaryNode((byte[]) value);
                else if (value instanceof ByteBuffer)
                    return JsonNodeFactory.instance.binaryNode(((ByteBuffer) value).array());
                else
                    throw new DataException("Invalid type for bytes type: " + value.getClass());
            case ARRAY:
                {
                    Collection<?> collection = (Collection<?>) value;
                    ArrayNode list = JsonNodeFactory.instance.arrayNode();
                    for (Object elem : collection) {
                        // schema.valueSchema() describes elements; schemaless stays schemaless.
                        Schema valueSchema = schema == null ? null : schema.valueSchema();
                        JsonNode fieldValue = convertToJson(valueSchema, elem);
                        list.add(fieldValue);
                    }
                    return list;
                }
            case MAP:
                {
                    Map<?, ?> map = (Map<?, ?>) value;
                    // If true, using string keys and JSON object; if false, using non-string keys and Array-encoding
                    boolean objectMode;
                    if (schema == null) {
                        objectMode = true;
                        for (Map.Entry<?, ?> entry : map.entrySet()) {
                            if (!(entry.getKey() instanceof String)) {
                                objectMode = false;
                                break;
                            }
                        }
                    } else {
                        objectMode = schema.keySchema().type() == Schema.Type.STRING;
                    }
                    ObjectNode obj = null;
                    ArrayNode list = null;
                    if (objectMode)
                        obj = JsonNodeFactory.instance.objectNode();
                    else
                        list = JsonNodeFactory.instance.arrayNode();
                    for (Map.Entry<?, ?> entry : map.entrySet()) {
                        Schema keySchema = schema == null ? null : schema.keySchema();
                        Schema valueSchema = schema == null ? null : schema.valueSchema();
                        JsonNode mapKey = convertToJson(keySchema, entry.getKey());
                        JsonNode mapValue = convertToJson(valueSchema, entry.getValue());
                        if (objectMode)
                            obj.set(mapKey.asText(), mapValue);
                        else
                            list.add(JsonNodeFactory.instance.arrayNode().add(mapKey).add(mapValue));
                    }
                    return objectMode ? obj : list;
                }
            case STRUCT:
                {
                    Struct struct = (Struct) value;
                    // Compare schemas structurally: a distinct-but-equal Schema instance describes
                    // the same shape and must be accepted (identity '!=' rejected valid structs).
                    if (!struct.schema().equals(schema))
                        throw new DataException("Mismatching schema.");
                    ObjectNode obj = JsonNodeFactory.instance.objectNode();
                    for (Field field : schema.fields()) {
                        obj.set(field.name(), convertToJson(field.schema(), struct.get(field)));
                    }
                    return obj;
                }
        }
        throw new DataException("Couldn't convert " + value + " to JSON.");
    } catch (ClassCastException e) {
        // schema may be null here (schemaless path); calling schema.type() unconditionally
        // threw an NPE that masked the real conversion error. Also chain the cause.
        String schemaTypeStr = (schema != null) ? schema.type().toString() : "unknown schema";
        throw new DataException("Invalid type for " + schemaTypeStr + ": " + value.getClass(), e);
    }
}
Also used : ObjectNode(com.fasterxml.jackson.databind.node.ObjectNode) Schema(org.apache.kafka.connect.data.Schema) ConnectSchema(org.apache.kafka.connect.data.ConnectSchema) JsonNode(com.fasterxml.jackson.databind.JsonNode) ByteBuffer(java.nio.ByteBuffer) Struct(org.apache.kafka.connect.data.Struct) DataException(org.apache.kafka.connect.errors.DataException) Field(org.apache.kafka.connect.data.Field) Collection(java.util.Collection) ArrayNode(com.fasterxml.jackson.databind.node.ArrayNode) HashMap(java.util.HashMap) Map(java.util.Map)

Example 10 with DataException

use of org.apache.kafka.connect.errors.DataException in project kafka by apache.

The class JsonConverter, method convertToConnect:

/**
 * Converts a JSON value into the corresponding Connect value.
 *
 * <p>With a schema, the schema's type drives the conversion (including defaults and optional
 * handling for JSON nulls). Without one, the Connect type is inferred from the JSON node itself,
 * and any logical-type conversion registered for {@code schema.name()} is applied afterwards.
 *
 * @param schema    the expected Connect schema, or null for schemaless data
 * @param jsonValue the JSON node to convert; must not be null (a JSON null node is fine)
 * @return the converted Connect value, possibly null
 * @throws DataException on a required-field null or an unconvertible node type
 */
private static Object convertToConnect(Schema schema, JsonNode jsonValue) {
    final Schema.Type schemaType;
    if (schema != null) {
        if (jsonValue.isNull()) {
            // Any logical type conversions should already have been applied to the default.
            if (schema.defaultValue() != null)
                return schema.defaultValue();
            if (schema.isOptional())
                return null;
            throw new DataException("Invalid null value for required " + schema.type() + " field");
        }
        schemaType = schema.type();
    } else {
        // Schemaless: infer the Connect type from the JSON node itself.
        if (jsonValue.isNull())
            return null; // Special case: null with no schema carries no type at all.
        if (jsonValue.isBoolean())
            schemaType = Schema.Type.BOOLEAN;
        else if (jsonValue.isNumber())
            schemaType = jsonValue.isIntegralNumber() ? Schema.Type.INT64 : Schema.Type.FLOAT64;
        else if (jsonValue.isArray())
            schemaType = Schema.Type.ARRAY;
        else if (jsonValue.isObject())
            schemaType = Schema.Type.MAP;
        else if (jsonValue.isTextual())
            schemaType = Schema.Type.STRING;
        else
            schemaType = null; // BINARY, MISSING, POJO nodes: no Connect equivalent.
    }
    final JsonToConnectTypeConverter converter = TO_CONNECT_CONVERTERS.get(schemaType);
    if (converter == null)
        throw new DataException("Unknown schema type: " + String.valueOf(schemaType));
    Object result = converter.convert(schema, jsonValue);
    // Named schemas may carry a logical type (e.g. Decimal); apply its converter last.
    if (schema != null && schema.name() != null) {
        LogicalTypeConverter logical = TO_CONNECT_LOGICAL_CONVERTERS.get(schema.name());
        if (logical != null)
            result = logical.convert(schema, result);
    }
    return result;
}
Also used : DataException(org.apache.kafka.connect.errors.DataException) Schema(org.apache.kafka.connect.data.Schema) ConnectSchema(org.apache.kafka.connect.data.ConnectSchema)

Aggregations

DataException (org.apache.kafka.connect.errors.DataException)11 Map (java.util.Map)5 HashMap (java.util.HashMap)4 ConnectSchema (org.apache.kafka.connect.data.ConnectSchema)4 Schema (org.apache.kafka.connect.data.Schema)4 JsonNode (com.fasterxml.jackson.databind.JsonNode)3 ObjectNode (com.fasterxml.jackson.databind.node.ObjectNode)3 ArrayNode (com.fasterxml.jackson.databind.node.ArrayNode)2 Calendar (java.util.Calendar)2 Field (org.apache.kafka.connect.data.Field)2 ByteBuffer (java.nio.ByteBuffer)1 Collection (java.util.Collection)1 Date (java.util.Date)1 List (java.util.List)1 SerializationException (org.apache.kafka.common.errors.SerializationException)1 SchemaBuilder (org.apache.kafka.connect.data.SchemaBuilder)1 Struct (org.apache.kafka.connect.data.Struct)1 SchemaBuilderException (org.apache.kafka.connect.errors.SchemaBuilderException)1