Use of org.apache.kafka.connect.data.Field in the Apache Kafka project: class JsonConverter, method asJsonSchema.
/**
 * Translates a Connect {@link Schema} into its JSON schema representation.
 * Successful translations are memoized in {@code fromConnectSchemaCache}, so
 * repeated conversions of the same schema instance return the cached node.
 *
 * @param schema the Connect schema to translate; may be {@code null}
 * @return the JSON schema as an {@code ObjectNode}, or {@code null} when the
 *         input schema is {@code null}
 * @throws DataException if the schema type has no JSON representation
 */
public ObjectNode asJsonSchema(Schema schema) {
    if (schema == null)
        return null;

    final ObjectNode cachedSchema = fromConnectSchemaCache.get(schema);
    if (cachedSchema != null)
        return cachedSchema;

    final ObjectNode schemaNode;
    switch (schema.type()) {
        case INT8:
            schemaNode = JsonSchema.INT8_SCHEMA.deepCopy();
            break;
        case INT16:
            schemaNode = JsonSchema.INT16_SCHEMA.deepCopy();
            break;
        case INT32:
            schemaNode = JsonSchema.INT32_SCHEMA.deepCopy();
            break;
        case INT64:
            schemaNode = JsonSchema.INT64_SCHEMA.deepCopy();
            break;
        case FLOAT32:
            schemaNode = JsonSchema.FLOAT_SCHEMA.deepCopy();
            break;
        case FLOAT64:
            schemaNode = JsonSchema.DOUBLE_SCHEMA.deepCopy();
            break;
        case BOOLEAN:
            schemaNode = JsonSchema.BOOLEAN_SCHEMA.deepCopy();
            break;
        case STRING:
            schemaNode = JsonSchema.STRING_SCHEMA.deepCopy();
            break;
        case BYTES:
            schemaNode = JsonSchema.BYTES_SCHEMA.deepCopy();
            break;
        case ARRAY:
            // Arrays carry a single nested schema describing their elements.
            schemaNode = JSON_NODE_FACTORY.objectNode().put(JsonSchema.SCHEMA_TYPE_FIELD_NAME, JsonSchema.ARRAY_TYPE_NAME);
            schemaNode.set(JsonSchema.ARRAY_ITEMS_FIELD_NAME, asJsonSchema(schema.valueSchema()));
            break;
        case MAP:
            // Maps carry two nested schemas: one for keys, one for values.
            schemaNode = JSON_NODE_FACTORY.objectNode().put(JsonSchema.SCHEMA_TYPE_FIELD_NAME, JsonSchema.MAP_TYPE_NAME);
            schemaNode.set(JsonSchema.MAP_KEY_FIELD_NAME, asJsonSchema(schema.keySchema()));
            schemaNode.set(JsonSchema.MAP_VALUE_FIELD_NAME, asJsonSchema(schema.valueSchema()));
            break;
        case STRUCT:
            // Structs are encoded as an ordered array of named field schemas.
            schemaNode = JSON_NODE_FACTORY.objectNode().put(JsonSchema.SCHEMA_TYPE_FIELD_NAME, JsonSchema.STRUCT_TYPE_NAME);
            final ArrayNode fieldArray = JSON_NODE_FACTORY.arrayNode();
            for (Field field : schema.fields()) {
                // Copy the (possibly cached) field schema before mutating it with the name.
                final ObjectNode fieldSchemaNode = asJsonSchema(field.schema()).deepCopy();
                fieldSchemaNode.put(JsonSchema.STRUCT_FIELD_NAME_FIELD_NAME, field.name());
                fieldArray.add(fieldSchemaNode);
            }
            schemaNode.set(JsonSchema.STRUCT_FIELDS_FIELD_NAME, fieldArray);
            break;
        default:
            throw new DataException("Couldn't translate unsupported schema type " + schema + ".");
    }

    // Common metadata shared by every schema type.
    schemaNode.put(JsonSchema.SCHEMA_OPTIONAL_FIELD_NAME, schema.isOptional());
    if (schema.name() != null)
        schemaNode.put(JsonSchema.SCHEMA_NAME_FIELD_NAME, schema.name());
    if (schema.version() != null)
        schemaNode.put(JsonSchema.SCHEMA_VERSION_FIELD_NAME, schema.version());
    if (schema.doc() != null)
        schemaNode.put(JsonSchema.SCHEMA_DOC_FIELD_NAME, schema.doc());
    if (schema.parameters() != null) {
        final ObjectNode paramsNode = JSON_NODE_FACTORY.objectNode();
        for (Map.Entry<String, String> entry : schema.parameters().entrySet())
            paramsNode.put(entry.getKey(), entry.getValue());
        schemaNode.set(JsonSchema.SCHEMA_PARAMETERS_FIELD_NAME, paramsNode);
    }
    if (schema.defaultValue() != null)
        schemaNode.set(JsonSchema.SCHEMA_DEFAULT_FIELD_NAME, convertToJson(schema, schema.defaultValue()));

    fromConnectSchemaCache.put(schema, schemaNode);
    return schemaNode;
}
Use of org.apache.kafka.connect.data.Field in the Apache Kafka project: class TimestampConverter, method applyValueWithSchema.
/**
 * Rebuilds the given Struct under {@code updatedSchema}, converting the
 * configured timestamp field to the target type and copying every other
 * field through unchanged.
 *
 * @param value         the source struct; may be {@code null}
 * @param updatedSchema the schema the rebuilt struct conforms to
 * @return the rebuilt struct, or {@code null} when {@code value} is {@code null}
 */
private Struct applyValueWithSchema(Struct value, Schema updatedSchema) {
    if (value == null)
        return null;
    final Struct result = new Struct(updatedSchema);
    for (Field field : value.schema().fields()) {
        final Object fieldValue = field.name().equals(config.field)
                ? convertTimestamp(value.get(field), timestampTypeFromSchema(field.schema()))
                : value.get(field);
        result.put(field.name(), fieldValue);
    }
    return result;
}
Use of org.apache.kafka.connect.data.Field in the Apache Kafka project: class TimestampConverter, method applyWithSchema.
/**
 * Applies the timestamp conversion to a schema'd record. With no field
 * configured, the whole operating value is treated as the timestamp and the
 * record's schema is replaced by the target type's schema. With a field
 * configured, the struct is rebuilt under an updated schema (cached per
 * source schema) in which only that field's schema changes.
 *
 * @param record the record to transform
 * @return a new record carrying the converted timestamp value and schema
 */
private R applyWithSchema(R record) {
    final Schema schema = operatingSchema(record);
    final TimestampTranslator translator = TRANSLATORS.get(config.type);

    if (config.field.isEmpty()) {
        // Whole-value mode: the target timestamp type dictates the new schema.
        final Object value = operatingValue(record);
        final Schema targetSchema = translator.typeSchema(schema.isOptional());
        return newRecord(record, targetSchema, convertTimestamp(value, timestampTypeFromSchema(schema)));
    }

    // Field mode: rebuild the struct schema, swapping only the configured field.
    final Struct value = requireStructOrNull(operatingValue(record), PURPOSE);
    Schema newSchema = schemaUpdateCache.get(schema);
    if (newSchema == null) {
        final SchemaBuilder builder = SchemaUtil.copySchemaBasics(schema, SchemaBuilder.struct());
        for (Field field : schema.fields()) {
            final Schema fieldSchema = field.name().equals(config.field)
                    ? translator.typeSchema(field.schema().isOptional())
                    : field.schema();
            builder.field(field.name(), fieldSchema);
        }
        if (schema.isOptional())
            builder.optional();
        if (schema.defaultValue() != null) {
            // The default struct must be converted to match the new field schema.
            final Struct newDefault = applyValueWithSchema((Struct) schema.defaultValue(), builder);
            builder.defaultValue(newDefault);
        }
        newSchema = builder.build();
        schemaUpdateCache.put(schema, newSchema);
    }
    return newRecord(record, newSchema, applyValueWithSchema(value, newSchema));
}
Use of org.apache.kafka.connect.data.Field in the Apache Kafka project: class ValueToKey, method applyWithSchema.
/**
 * Builds a new record key from the configured value fields. The key schema is
 * derived from the value schema (and cached per value schema); each configured
 * field is copied from the value struct into the key struct.
 *
 * @param record the record whose value supplies the key fields
 * @return a new record with the derived key and key schema
 * @throws DataException if a configured field is absent from the value schema
 */
private R applyWithSchema(R record) {
    final Struct value = requireStruct(record.value(), PURPOSE);
    final Schema valueSchema = value.schema();

    Schema keySchema = valueToKeySchemaCache.get(valueSchema);
    if (keySchema == null) {
        final SchemaBuilder builder = SchemaBuilder.struct();
        for (String fieldName : fields) {
            final Field valueField = valueSchema.field(fieldName);
            if (valueField == null) {
                throw new DataException("Field does not exist: " + fieldName);
            }
            builder.field(fieldName, valueField.schema());
        }
        keySchema = builder.build();
        valueToKeySchemaCache.put(valueSchema, keySchema);
    }

    final Struct key = new Struct(keySchema);
    for (String fieldName : fields) {
        key.put(fieldName, value.get(fieldName));
    }
    return record.newRecord(record.topic(), record.kafkaPartition(), keySchema, key, valueSchema, value, record.timestamp());
}
Use of org.apache.kafka.connect.data.Field in the Debezium project: class PostgresValueConverter, method createArrayConverter.
/**
 * Creates a converter for PostgreSQL array columns. A synthetic optional
 * "-element" column and field are derived from the array's element type, a
 * per-element converter is resolved for them, and the returned converter maps
 * the whole array by delegating each element to that converter.
 *
 * @param column    the array-typed column being converted
 * @param fieldDefn the Connect field definition for the array column
 * @return a converter that translates array values element by element
 */
private ValueConverter createArrayConverter(Column column, Field fieldDefn) {
    final PostgresType arrayType = typeRegistry.get(column.nativeType());
    final PostgresType elementType = arrayType.getElementType();
    final String elementTypeName = elementType.getName();
    final String elementColumnName = column.name() + "-element";

    // Synthesize an optional column describing a single array element,
    // inheriting scale and length from the array column itself.
    final Column elementColumn = Column.editor()
            .name(elementColumnName)
            .jdbcType(elementType.getJdbcId())
            .nativeType(elementType.getOid())
            .type(elementTypeName)
            .optional(true)
            .scale(column.scale())
            .length(column.length())
            .create();

    final Schema elementSchema = schemaBuilder(elementColumn).optional().build();
    final Field elementField = new Field(elementColumnName, 0, elementSchema);
    final ValueConverter elementConverter = converter(elementColumn, elementField);
    return data -> convertArray(column, fieldDefn, elementConverter, data);
}
Aggregations