Example usage of org.apache.kafka.connect.data.Field in the Apache Kafka project.
Source: class HeaderFrom, method moveSchema.
/**
 * Builds (and caches, keyed by the input schema) the value schema used for a MOVE
 * operation: a copy of {@code operatingSchema} with every field listed in the
 * configured {@code fields} removed, since those fields are relocated into headers.
 */
private Schema moveSchema(Schema operatingSchema) {
    final Schema cached = this.moveSchemaCache.get(operatingSchema);
    if (cached != null) {
        return cached;
    }
    // Copy the schema basics (name, version, doc, ...) onto a fresh struct builder,
    // keeping only the fields that are NOT being moved out of the value.
    final SchemaBuilder rebuilt = SchemaUtil.copySchemaBasics(operatingSchema, SchemaBuilder.struct());
    for (Field sourceField : operatingSchema.fields()) {
        final String fieldName = sourceField.name();
        if (!fields.contains(fieldName)) {
            rebuilt.field(fieldName, sourceField.schema());
        }
    }
    final Schema built = rebuilt.build();
    moveSchemaCache.put(operatingSchema, built);
    return built;
}
Example usage of org.apache.kafka.connect.data.Field in the Apache Kafka project.
Source: class InsertField, method applyWithSchema.
/**
 * Applies the insert-field transformation to a record that carries a schema:
 * copies the original struct into a struct with the extended schema, then adds
 * each configured metadata/static field that is enabled and has a value.
 */
private R applyWithSchema(R record) {
    final Struct originalValue = requireStruct(operatingValue(record), PURPOSE);

    // Resolve the extended schema, building and caching it on first sight.
    Schema newSchema = schemaUpdateCache.get(originalValue.schema());
    if (newSchema == null) {
        newSchema = makeUpdatedSchema(originalValue.schema());
        schemaUpdateCache.put(originalValue.schema(), newSchema);
    }

    // Carry over every pre-existing field unchanged.
    final Struct newValue = new Struct(newSchema);
    for (Field existing : originalValue.schema().fields()) {
        newValue.put(existing.name(), originalValue.get(existing));
    }

    // Insert each configured field when applicable.
    if (topicField != null) {
        newValue.put(topicField.name, record.topic());
    }
    if (partitionField != null && record.kafkaPartition() != null) {
        newValue.put(partitionField.name, record.kafkaPartition());
    }
    if (offsetField != null) {
        // requireSinkRecord: the offset is only available on sink records.
        newValue.put(offsetField.name, requireSinkRecord(record, PURPOSE).kafkaOffset());
    }
    if (timestampField != null && record.timestamp() != null) {
        newValue.put(timestampField.name, new Date(record.timestamp()));
    }
    if (staticField != null && staticValue != null) {
        newValue.put(staticField.name, staticValue);
    }
    return newRecord(record, newSchema, newValue);
}
Example usage of org.apache.kafka.connect.data.Field in the Apache Kafka project.
Source: class MaskField, method applyWithSchema.
/**
 * Applies the mask-field transformation to a schema'd record: rebuilds the struct
 * with the same schema, substituting a masked value for each configured field and
 * copying every other field through untouched.
 */
private R applyWithSchema(R record) {
    final Struct original = requireStruct(operatingValue(record), PURPOSE);
    final Struct rebuilt = new Struct(original.schema());
    for (Field structField : original.schema().fields()) {
        final Object raw = original.get(structField);
        if (maskedFields.contains(structField.name())) {
            rebuilt.put(structField, masked(raw));
        } else {
            rebuilt.put(structField, raw);
        }
    }
    return newRecord(record, rebuilt);
}
Example usage of org.apache.kafka.connect.data.Field in the Apache Kafka project.
Source: class HeaderFrom, method applyWithSchema.
/**
 * Copies (or moves) the configured value fields into record headers.
 * <p>
 * For MOVE, the value is rebuilt against a schema with the moved fields removed;
 * for COPY, the value and schema pass through unchanged. Each configured field is
 * then added as a header (a null value/schema header when the field is absent).
 *
 * @param record          the record being transformed
 * @param operatingValue  the key or value being operated on (must be a Struct)
 * @param operatingSchema the schema of {@code operatingValue}
 * @return a new record with updated headers (and, for MOVE, an updated value)
 */
private R applyWithSchema(R record, Object operatingValue, Schema operatingSchema) {
    Headers updatedHeaders = record.headers().duplicate();
    Struct value = Requirements.requireStruct(operatingValue, "header " + operation);
    final Schema updatedSchema;
    final Struct updatedValue;
    if (operation == Operation.MOVE) {
        updatedSchema = moveSchema(operatingSchema);
        updatedValue = new Struct(updatedSchema);
        // The move schema contains exactly the fields that are NOT being moved.
        for (Field field : updatedSchema.fields()) {
            updatedValue.put(field, value.get(field.name()));
        }
    } else {
        updatedSchema = operatingSchema;
        updatedValue = value;
    }
    for (int i = 0; i < fields.size(); i++) {
        String fieldName = fields.get(i);
        String headerName = headers.get(i);
        // BUGFIX: guard the schema lookup too. The original null-checked the field
        // before reading the value but then called operatingSchema.field(fieldName)
        // .schema() unconditionally, throwing NPE for a field absent from the schema.
        Field schemaField = operatingSchema.field(fieldName);
        Object fieldValue = schemaField != null ? value.get(fieldName) : null;
        Schema fieldSchema = schemaField != null ? schemaField.schema() : null;
        updatedHeaders.add(headerName, fieldValue, fieldSchema);
    }
    return newRecord(record, updatedSchema, updatedValue, updatedHeaders);
}
Example usage of org.apache.kafka.connect.data.Field in the Apache Kafka project.
Source: class JsonConverter, method convertToJson.
/**
 * Convert this object, in the org.apache.kafka.connect.data format, into a JSON object, returning both the schema
 * and the converted object.
 *
 * @param schema the Connect schema for {@code value}; may be null, in which case the schema type
 *               is inferred from the Java class of {@code value}
 * @param value  the Connect-format value to convert; may be null
 * @return the converted JSON node, or null when both {@code schema} and {@code value} are null
 * @throws DataException if conversion fails: a required null value with no default, a Java class
 *                       with no corresponding schema type, a mismatching struct schema, or a
 *                       value whose runtime type does not match the schema type
 */
private JsonNode convertToJson(Schema schema, Object value) {
if (value == null) {
// Any schema is valid and we don't have a default, so treat this as an optional schema
if (schema == null)
return null;
// Substitute the schema's declared default, converting it recursively.
if (schema.defaultValue() != null)
return convertToJson(schema, schema.defaultValue());
if (schema.isOptional())
return JSON_NODE_FACTORY.nullNode();
throw new DataException("Conversion error: null value for field that is required and has no default value");
}
// Named schemas may map to a registered logical-type converter (e.g. Decimal/Date/Time/Timestamp),
// which takes precedence over the raw physical type below.
if (schema != null && schema.name() != null) {
LogicalTypeConverter logicalConverter = LOGICAL_CONVERTERS.get(schema.name());
if (logicalConverter != null)
return logicalConverter.toJson(schema, value, config);
}
try {
// When no schema is provided, infer the schema type from the value's Java class.
final Schema.Type schemaType;
if (schema == null) {
schemaType = ConnectSchema.schemaType(value.getClass());
if (schemaType == null)
throw new DataException("Java class " + value.getClass() + " does not have corresponding schema type.");
} else {
schemaType = schema.type();
}
switch(schemaType) {
case INT8:
return JSON_NODE_FACTORY.numberNode((Byte) value);
case INT16:
return JSON_NODE_FACTORY.numberNode((Short) value);
case INT32:
return JSON_NODE_FACTORY.numberNode((Integer) value);
case INT64:
return JSON_NODE_FACTORY.numberNode((Long) value);
case FLOAT32:
return JSON_NODE_FACTORY.numberNode((Float) value);
case FLOAT64:
return JSON_NODE_FACTORY.numberNode((Double) value);
case BOOLEAN:
return JSON_NODE_FACTORY.booleanNode((Boolean) value);
case STRING:
CharSequence charSeq = (CharSequence) value;
return JSON_NODE_FACTORY.textNode(charSeq.toString());
case BYTES:
// Bytes may arrive as either byte[] or ByteBuffer; both become a JSON binary node.
if (value instanceof byte[])
return JSON_NODE_FACTORY.binaryNode((byte[]) value);
else if (value instanceof ByteBuffer)
return JSON_NODE_FACTORY.binaryNode(((ByteBuffer) value).array());
else
throw new DataException("Invalid type for bytes type: " + value.getClass());
case ARRAY:
{
// Convert each element recursively; a null schema propagates null element schemas.
Collection<?> collection = (Collection<?>) value;
ArrayNode list = JSON_NODE_FACTORY.arrayNode();
for (Object elem : collection) {
Schema valueSchema = schema == null ? null : schema.valueSchema();
JsonNode fieldValue = convertToJson(valueSchema, elem);
list.add(fieldValue);
}
return list;
}
case MAP:
{
Map<?, ?> map = (Map<?, ?>) value;
// If true, using string keys and JSON object; if false, using non-string keys and Array-encoding
boolean objectMode;
if (schema == null) {
// Schemaless: object mode only if every key is actually a String.
objectMode = true;
for (Map.Entry<?, ?> entry : map.entrySet()) {
if (!(entry.getKey() instanceof String)) {
objectMode = false;
break;
}
}
} else {
objectMode = schema.keySchema().type() == Schema.Type.STRING;
}
ObjectNode obj = null;
ArrayNode list = null;
if (objectMode)
obj = JSON_NODE_FACTORY.objectNode();
else
list = JSON_NODE_FACTORY.arrayNode();
for (Map.Entry<?, ?> entry : map.entrySet()) {
Schema keySchema = schema == null ? null : schema.keySchema();
Schema valueSchema = schema == null ? null : schema.valueSchema();
JsonNode mapKey = convertToJson(keySchema, entry.getKey());
JsonNode mapValue = convertToJson(valueSchema, entry.getValue());
// Object mode: {"key": value}; array mode: [[key, value], ...] for non-string keys.
if (objectMode)
obj.set(mapKey.asText(), mapValue);
else
list.add(JSON_NODE_FACTORY.arrayNode().add(mapKey).add(mapValue));
}
return objectMode ? obj : list;
}
case STRUCT:
{
Struct struct = (Struct) value;
// The struct must have been built against exactly this schema.
if (!struct.schema().equals(schema))
throw new DataException("Mismatching schema.");
ObjectNode obj = JSON_NODE_FACTORY.objectNode();
for (Field field : schema.fields()) {
obj.set(field.name(), convertToJson(field.schema(), struct.get(field)));
}
return obj;
}
}
// No case matched above (all cases return), so the schema type is unsupported.
throw new DataException("Couldn't convert " + value + " to JSON.");
} catch (ClassCastException e) {
// A cast above failed: the value's runtime type did not match the schema type.
String schemaTypeStr = (schema != null) ? schema.type().toString() : "unknown schema";
throw new DataException("Invalid type for " + schemaTypeStr + ": " + value.getClass());
}
}
End of aggregated usage examples.