Use of org.apache.kafka.connect.data.Field in the project apache-kafka-on-k8s by banzaicloud: the class Flatten, method buildUpdatedSchema.
/**
 * Build an updated Struct Schema which flattens all nested fields into a single struct, handling cases where
 * optionality and default values of the flattened fields are affected by the optionality and default values of
 * parent/ancestor schemas (e.g. flattened field is optional because the parent schema was optional, even if the
 * schema itself is marked as required).
 * @param schema the schema to translate
 * @param fieldNamePrefix the prefix to use on field names, i.e. the delimiter-joined set of ancestor field names
 * @param newSchema the flattened schema being built
 * @param optional true if any ancestor schema is optional
 * @param defaultFromParent the default value, if any, included via the parent/ancestor schemas
 */
private void buildUpdatedSchema(Schema schema, String fieldNamePrefix, SchemaBuilder newSchema, boolean optional, Struct defaultFromParent) {
    for (Field field : schema.fields()) {
        final String fieldName = fieldName(fieldNamePrefix, field.name());
        // A flattened field is optional if it is declared optional OR any ancestor schema was optional.
        final boolean fieldIsOptional = optional || field.schema().isOptional();
        // The field's own default takes precedence; otherwise inherit the corresponding value
        // from the default Struct carried down from the parent/ancestor schemas.
        Object fieldDefaultValue = null;
        if (field.schema().defaultValue() != null) {
            fieldDefaultValue = field.schema().defaultValue();
        } else if (defaultFromParent != null) {
            fieldDefaultValue = defaultFromParent.get(field);
        }
        switch (field.schema().type()) {
            case INT8:
            case INT16:
            case INT32:
            case INT64:
            case FLOAT32:
            case FLOAT64:
            case BOOLEAN:
            case STRING:
            case BYTES:
                // Primitive leaf: copy it into the flattened schema under its joined name.
                newSchema.field(fieldName, convertFieldSchema(field.schema(), fieldIsOptional, fieldDefaultValue));
                break;
            case STRUCT:
                // Nested struct: recurse, propagating accumulated optionality and parent defaults.
                buildUpdatedSchema(field.schema(), fieldName, newSchema, fieldIsOptional, (Struct) fieldDefaultValue);
                break;
            default:
                // This method only runs on the schema-ful path (it walks schema.fields()), so the
                // message must say "with schemas"; the previous text incorrectly said "without schemas".
                throw new DataException("Flatten transformation does not support " + field.schema().type() + " for record with schemas (for field " + fieldName + ").");
        }
    }
}
Use of org.apache.kafka.connect.data.Field in the project apache-kafka-on-k8s by banzaicloud: the class JsonConverter, method convertToJson.
/**
 * Convert this object, in the org.apache.kafka.connect.data format, into a JSON object, returning both the schema
 * and the converted object.
 *
 * @param schema the Connect schema for the value; may be null, in which case the type is inferred
 *               from the Java class of {@code logicalValue}
 * @param logicalValue the value to convert; may be null
 * @return the JSON representation, or null when both the value and the schema are null
 * @throws DataException if the value does not match the schema type, the schema type is
 *                       unsupported, or a required field with no default is null
 */
private static JsonNode convertToJson(Schema schema, Object logicalValue) {
    if (logicalValue == null) {
        if (// Any schema is valid and we don't have a default, so treat this as an optional schema
        schema == null)
            return null;
        // Substitute the schema's default value for a missing value, if one is declared.
        if (schema.defaultValue() != null)
            return convertToJson(schema, schema.defaultValue());
        if (schema.isOptional())
            return JsonNodeFactory.instance.nullNode();
        throw new DataException("Conversion error: null value for field that is required and has no default value");
    }
    // Logical types (e.g. Date, Decimal) are identified by schema name; convert the logical
    // value to its wire representation before emitting JSON.
    Object value = logicalValue;
    if (schema != null && schema.name() != null) {
        LogicalTypeConverter logicalConverter = TO_JSON_LOGICAL_CONVERTERS.get(schema.name());
        if (logicalConverter != null)
            value = logicalConverter.convert(schema, logicalValue);
    }
    try {
        final Schema.Type schemaType;
        if (schema == null) {
            // Schemaless data: infer the Connect type from the runtime Java class.
            schemaType = ConnectSchema.schemaType(value.getClass());
            if (schemaType == null)
                throw new DataException("Java class " + value.getClass() + " does not have corresponding schema type.");
        } else {
            schemaType = schema.type();
        }
        switch (schemaType) {
            case INT8:
                return JsonNodeFactory.instance.numberNode((Byte) value);
            case INT16:
                return JsonNodeFactory.instance.numberNode((Short) value);
            case INT32:
                return JsonNodeFactory.instance.numberNode((Integer) value);
            case INT64:
                return JsonNodeFactory.instance.numberNode((Long) value);
            case FLOAT32:
                return JsonNodeFactory.instance.numberNode((Float) value);
            case FLOAT64:
                return JsonNodeFactory.instance.numberNode((Double) value);
            case BOOLEAN:
                return JsonNodeFactory.instance.booleanNode((Boolean) value);
            case STRING:
                CharSequence charSeq = (CharSequence) value;
                return JsonNodeFactory.instance.textNode(charSeq.toString());
            case BYTES:
                if (value instanceof byte[]) {
                    return JsonNodeFactory.instance.binaryNode((byte[]) value);
                } else if (value instanceof ByteBuffer) {
                    // Copy only the remaining bytes. The previous code used ByteBuffer.array(),
                    // which ignores position/limit (wrong bytes for slices or partially-consumed
                    // buffers) and throws UnsupportedOperationException for read-only or direct
                    // buffers. slice() leaves the caller's buffer position untouched.
                    ByteBuffer buffer = ((ByteBuffer) value).slice();
                    byte[] bytes = new byte[buffer.remaining()];
                    buffer.get(bytes);
                    return JsonNodeFactory.instance.binaryNode(bytes);
                } else {
                    throw new DataException("Invalid type for bytes type: " + value.getClass());
                }
            case ARRAY:
                {
                    Collection collection = (Collection) value;
                    ArrayNode list = JsonNodeFactory.instance.arrayNode();
                    for (Object elem : collection) {
                        // Schemaless arrays have no element schema; each element is inferred independently.
                        Schema valueSchema = schema == null ? null : schema.valueSchema();
                        JsonNode fieldValue = convertToJson(valueSchema, elem);
                        list.add(fieldValue);
                    }
                    return list;
                }
            case MAP:
                {
                    Map<?, ?> map = (Map<?, ?>) value;
                    // If true, using string keys and JSON object; if false, using non-string keys and Array-encoding
                    boolean objectMode;
                    if (schema == null) {
                        // Schemaless: use object mode only when every key is a String.
                        objectMode = true;
                        for (Map.Entry<?, ?> entry : map.entrySet()) {
                            if (!(entry.getKey() instanceof String)) {
                                objectMode = false;
                                break;
                            }
                        }
                    } else {
                        objectMode = schema.keySchema().type() == Schema.Type.STRING;
                    }
                    ObjectNode obj = null;
                    ArrayNode list = null;
                    if (objectMode)
                        obj = JsonNodeFactory.instance.objectNode();
                    else
                        list = JsonNodeFactory.instance.arrayNode();
                    for (Map.Entry<?, ?> entry : map.entrySet()) {
                        Schema keySchema = schema == null ? null : schema.keySchema();
                        Schema valueSchema = schema == null ? null : schema.valueSchema();
                        JsonNode mapKey = convertToJson(keySchema, entry.getKey());
                        JsonNode mapValue = convertToJson(valueSchema, entry.getValue());
                        if (objectMode)
                            obj.set(mapKey.asText(), mapValue);
                        else
                            // Non-string keys: encode each entry as a two-element [key, value] array.
                            list.add(JsonNodeFactory.instance.arrayNode().add(mapKey).add(mapValue));
                    }
                    return objectMode ? obj : list;
                }
            case STRUCT:
                {
                    Struct struct = (Struct) value;
                    if (!struct.schema().equals(schema))
                        throw new DataException("Mismatching schema.");
                    ObjectNode obj = JsonNodeFactory.instance.objectNode();
                    for (Field field : schema.fields()) {
                        obj.set(field.name(), convertToJson(field.schema(), struct.get(field)));
                    }
                    return obj;
                }
        }
        // Reached only if the switch above did not cover schemaType.
        throw new DataException("Couldn't convert " + value + " to JSON.");
    } catch (ClassCastException e) {
        String schemaTypeStr = (schema != null) ? schema.type().toString() : "unknown schema";
        throw new DataException("Invalid type for " + schemaTypeStr + ": " + value.getClass());
    }
}
Use of org.apache.kafka.connect.data.Field in the project debezium by debezium: the class Envelope, method operationFor.
/**
 * Obtain the operation for the given source record.
 *
 * @param record the source record; may not be null
 * @return the operation, or null if no valid operation was found in the record
 */
public static Operation operationFor(SourceRecord record) {
    final Struct envelope = (Struct) record.value();
    final Field operationField = envelope.schema().field(FieldName.OPERATION);
    if (operationField == null) {
        // The record's schema carries no operation field, so there is nothing to decode.
        return null;
    }
    return Operation.forCode(envelope.getString(operationField.name()));
}
Use of org.apache.kafka.connect.data.Field in the project debezium by debezium: the class EnvelopeTest, method assertField.
/**
 * Assert that the given field has the expected name, schema metadata (name, doc, parameters,
 * version), and optionality; for STRUCT schemas, recursively verifies every nested field.
 *
 * @param field the actual field under test
 * @param fieldName the expected field name
 * @param expectedSchema the schema whose metadata the field's schema must match
 * @param optional the expected optionality of the field's schema
 */
protected void assertField(Field field, String fieldName, Schema expectedSchema, boolean optional) {
    assertThat(field.name()).isEqualTo(fieldName);
    final Schema actualSchema = field.schema();
    assertThat(actualSchema.name()).isEqualTo(expectedSchema.name());
    assertThat(actualSchema.doc()).isEqualTo(expectedSchema.doc());
    assertThat(actualSchema.parameters()).isEqualTo(expectedSchema.parameters());
    assertThat(actualSchema.version()).isEqualTo(expectedSchema.version());
    assertThat(actualSchema.isOptional()).isEqualTo(optional);
    // Only STRUCT schemas carry nested fields; all other types need no further comparison.
    if (expectedSchema.type() == Schema.Type.STRUCT) {
        for (Field expectedField : expectedSchema.fields()) {
            assertField(actualSchema.field(expectedField.name()), expectedField.name(), expectedField.schema(), expectedField.schema().isOptional());
        }
    }
}
Use of org.apache.kafka.connect.data.Field in the project debezium by debezium: the class SchemaChangeHistory, method getAffectedDatabase.
/**
 * Extract the name of the database affected by the given schema-change record.
 *
 * @param record the source record whose value is the schema-change envelope
 * @return the affected database name, or null if the envelope's schema has no database-name field
 */
protected String getAffectedDatabase(SourceRecord record) {
    final Struct changeEnvelope = (Struct) record.value();
    final Field databaseField = changeEnvelope.schema().field(HistoryRecord.Fields.DATABASE_NAME);
    if (databaseField == null) {
        // Older/other record shapes may omit the database name entirely.
        return null;
    }
    return changeEnvelope.getString(databaseField.name());
}
Aggregations