Use of org.apache.kafka.connect.data.Field in project debezium by debezium.
Example from class VerifyRecord, method valuesFor.
/**
 * Collects the value of every field declared by the struct's schema into an array,
 * preserving the schema's field order.
 *
 * @param struct the struct whose field values are extracted; may not be null
 * @return an array with one entry per schema field, in schema order
 */
private static Object[] valuesFor(Struct struct) {
    int fieldCount = struct.schema().fields().size();
    Object[] values = new Object[fieldCount];
    for (int i = 0; i < fieldCount; i++) {
        // fields() returns the fields in schema order, so index i lines up with slot i
        values[i] = struct.get(struct.schema().fields().get(i));
    }
    return values;
}
Use of org.apache.kafka.connect.data.Field in project debezium by debezium.
Example from class PostgresSchemaIT, method assertSchemaContent.
/**
 * Asserts that each named field exists in the given key schema and that its
 * schema matches the expected type at the same index.
 *
 * @param fields    expected field names (possibly quoted/padded; trimmed and unquoted before lookup)
 * @param types     expected field schemas, parallel to {@code fields}
 * @param keySchema the schema to check the fields against
 */
private void assertSchemaContent(String[] fields, Schema[] types, Schema keySchema) {
    for (int i = 0; i < fields.length; i++) {
        final String fieldName = fields[i].trim();
        // Identifier may be quoted in the expectation; unquote before looking it up.
        final Field field = keySchema.field(Strings.unquoteIdentifierPart(fieldName));
        assertNotNull(fieldName + " not found in schema", field);
        assertEquals("'" + fieldName + "' has incorrect schema.", types[i], field.schema());
    }
}
Use of org.apache.kafka.connect.data.Field in project debezium by debezium.
Example from class MySqlValueConverters, method converter.
/**
 * Returns a {@link ValueConverter} for the given column, dispatching on the MySQL
 * type name first (JSON, spatial types, YEAR, ENUM, SET, unsigned integer widths)
 * and then on the JDBC type for character data; anything unmatched is delegated
 * to the superclass converter.
 */
@Override
public ValueConverter converter(Column column, Field fieldDefn) {
// Handle a few MySQL-specific types based upon how they are handled by the MySQL binlog client ...
String typeName = column.typeName().toUpperCase();
if (matches(typeName, "JSON")) {
return (data) -> convertJson(column, fieldDefn, data);
}
if (matches(typeName, "GEOMETRY") || matches(typeName, "LINESTRING") || matches(typeName, "POLYGON") || matches(typeName, "MULTIPOINT") || matches(typeName, "MULTILINESTRING") || matches(typeName, "MULTIPOLYGON") || matches(typeName, "GEOMETRYCOLLECTION")) {
return (data -> convertGeometry(column, fieldDefn, data));
}
if (matches(typeName, "POINT")) {
// backwards compatibility: POINT keeps its own converter rather than the generic geometry one
return (data -> convertPoint(column, fieldDefn, data));
}
if (matches(typeName, "YEAR")) {
return (data) -> convertYearToInt(column, fieldDefn, data);
}
if (matches(typeName, "ENUM")) {
// Collect the allowed enum values declared by the column's type ...
List<String> options = extractEnumAndSetOptions(column);
return (data) -> convertEnumToString(options, column, fieldDefn, data);
}
if (matches(typeName, "SET")) {
// Collect the allowed set values declared by the column's type ...
List<String> options = extractEnumAndSetOptions(column);
return (data) -> convertSetToString(options, column, fieldDefn, data);
}
if (matches(typeName, "TINYINT UNSIGNED") || matches(typeName, "TINYINT UNSIGNED ZEROFILL")) {
// Convert TINYINT UNSIGNED internally from SIGNED to UNSIGNED based on the boundary settings
return (data) -> convertUnsignedTinyint(column, fieldDefn, data);
}
if (matches(typeName, "SMALLINT UNSIGNED") || matches(typeName, "SMALLINT UNSIGNED ZEROFILL")) {
// Convert SMALLINT UNSIGNED internally from SIGNED to UNSIGNED based on the boundary settings
return (data) -> convertUnsignedSmallint(column, fieldDefn, data);
}
if (matches(typeName, "MEDIUMINT UNSIGNED") || matches(typeName, "MEDIUMINT UNSIGNED ZEROFILL")) {
// Convert MEDIUMINT UNSIGNED internally from SIGNED to UNSIGNED based on the boundary settings
return (data) -> convertUnsignedMediumint(column, fieldDefn, data);
}
if (matches(typeName, "INT UNSIGNED") || matches(typeName, "INT UNSIGNED ZEROFILL")) {
// Convert INT UNSIGNED internally from SIGNED to UNSIGNED based on the boundary settings
return (data) -> convertUnsignedInt(column, fieldDefn, data);
}
if (matches(typeName, "BIGINT UNSIGNED") || matches(typeName, "BIGINT UNSIGNED ZEROFILL")) {
switch(super.bigIntUnsignedMode) {
case LONG:
return (data) -> convertBigInt(column, fieldDefn, data);
case PRECISE:
// Convert BIGINT UNSIGNED internally from SIGNED to UNSIGNED based on the boundary settings
return (data) -> convertUnsignedBigint(column, fieldDefn, data);
}
}
// We have to convert bytes encoded in the column's character set ...
switch(column.jdbcType()) {
// fixed-length
case Types.CHAR:
// variable-length
case Types.VARCHAR:
// variable-length
case Types.LONGVARCHAR:
// large character object
case Types.CLOB:
// fixed-length
case Types.NCHAR:
// variable-length
case Types.NVARCHAR:
// variable-length
case Types.LONGNVARCHAR:
// large character object
case Types.NCLOB:
case Types.DATALINK:
case Types.SQLXML:
Charset charset = charsetFor(column);
if (charset != null) {
logger.debug("Using {} charset by default for column: {}", charset, column);
return (data) -> convertString(column, fieldDefn, charset, data);
}
// No charset could be resolved for the column; fall back to UTF-8.
logger.warn("Using UTF-8 charset by default for column without charset: {}", column);
return (data) -> convertString(column, fieldDefn, StandardCharsets.UTF_8, data);
case Types.TIME:
if (adaptiveTimeMicrosecondsPrecisionMode)
return data -> convertDurationToMicroseconds(column, fieldDefn, data);
default:
break;
}
// Otherwise, let the base class handle it ...
return super.converter(column, fieldDefn);
}
Use of org.apache.kafka.connect.data.Field in project kafka by apache.
Example from class Cast, method applyWithSchema.
/**
 * Applies the configured casts to a record that carries a schema: either the whole
 * value is cast to a single target type, or individual struct fields are cast
 * according to the {@code casts} mapping.
 *
 * @param record the record being transformed
 * @return a new record with the updated schema and cast value(s)
 */
private R applyWithSchema(R record) {
    final Schema valueSchema = operatingSchema(record);
    final Schema updatedSchema = getOrBuildSchema(valueSchema);

    // Whole-record casting: no per-field work needed.
    if (wholeValueCastType != null) {
        return newRecord(record, updatedSchema, castValueToType(valueSchema, operatingValue(record), wholeValueCastType));
    }

    // Casting within a struct: copy every field, casting those with a configured target type.
    final Struct original = requireStruct(operatingValue(record), PURPOSE);
    final Struct casted = new Struct(updatedSchema);
    for (Field field : original.schema().fields()) {
        final Object oldValue = original.get(field);
        final Schema.Type castTo = casts.get(field.name());
        final Object newValue = castTo == null ? oldValue : castValueToType(field.schema(), oldValue, castTo);
        log.trace("Cast field '{}' from '{}' to '{}'", field.name(), oldValue, newValue);
        casted.put(updatedSchema.field(field.name()), newValue);
    }
    return newRecord(record, updatedSchema, casted);
}
Use of org.apache.kafka.connect.data.Field in project kafka by apache.
Example from class ExtractField, method apply.
/**
 * Extracts a single named field from the record's value. With a schema, the field
 * is looked up on the schema (throwing if absent) and pulled from the struct;
 * without one, the value is treated as a map and indexed by the field name.
 *
 * @param record the record whose value is being narrowed to one field
 * @return a new record holding only the extracted field's value
 * @throws IllegalArgumentException if the schema does not declare the field
 */
@Override
public R apply(R record) {
    final Schema schema = operatingSchema(record);
    if (schema != null) {
        // Schemaful path: the field must be declared on the schema.
        final Struct struct = requireStructOrNull(operatingValue(record), PURPOSE);
        final Field field = schema.field(fieldName);
        if (field == null) {
            throw new IllegalArgumentException("Unknown field: " + fieldName);
        }
        return newRecord(record, field.schema(), struct == null ? null : struct.get(fieldName));
    }
    // Schemaless path: a null value simply yields a null extracted value.
    final Map<String, Object> map = requireMapOrNull(operatingValue(record), PURPOSE);
    return newRecord(record, null, map == null ? null : map.get(fieldName));
}
Aggregations