Use of org.apache.kafka.connect.data.Field in the project ksql by confluentinc: the method getSchemaDefinitionString of the class SchemaUtil.
/**
 * Renders a schema's fields as a bracketed "name : TYPE" list,
 * e.g. {@code "[F1 : INT64 , F2 : STRING]"}.
 *
 * @param schema the schema whose fields are rendered; its field list must be non-null
 * @return the bracketed definition string (just {@code "[]"} for a field-less schema)
 */
public static String getSchemaDefinitionString(Schema schema) {
  StringBuilder definition = new StringBuilder("[");
  // Separator is empty before the first field and " , " thereafter.
  String separator = "";
  for (Field field : schema.fields()) {
    definition.append(separator)
        .append(field.name())
        .append(" : ")
        .append(field.schema().type());
    separator = " , ";
  }
  return definition.append("]").toString();
}
Use of org.apache.kafka.connect.data.Field in the project ksql by confluentinc: the method getFieldIndexByName of the class SchemaUtil.
/**
 * Returns the index of the schema field matching {@code fieldName}, or -1 if absent.
 *
 * Plain field names (no dot) must match exactly. For a schema field that contains a
 * dot (e.g. {@code "ALIAS.COL"}), the candidate is formed by replacing the character
 * of {@code fieldName} at the dot's position with a literal '.', then compared
 * exactly against the schema field's name.
 * NOTE(review): this assumes the caller passes the alias-qualified name with some
 * single-character separator at the same position — confirm against callers.
 *
 * @param schema    the schema to search; a null field list yields -1
 * @param fieldName the (possibly alias-qualified) name to look up
 * @return the zero-based index of the matching field, or -1 when not found
 */
public static int getFieldIndexByName(final Schema schema, final String fieldName) {
if (schema.fields() == null) {
return -1;
}
for (int i = 0; i < schema.fields().size(); i++) {
Field field = schema.fields().get(i);
int dotIndex = field.name().indexOf('.');
if (dotIndex == -1) {
// Unqualified schema field: exact string match only.
if (field.name().equals(fieldName)) {
return i;
}
} else {
// Alias-qualified schema field: only comparable when fieldName is long enough
// to carry a separator character at the dot's position.
if (dotIndex < fieldName.length()) {
// Substitute '.' for fieldName's character at dotIndex, then compare exactly.
String fieldNameWithDot = fieldName.substring(0, dotIndex) + "." + fieldName.substring(dotIndex + 1);
if (field.name().equals(fieldNameWithDot)) {
return i;
}
}
}
}
return -1;
}
Use of org.apache.kafka.connect.data.Field in the project ksql by confluentinc: the method addImplicitRowTimeRowKeyToSchema of the class SchemaUtil.
/**
 * Builds a new struct schema with the implicit ROWTIME (INT64) and ROWKEY (STRING)
 * columns prepended, followed by all of {@code schema}'s fields except any that
 * already carry one of those two names (so they are never duplicated).
 *
 * @param schema the source schema; its field list must be non-null
 * @return a freshly built schema starting with ROWTIME and ROWKEY
 */
public static Schema addImplicitRowTimeRowKeyToSchema(Schema schema) {
  SchemaBuilder withImplicitColumns = SchemaBuilder.struct()
      .field(SchemaUtil.ROWTIME_NAME, Schema.INT64_SCHEMA)
      .field(SchemaUtil.ROWKEY_NAME, Schema.STRING_SCHEMA);
  for (Field field : schema.fields()) {
    // Skip fields that would collide with the implicit columns just added.
    boolean isImplicitName = field.name().equals(SchemaUtil.ROWKEY_NAME)
        || field.name().equals(SchemaUtil.ROWTIME_NAME);
    if (!isImplicitName) {
      withImplicitColumns.field(field.name(), field.schema());
    }
  }
  return withImplicitColumns.build();
}
Use of org.apache.kafka.connect.data.Field in the project ksql by confluentinc: the method getGenericRow of the class KsqlJsonDeserializer.
/**
 * Deserializes one JSON-encoded row into a {@link GenericRow}, producing one column
 * per schema field in schema order.
 *
 * JSON keys are resolved case-insensitively via {@code CaseInsensitiveJsonNode}'s
 * key map. Any "alias." prefix on a schema field name is stripped before lookup
 * (indexOf returns -1 for plain names, so the substring starts at 0). Missing keys
 * yield a null column; present values are coerced by {@code enforceFieldType}.
 *
 * @param rowJsonBytes the raw JSON bytes for a single row
 * @return the deserialized row
 * @throws IOException if the bytes are not parseable JSON
 */
private GenericRow getGenericRow(byte[] rowJsonBytes) throws IOException {
  JsonNode jsonNode = objectMapper.readTree(rowJsonBytes);
  CaseInsensitiveJsonNode caseInsensitiveJsonNode = new CaseInsensitiveJsonNode(jsonNode);
  Map<String, String> keyMap = caseInsensitiveJsonNode.keyMap;
  // Fix: use a typed, presized list instead of the raw List/ArrayList the original
  // needed @SuppressWarnings("unchecked") for.
  List<Object> columns = new ArrayList<>(schema.fields().size());
  for (Field field : schema.fields()) {
    // Strip everything up to and including the first '.' (alias qualifier), if any.
    String jsonFieldName = field.name().substring(field.name().indexOf('.') + 1);
    JsonNode fieldJsonNode = jsonNode.get(keyMap.get(jsonFieldName));
    if (fieldJsonNode == null) {
      columns.add(null);
    } else {
      columns.add(enforceFieldType(field.schema(), fieldJsonNode));
    }
  }
  return new GenericRow(columns);
}
Use of org.apache.kafka.connect.data.Field in the project apache-kafka-on-k8s by banzaicloud: the method applyWithSchema of the class InsertField.
/**
 * Applies the insert-field transform to a record whose value carries a schema.
 *
 * Copies every existing field of the value struct into a struct of the updated
 * schema (derived once per input schema and cached), then fills in whichever of the
 * configured insertion fields are active: topic name, Kafka partition, offset (sink
 * records only), record timestamp, and a configured static value.
 *
 * @param record the record to transform; its value must be a {@code Struct}
 * @return a new record carrying the updated schema and populated value
 */
private R applyWithSchema(R record) {
  final Struct value = requireStruct(operatingValue(record), PURPOSE);

  // Derive the widened schema at most once per distinct input schema.
  final Schema cached = schemaUpdateCache.get(value.schema());
  final Schema updatedSchema;
  if (cached != null) {
    updatedSchema = cached;
  } else {
    updatedSchema = makeUpdatedSchema(value.schema());
    schemaUpdateCache.put(value.schema(), updatedSchema);
  }

  final Struct updatedValue = new Struct(updatedSchema);

  // Carry over all of the original fields unchanged.
  for (Field original : value.schema().fields()) {
    updatedValue.put(original.name(), value.get(original));
  }

  // Populate each configured insertion field when its source is available.
  if (topicField != null) {
    updatedValue.put(topicField.name, record.topic());
  }
  if (partitionField != null && record.kafkaPartition() != null) {
    updatedValue.put(partitionField.name, record.kafkaPartition());
  }
  if (offsetField != null) {
    updatedValue.put(offsetField.name, requireSinkRecord(record, PURPOSE).kafkaOffset());
  }
  if (timestampField != null && record.timestamp() != null) {
    updatedValue.put(timestampField.name, new Date(record.timestamp()));
  }
  if (staticField != null && staticValue != null) {
    updatedValue.put(staticField.name, staticValue);
  }

  return newRecord(record, updatedSchema, updatedValue);
}
Aggregations