Example usage of org.apache.flink.table.data.GenericRowData in the Apache Flink project: class MaxwellJsonDeserializationSchema, method deserialize.
@Override
public void deserialize(byte[] message, Collector<RowData> out) throws IOException {
if (message == null || message.length == 0) {
return;
}
try {
final JsonNode root = jsonDeserializer.deserializeToJsonNode(message);
final GenericRowData row = (GenericRowData) jsonDeserializer.convertToRowData(root);
// "type" field
String type = row.getString(2).toString();
if (OP_INSERT.equals(type)) {
// "data" field is a row, contains inserted rows
GenericRowData insert = (GenericRowData) row.getRow(0, fieldCount);
insert.setRowKind(RowKind.INSERT);
emitRow(row, insert, out);
} else if (OP_UPDATE.equals(type)) {
// "data" field is a row, contains new rows
// "old" field is a row, contains old values
// the underlying JSON deserialization schema always produce GenericRowData.
// "data" field
GenericRowData after = (GenericRowData) row.getRow(0, fieldCount);
// "old" field
GenericRowData before = (GenericRowData) row.getRow(1, fieldCount);
final JsonNode oldField = root.get(FIELD_OLD);
for (int f = 0; f < fieldCount; f++) {
if (before.isNullAt(f) && oldField.findValue(fieldNames.get(f)) == null) {
// not null fields in "old" (before) means the fields are changed
// null/empty fields in "old" (before) means the fields are not changed
// so we just copy the not changed fields into before
before.setField(f, after.getField(f));
}
}
before.setRowKind(RowKind.UPDATE_BEFORE);
after.setRowKind(RowKind.UPDATE_AFTER);
emitRow(row, before, out);
emitRow(row, after, out);
} else if (OP_DELETE.equals(type)) {
// "data" field is a row, contains deleted rows
GenericRowData delete = (GenericRowData) row.getRow(0, fieldCount);
delete.setRowKind(RowKind.DELETE);
emitRow(row, delete, out);
} else {
if (!ignoreParseErrors) {
throw new IOException(format("Unknown \"type\" value \"%s\". The Maxwell JSON message is '%s'", type, new String(message)));
}
}
} catch (Throwable t) {
// a big try catch to protect the processing.
if (!ignoreParseErrors) {
throw new IOException(format("Corrupt Maxwell JSON message '%s'.", new String(message)), t);
}
}
}
Example usage of org.apache.flink.table.data.GenericRowData in the Apache Flink project: class HiveTableInputFormat, method reopen.
@Override
public void reopen(HiveTableInputSplit split, Long state) throws IOException {
this.open(split);
this.currentReadCount = state;
this.reader.seekToRow(state, new GenericRowData(selectedFields.length));
}
Example usage of org.apache.flink.table.data.GenericRowData in the Apache Flink project: class DynamicKafkaRecordSerializationSchema, method createProjectedRow.
static RowData createProjectedRow(RowData consumedRow, RowKind kind, RowData.FieldGetter[] fieldGetters) {
final int arity = fieldGetters.length;
final GenericRowData genericRowData = new GenericRowData(kind, arity);
for (int fieldPos = 0; fieldPos < arity; fieldPos++) {
genericRowData.setField(fieldPos, fieldGetters[fieldPos].getFieldOrNull(consumedRow));
}
return genericRowData;
}
Example usage of org.apache.flink.table.data.GenericRowData in the Apache Flink project: class JsonToRowDataConverters, method createRowConverter.
public JsonToRowDataConverter createRowConverter(RowType rowType) {
final JsonToRowDataConverter[] fieldConverters = rowType.getFields().stream().map(RowType.RowField::getType).map(this::createConverter).toArray(JsonToRowDataConverter[]::new);
final String[] fieldNames = rowType.getFieldNames().toArray(new String[0]);
return jsonNode -> {
ObjectNode node = (ObjectNode) jsonNode;
int arity = fieldNames.length;
GenericRowData row = new GenericRowData(arity);
for (int i = 0; i < arity; i++) {
String fieldName = fieldNames[i];
JsonNode field = node.get(fieldName);
try {
Object convertedField = convertField(fieldConverters[i], fieldName, field);
row.setField(i, convertedField);
} catch (Throwable t) {
throw new JsonParseException(String.format("Fail to deserialize at field: %s.", fieldName), t);
}
}
return row;
};
}
Example usage of org.apache.flink.table.data.GenericRowData in the Apache Flink project: class DebeziumJsonDeserializationSchema, method deserialize.
@Override
public void deserialize(byte[] message, Collector<RowData> out) throws IOException {
if (message == null || message.length == 0) {
// skip tombstone messages
return;
}
try {
GenericRowData row = (GenericRowData) jsonDeserializer.deserialize(message);
GenericRowData payload;
if (schemaInclude) {
payload = (GenericRowData) row.getField(0);
} else {
payload = row;
}
GenericRowData before = (GenericRowData) payload.getField(0);
GenericRowData after = (GenericRowData) payload.getField(1);
String op = payload.getField(2).toString();
if (OP_CREATE.equals(op) || OP_READ.equals(op)) {
after.setRowKind(RowKind.INSERT);
emitRow(row, after, out);
} else if (OP_UPDATE.equals(op)) {
if (before == null) {
throw new IllegalStateException(String.format(REPLICA_IDENTITY_EXCEPTION, "UPDATE"));
}
before.setRowKind(RowKind.UPDATE_BEFORE);
after.setRowKind(RowKind.UPDATE_AFTER);
emitRow(row, before, out);
emitRow(row, after, out);
} else if (OP_DELETE.equals(op)) {
if (before == null) {
throw new IllegalStateException(String.format(REPLICA_IDENTITY_EXCEPTION, "DELETE"));
}
before.setRowKind(RowKind.DELETE);
emitRow(row, before, out);
} else {
if (!ignoreParseErrors) {
throw new IOException(format("Unknown \"op\" value \"%s\". The Debezium JSON message is '%s'", op, new String(message)));
}
}
} catch (Throwable t) {
// a big try catch to protect the processing.
if (!ignoreParseErrors) {
throw new IOException(format("Corrupt Debezium JSON message '%s'.", new String(message)), t);
}
}
}
Aggregations