Use of org.apache.flink.table.data.ArrayData in the Apache Flink project.
Example: the createMapConverter method of the RowDataToAvroConverters class.
/**
 * Creates a converter that serializes a RowData map value into a Java {@link Map} suitable for
 * an Avro map schema. Keys are rendered as strings; values are converted recursively with the
 * converter derived from the map's value type.
 */
private static RowDataToAvroConverter createMapConverter(LogicalType type) {
    final LogicalType valueType = extractValueTypeToAvroMap(type);
    final ArrayData.ElementGetter valueGetter = ArrayData.createElementGetter(valueType);
    final RowDataToAvroConverter valueConverter = createConverter(valueType);
    return new RowDataToAvroConverter() {
        private static final long serialVersionUID = 1L;

        @Override
        public Object convert(Schema schema, Object object) {
            final Schema valueSchema = schema.getValueType();
            final MapData mapData = (MapData) object;
            final ArrayData keys = mapData.keyArray();
            final ArrayData values = mapData.valueArray();
            final int numEntries = mapData.size();
            final Map<Object, Object> result = new HashMap<>(numEntries);
            for (int pos = 0; pos < numEntries; pos++) {
                // Avro map keys are always strings; values go through the element converter.
                result.put(
                        keys.getString(pos).toString(),
                        valueConverter.convert(
                                valueSchema, valueGetter.getElementOrNull(values, pos)));
            }
            return result;
        }
    };
}
Use of org.apache.flink.table.data.ArrayData in the Apache Flink project.
Example: the convert method of the RowDataVectorizer class.
/**
 * Flattens an {@link ArrayData} into a {@link GenericRowData} so that each array element can be
 * handled by {@link RowDataVectorizer#setColumn(int, ColumnVector, LogicalType, RowData, int)}
 * recursively, as if the elements were row fields.
 *
 * @param arrayData the input array
 * @param arrayFieldType logical type used to read elements from the array
 * @return a row whose field {@code i} holds element {@code i} of the array
 */
private static RowData convert(ArrayData arrayData, LogicalType arrayFieldType) {
    final int numElements = arrayData.size();
    final ArrayData.ElementGetter getter = ArrayData.createElementGetter(arrayFieldType);
    final GenericRowData row = new GenericRowData(numElements);
    for (int pos = 0; pos < numElements; pos++) {
        row.setField(pos, getter.getElementOrNull(arrayData, pos));
    }
    return row;
}
Use of org.apache.flink.table.data.ArrayData in the Apache Flink project.
Example: the createArrayConverter method of the RowDataToJsonConverters class.
/**
 * Creates a converter that serializes a RowData ARRAY value into a Jackson {@code ArrayNode},
 * converting each element with the converter derived from the array's element type.
 */
private RowDataToJsonConverter createArrayConverter(ArrayType type) {
    final LogicalType elementType = type.getElementType();
    final RowDataToJsonConverter elementConverter = createConverter(elementType);
    final ArrayData.ElementGetter elementGetter = ArrayData.createElementGetter(elementType);
    return (mapper, reuse, value) -> {
        final ArrayNode node;
        if (reuse != null && !reuse.isNull()) {
            // Recycle the previously produced node; clear its old children first.
            node = (ArrayNode) reuse;
            node.removeAll();
        } else {
            // reuse may be a NullNode if the last record was null, so build a fresh node.
            node = mapper.createArrayNode();
        }
        final ArrayData array = (ArrayData) value;
        final int numElements = array.size();
        for (int pos = 0; pos < numElements; pos++) {
            node.add(
                    elementConverter.convert(
                            mapper, null, elementGetter.getElementOrNull(array, pos)));
        }
        return node;
    };
}
Use of org.apache.flink.table.data.ArrayData in the Apache Flink project.
Example: the deserialize method of the CanalJsonDeserializationSchema class.
/**
 * Deserializes a Canal JSON changelog {@code message} and emits the contained changelog rows.
 *
 * <p>Null/empty messages are skipped. When a database or table filter is configured, messages
 * whose metadata does not match the corresponding pattern are dropped. The Canal "type" field
 * selects the changelog semantics: INSERT emits every row of "data" as an insert, UPDATE emits
 * an UPDATE_BEFORE/UPDATE_AFTER pair per row (back-filling unchanged fields of "old" from
 * "data"), DELETE emits deletes, and CREATE (a DDL event) is ignored. Any other type is an
 * error unless {@code ignoreParseErrors} is set, in which case the message is dropped.
 *
 * @param message the raw Canal JSON bytes; may be null or empty
 * @param out collector receiving the produced changelog rows
 * @throws IOException if the message is corrupt or has an unknown type and
 *     {@code ignoreParseErrors} is false
 */
@Override
public void deserialize(@Nullable byte[] message, Collector<RowData> out) throws IOException {
// Nothing to emit for empty messages.
if (message == null || message.length == 0) {
return;
}
try {
final JsonNode root = jsonDeserializer.deserializeToJsonNode(message);
// Optional database-name filter: drop messages from non-matching databases.
if (database != null) {
if (!databasePattern.matcher(root.get(ReadableMetadata.DATABASE.key).asText()).matches()) {
return;
}
}
// Optional table-name filter: drop messages from non-matching tables.
if (table != null) {
if (!tablePattern.matcher(root.get(ReadableMetadata.TABLE.key).asText()).matches()) {
return;
}
}
final GenericRowData row = (GenericRowData) jsonDeserializer.convertToRowData(root);
// "type" field
// NOTE(review): field positions appear fixed by the deserialization schema — 0 = "data",
// 1 = "old", 2 = "type" — TODO confirm against the schema construction outside this view.
String type = row.getString(2).toString();
if (OP_INSERT.equals(type)) {
// "data" field is an array of row, contains inserted rows
ArrayData data = row.getArray(0);
for (int i = 0; i < data.size(); i++) {
GenericRowData insert = (GenericRowData) data.getRow(i, fieldCount);
insert.setRowKind(RowKind.INSERT);
emitRow(row, insert, out);
}
} else if (OP_UPDATE.equals(type)) {
// "data" field is an array of row, contains new rows
ArrayData data = row.getArray(0);
// "old" field is an array of row, contains old values
ArrayData old = row.getArray(1);
for (int i = 0; i < data.size(); i++) {
// the underlying JSON deserialization schema always produce GenericRowData.
GenericRowData after = (GenericRowData) data.getRow(i, fieldCount);
GenericRowData before = (GenericRowData) old.getRow(i, fieldCount);
final JsonNode oldField = root.get(FIELD_OLD);
for (int f = 0; f < fieldCount; f++) {
if (before.isNullAt(f) && oldField.findValue(fieldNames.get(f)) == null) {
// fields in "old" (before) means the fields are changed
// fields not in "old" (before) means the fields are not changed
// so we just copy the not changed fields into before
before.setField(f, after.getField(f));
}
}
before.setRowKind(RowKind.UPDATE_BEFORE);
after.setRowKind(RowKind.UPDATE_AFTER);
// Emit the retraction before the new image so downstream sees a consistent changelog.
emitRow(row, before, out);
emitRow(row, after, out);
}
} else if (OP_DELETE.equals(type)) {
// "data" field is an array of row, contains deleted rows
ArrayData data = row.getArray(0);
for (int i = 0; i < data.size(); i++) {
GenericRowData insert = (GenericRowData) data.getRow(i, fieldCount);
insert.setRowKind(RowKind.DELETE);
emitRow(row, insert, out);
}
} else if (OP_CREATE.equals(type)) {
// this is a DDL change event, and we should skip it.
return;
} else {
if (!ignoreParseErrors) {
// Thrown inside the try block, so this is caught below and re-wrapped as a
// "Corrupt Canal JSON message" IOException.
throw new IOException(format("Unknown \"type\" value \"%s\". The Canal JSON message is '%s'", type, new String(message)));
}
}
} catch (Throwable t) {
// a big try catch to protect the processing.
if (!ignoreParseErrors) {
throw new IOException(format("Corrupt Canal JSON message '%s'.", new String(message)), t);
}
}
}
Use of org.apache.flink.table.data.ArrayData in the Apache Flink project.
Example: the serialize method of the CanalJsonSerializationSchema class.
/**
 * Serializes a single changelog row into a Canal JSON payload: the row is wrapped in a
 * one-element "data" array and its {@code RowKind} is mapped to the Canal "type" string
 * before the reusable envelope row is handed to the JSON serializer.
 */
@Override
public byte[] serialize(RowData row) {
    try {
        final StringData opType = rowKind2String(row.getRowKind());
        final GenericArrayData data = new GenericArrayData(new RowData[] {row});
        // Field 0 = "data", field 1 = "type" in the reused envelope row.
        reuse.setField(0, data);
        reuse.setField(1, opType);
        return jsonSerializer.serialize(reuse);
    } catch (Throwable t) {
        throw new RuntimeException("Could not serialize row '" + row + "'.", t);
    }
}
Aggregations