Example use of org.apache.flink.table.data.GenericRowData in the Apache Flink project:
class OrcBulkRowDataWriterTest, method readMap.
/**
 * Reads the map entry for the given row from a {@link MapColumnVector} with the specified
 * schema {@literal map<string,struct<_col3_col0:string,_col3_col1:timestamp>>} and wraps it
 * as a {@link MapData}.
 */
private static MapData readMap(MapColumnVector mapVector, int row) {
    // Locate this row's first entry inside the flattened child vectors.
    int entryOffset = (int) mapVector.offsets[row];
    StringData key = readStringData((BytesColumnVector) mapVector.keys, entryOffset);
    // The map value is a struct<string, timestamp>; decode both struct fields.
    StructColumnVector valueVector = (StructColumnVector) mapVector.values;
    GenericRowData value = new GenericRowData(2);
    value.setField(0, readStringData((BytesColumnVector) valueVector.fields[0], entryOffset));
    value.setField(1, readTimestamp((TimestampColumnVector) valueVector.fields[1], entryOffset));
    Map<StringData, RowData> entries = new HashMap<>();
    entries.put(key, value);
    return new GenericMapData(entries);
}
Example use of org.apache.flink.table.data.GenericRowData in the Apache Flink project:
class OrcBulkRowDataWriterTest, method initInput.
/**
 * Prepares the test input: the logical row schema {@code <string, int,
 * array<row<string>>, map<string, row<string, timestamp>>>} and two sample rows.
 */
@Before
public void initInput() {
    input = new ArrayList<>();
    fieldTypes = new LogicalType[4];
    fieldTypes[0] = new VarCharType();
    fieldTypes[1] = new IntType();
    List<RowType.RowField> arrayRowFieldList =
            Collections.singletonList(new RowType.RowField("_col2_col0", new VarCharType()));
    fieldTypes[2] = new ArrayType(new RowType(arrayRowFieldList));
    List<RowType.RowField> mapRowFieldList =
            Arrays.asList(
                    new RowType.RowField("_col3_col0", new VarCharType()),
                    new RowType.RowField("_col3_col1", new TimestampType()));
    fieldTypes[3] = new MapType(new VarCharType(), new RowType(mapRowFieldList));
    // The two original hand-rolled rows differed only in the numeric suffix; build
    // them through one helper instead of duplicating ~18 lines per row.
    input.add(createTestRow(1));
    input.add(createTestRow(2));
}

/**
 * Builds one test row whose string payloads carry the given {@code id} as suffix and
 * whose int column equals {@code id}. Produces values identical to the previous
 * inline construction.
 */
private static GenericRowData createTestRow(int id) {
    GenericRowData rowData = new GenericRowData(4);
    rowData.setField(0, new BinaryStringData("_col_0_string_" + id));
    rowData.setField(1, id);
    GenericRowData arrayValue1 = new GenericRowData(1);
    arrayValue1.setField(0, new BinaryStringData("_col_2_row_0_string_" + id));
    GenericRowData arrayValue2 = new GenericRowData(1);
    arrayValue2.setField(0, new BinaryStringData("_col_2_row_1_string_" + id));
    rowData.setField(2, new GenericArrayData(new Object[] {arrayValue1, arrayValue2}));
    GenericRowData mapValue = new GenericRowData(2);
    mapValue.setField(0, new BinaryStringData("_col_3_map_value_string_" + id));
    // Fixed timestamp (epoch + 1h) shared by all rows, matching the original data.
    mapValue.setField(1, TimestampData.fromTimestamp(new Timestamp(3600000)));
    Map<StringData, RowData> mapDataMap = new HashMap<>();
    mapDataMap.put(new BinaryStringData("_col_3_map_key_" + id), mapValue);
    rowData.setField(3, new GenericMapData(mapDataMap));
    return rowData;
}
Example use of org.apache.flink.table.data.GenericRowData in the Apache Flink project:
class ArrowUtilsTest, method testReadArrowBatches.
/** Serializes several Arrow batches of empty rows and checks the reader recovers the batch count. */
@Test
public void testReadArrowBatches() throws IOException {
    VectorSchemaRoot root =
            VectorSchemaRoot.create(ArrowUtils.toArrowSchema(rowType), allocator);
    ArrowWriter<RowData> writer = ArrowUtils.createRowDataArrowWriter(root, rowType);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    ArrowStreamWriter streamWriter = new ArrowStreamWriter(root, null, out);
    streamWriter.start();
    int fieldCount = rowType.getFieldCount();
    List<RowData> rows =
            Arrays.asList(
                    new GenericRowData(fieldCount),
                    new GenericRowData(fieldCount),
                    new GenericRowData(fieldCount),
                    new GenericRowData(fieldCount),
                    new GenericRowData(fieldCount));
    int expectedBatches = 3;
    // 5 rows split into chunks of ceil(5/3)=2 -> three batches of sizes 2, 2, 1.
    for (List<RowData> batch : Lists.partition(rows, rows.size() / expectedBatches + 1)) {
        batch.forEach(writer::write);
        writer.finish();
        streamWriter.writeBatch();
        writer.reset();
    }
    byte[] serialized = out.toByteArray();
    assertEquals(
            expectedBatches,
            ArrowUtils.readArrowBatches(
                            Channels.newChannel(new ByteArrayInputStream(serialized)))
                    .length);
}
Example use of org.apache.flink.table.data.GenericRowData in the Apache Flink project:
class PassThroughPythonStreamGroupWindowAggregateOperator, method open.
// Initializes the operator: reusable row/timer buffers, window state maps, a mock
// internal timer service, and the extractor lambdas used by the pass-through
// Python group-window aggregate.
@Override
public void open() throws Exception {
super.open();
// Serialization buffer (plus DataOutputView wrapper) for window data.
windowBaos = new ByteArrayOutputStreamWithPos();
windowBaosWrapper = new DataOutputViewStreamWrapper(windowBaos);
// Reusable rows whose first field carries the record/timer type tag.
reusePythonRowData = new UpdatableRowData(GenericRowData.of(NORMAL_RECORD, null, null), 3);
reusePythonTimerRowData = new UpdatableRowData(GenericRowData.of(TRIGGER_TIMER, null, null), 3);
reusePythonTimerData = new UpdatableRowData(GenericRowData.of(0, null, null, null), 4);
reuseJoinedRow = new JoinedRowData();
windowAggResult = new JoinedRowData();
// The timer payload row rides inside field 2 of the timer row.
reusePythonTimerRowData.setField(2, reusePythonTimerData);
windowAccumulateData = new HashMap<>();
windowRetractData = new HashMap<>();
mockPythonInternalService = (InternalTimerServiceImpl<K, TimeWindow>) getInternalTimerService("python-window-timers", windowSerializer, this.mockPythonWindowOperator);
this.groupKeyProjection = createProjection("GroupKey", grouping);
// Index of the (single) input column the aggregate reads, taken from the spec.
int inputFieldIndex = (int) aggregateFunction.getInputs()[0];
// Extracts the aggregate input as a one-field row holding the long column value.
this.aggExtracter = input -> {
GenericRowData aggResult = new GenericRowData(1);
aggResult.setField(0, input.getLong(inputFieldIndex));
return aggResult;
};
// Maps a TimeWindow to its named window properties (start/end/rowtime/proctime).
this.windowExtractor = window -> {
GenericRowData windowProperty = new GenericRowData(namedProperties.length);
for (int i = 0; i < namedProperties.length; i++) {
switch(namedProperties[i]) {
case WINDOW_START:
windowProperty.setField(i, getShiftEpochMills(window.getStart()));
break;
case WINDOW_END:
windowProperty.setField(i, getShiftEpochMills(window.getEnd()));
break;
case ROW_TIME_ATTRIBUTE:
// Row time is the last instant inside the window: end - 1 ms.
windowProperty.setField(i, getShiftEpochMills(window.getEnd() - 1));
break;
case PROC_TIME_ATTRIBUTE:
// NOTE(review): -1L appears to be a proc-time placeholder — confirm.
windowProperty.setField(i, -1L);
}
}
return windowProperty;
};
}
Example use of org.apache.flink.table.data.GenericRowData in the Apache Flink project:
class HBaseSerde, method convertToRow.
/**
 * Converts an HBase {@link Result} into {@link RowData}.
 *
 * <p>The returned row is a shared, reused instance; copy it if it must survive past the
 * next invocation.
 *
 * @deprecated Use {@link #convertToReusedRow(Result)} instead.
 */
@Deprecated
public RowData convertToRow(Result result) {
    for (int pos = 0; pos < fieldLength; pos++) {
        if (pos == rowkeyIndex) {
            assert keyDecoder != null;
            Object rowkey = keyDecoder.decode(result.getRow());
            reusedRow.setField(rowkeyIndex, rowkey);
            continue;
        }
        // Positions past the row key are shifted down by one in the family arrays.
        int familyIndex = (rowkeyIndex != -1 && pos > rowkeyIndex) ? pos - 1 : pos;
        byte[] family = families[familyIndex];
        GenericRowData familyRow = reusedFamilyRows[familyIndex];
        for (int q = 0; q < qualifiers[familyIndex].length; q++) {
            // Decode each qualifier's cell value into the family's reused row.
            byte[] cell = result.getValue(family, qualifiers[familyIndex][q]);
            familyRow.setField(q, qualifierDecoders[familyIndex][q].decode(cell));
        }
        reusedRow.setField(pos, familyRow);
    }
    return reusedRow;
}
Aggregations