Example usage of org.apache.flink.table.data.GenericRowData in the Apache Flink project,
taken from the class DebeziumAvroSerializationSchema, method open.
/**
 * Initializes this serialization schema: opens the nested Avro serializer and
 * allocates the reusable 3-field output row that is filled per emitted record.
 */
@Override
public void open(InitializationContext context) throws Exception {
    this.avroSerializer.open(context);
    this.outputReuse = new GenericRowData(3);
}
Example usage of org.apache.flink.table.data.GenericRowData in the Apache Flink project,
taken from the class PythonTypeUtilsTest, method testLogicalTypeToDataConverter.
/**
 * Verifies that the {@code IntType} data converter turns an internal int field
 * into its external representation as a boxed {@code Long}.
 */
@Test
public void testLogicalTypeToDataConverter() {
    PythonTypeUtils.DataConverter converter = PythonTypeUtils.toDataConverter(new IntType());
    GenericRowData data = new GenericRowData(1);
    data.setField(0, 10);
    Object externalData = converter.toExternal(data, 0);
    assertTrue(externalData instanceof Long);
    // JUnit's assertEquals takes (expected, actual); the original call had the
    // arguments reversed, which yields a misleading message on failure.
    assertEquals(10L, externalData);
}
Example usage of org.apache.flink.table.data.GenericRowData in the Apache Flink project,
taken from the class JdbcRowDataInputFormatTest, method testJdbcInputFormatWithoutParallelismAndNumericColumnSplitting.
/**
 * Verifies that a batch size spanning the whole id range produces exactly one
 * input split, and that reading that split yields every test record in order.
 */
@Test
public void testJdbcInputFormatWithoutParallelismAndNumericColumnSplitting() throws IOException {
    final long min = TEST_DATA[0].id;
    final long max = TEST_DATA[TEST_DATA.length - 1].id;
    // A batch size larger than the id range forces a single split.
    final long fetchSize = max + 1;
    // Fixed local-name typo: "pramProvider" -> "paramProvider".
    JdbcParameterValuesProvider paramProvider =
            new JdbcNumericBetweenParametersProvider(min, max).ofBatchSize(fetchSize);
    inputFormat =
            JdbcRowDataInputFormat.builder()
                    .setDrivername(DERBY_EBOOKSHOP_DB.getDriverClass())
                    .setDBUrl(DERBY_EBOOKSHOP_DB.getUrl())
                    .setQuery(SELECT_ALL_BOOKS_SPLIT_BY_ID)
                    .setParametersProvider(paramProvider)
                    .setResultSetType(ResultSet.TYPE_SCROLL_INSENSITIVE)
                    .setRowConverter(dialect.getRowConverter(rowType))
                    .build();
    inputFormat.openInputFormat();
    InputSplit[] splits = inputFormat.createInputSplits(1);
    // Assert that a single split was generated. Uses the statically imported
    // assertEquals throughout instead of mixing it with Assert.assertEquals.
    assertEquals(1, splits.length);
    int recordCount = 0;
    RowData row = new GenericRowData(5);
    for (InputSplit split : splits) {
        inputFormat.open(split);
        while (!inputFormat.reachedEnd()) {
            RowData next = inputFormat.nextRecord(row);
            assertEquals(TEST_DATA[recordCount], next);
            recordCount++;
        }
        inputFormat.close();
    }
    inputFormat.closeInputFormat();
    assertEquals(TEST_DATA.length, recordCount);
}
Example usage of org.apache.flink.table.data.GenericRowData in the Apache Flink project,
taken from the class JdbcRowDataInputFormatTest, method testJdbcInputFormatWithoutParallelism.
/**
 * Verifies that a plain full-table query produces exactly one input split and
 * that reading it returns every test record in order.
 */
@Test
public void testJdbcInputFormatWithoutParallelism() throws IOException {
    inputFormat =
            JdbcRowDataInputFormat.builder()
                    .setDrivername(DERBY_EBOOKSHOP_DB.getDriverClass())
                    .setDBUrl(DERBY_EBOOKSHOP_DB.getUrl())
                    .setQuery(SELECT_ALL_BOOKS)
                    .setResultSetType(ResultSet.TYPE_SCROLL_INSENSITIVE)
                    .setRowConverter(dialect.getRowConverter(rowType))
                    .build();
    // This query does not exploit parallelism.
    Assert.assertEquals(1, inputFormat.createInputSplits(1).length);
    inputFormat.openInputFormat();
    inputFormat.open(null);
    RowData reuse = new GenericRowData(5);
    int seen = 0;
    // Compare each record against the expected test data as it is read.
    for (; !inputFormat.reachedEnd(); seen++) {
        assertEquals(TEST_DATA[seen], inputFormat.nextRecord(reuse));
    }
    inputFormat.close();
    inputFormat.closeInputFormat();
    Assert.assertEquals(TEST_DATA.length, seen);
}
Example usage of org.apache.flink.table.data.GenericRowData in the Apache Flink project,
taken from the class JdbcRowDataInputFormatTest, method verifySplit.
/**
 * Reads all records of the given split, checks each record against the test
 * data, and asserts that the ids of the records sum to the expected value.
 *
 * @param split the input split to read
 * @param expectedIDSum expected sum of the id field over all records in the split
 * @throws IOException if the input format fails while reading the split
 */
private void verifySplit(InputSplit split, int expectedIDSum) throws IOException {
    int sum = 0;
    RowData row = new GenericRowData(5);
    RowData.FieldGetter idFieldGetter = RowData.createFieldGetter(new IntType(), 0);
    inputFormat.open(split);
    // The sibling tests close the format after consuming a split; the original
    // omitted the close here and leaked the split's resources.
    try {
        while (!inputFormat.reachedEnd()) {
            row = inputFormat.nextRecord(row);
            // Unboxing assumes the id column is never NULL in the test data.
            int id = (int) idFieldGetter.getFieldOrNull(row);
            // Test ids start at 1001, so the id maps directly to the data index.
            int testDataIndex = id - 1001;
            assertEquals(TEST_DATA[testDataIndex], row);
            sum += id;
        }
    } finally {
        inputFormat.close();
    }
    Assert.assertEquals(expectedIDSum, sum);
}
Aggregations