Use of org.apache.flink.connector.jdbc.split.JdbcParameterValuesProvider in project flink by apache.
From class JdbcRowDataInputFormatTest, method testJdbcInputFormatWithoutParallelismAndNumericColumnSplitting.
@Test
public void testJdbcInputFormatWithoutParallelismAndNumericColumnSplitting() throws IOException {
    final long min = TEST_DATA[0].id;
    final long max = TEST_DATA[TEST_DATA.length - 1].id;
    // a batch size larger than the whole id range generates a single split
    final long fetchSize = max + 1;
    JdbcParameterValuesProvider paramProvider =
            new JdbcNumericBetweenParametersProvider(min, max).ofBatchSize(fetchSize);
    inputFormat =
            JdbcRowDataInputFormat.builder()
                    .setDrivername(DERBY_EBOOKSHOP_DB.getDriverClass())
                    .setDBUrl(DERBY_EBOOKSHOP_DB.getUrl())
                    .setQuery(SELECT_ALL_BOOKS_SPLIT_BY_ID)
                    .setParametersProvider(paramProvider)
                    .setResultSetType(ResultSet.TYPE_SCROLL_INSENSITIVE)
                    .setRowConverter(dialect.getRowConverter(rowType))
                    .build();
    inputFormat.openInputFormat();
    InputSplit[] splits = inputFormat.createInputSplits(1);
    // assert that a single split was generated
    Assert.assertEquals(1, splits.length);
    int recordCount = 0;
    RowData row = new GenericRowData(5);
    for (InputSplit split : splits) {
        inputFormat.open(split);
        while (!inputFormat.reachedEnd()) {
            RowData next = inputFormat.nextRecord(row);
            assertEquals(TEST_DATA[recordCount], next);
            recordCount++;
        }
        inputFormat.close();
    }
    inputFormat.closeInputFormat();
    Assert.assertEquals(TEST_DATA.length, recordCount);
}
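The single split falls out of the provider's batch arithmetic. A minimal standalone sketch, assuming (as in the Flink API) that JdbcParameterValuesProvider exposes a getParameterValues() method returning one Serializable[] of [start, end] bounds per split; the id values here are hypothetical, the test derives min and max from TEST_DATA instead:

import org.apache.flink.connector.jdbc.split.JdbcNumericBetweenParametersProvider;
import java.io.Serializable;

public class SingleSplitSketch {
    public static void main(String[] args) {
        long min = 1001; // hypothetical smallest id
        long max = 1010; // hypothetical largest id
        Serializable[][] params =
                new JdbcNumericBetweenParametersProvider(min, max)
                        .ofBatchSize(max - min + 1) // one batch covers the whole range
                        .getParameterValues();
        // one parameter row -> one input split -> one "id BETWEEN ? AND ?" query
        System.out.println(params.length);                      // 1
        System.out.println(params[0][0] + ".." + params[0][1]); // 1001..1010
    }
}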
Use of org.apache.flink.connector.jdbc.split.JdbcParameterValuesProvider in project flink by apache.
From class JdbcRowDataInputFormatTest, method testJdbcInputFormatWithParallelismAndGenericSplitting.
@Test
public void testJdbcInputFormatWithParallelismAndGenericSplitting() throws IOException {
    Serializable[][] queryParameters = new String[2][1];
    queryParameters[0] = new String[] {TEST_DATA[3].author};
    queryParameters[1] = new String[] {TEST_DATA[0].author};
    JdbcParameterValuesProvider paramProvider =
            new JdbcGenericParameterValuesProvider(queryParameters);
    inputFormat =
            JdbcRowDataInputFormat.builder()
                    .setDrivername(DERBY_EBOOKSHOP_DB.getDriverClass())
                    .setDBUrl(DERBY_EBOOKSHOP_DB.getUrl())
                    .setQuery(SELECT_ALL_BOOKS_SPLIT_BY_AUTHOR)
                    .setParametersProvider(paramProvider)
                    .setResultSetType(ResultSet.TYPE_SCROLL_INSENSITIVE)
                    .setRowConverter(dialect.getRowConverter(rowType))
                    .build();
    inputFormat.openInputFormat();
    InputSplit[] splits = inputFormat.createInputSplits(1);
    // this query exploits parallelism: one split per queryParameters row
    Assert.assertEquals(queryParameters.length, splits.length);
    verifySplit(splits[0], TEST_DATA[3].id);
    verifySplit(splits[1], TEST_DATA[0].id + TEST_DATA[1].id);
    inputFormat.closeInputFormat();
}
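JdbcGenericParameterValuesProvider simply hands back the matrix it was constructed with, but the same contract can be implemented directly. A minimal sketch, again assuming the interface's single getParameterValues() method; AuthorSplitProvider is a hypothetical name, not part of Flink:

import org.apache.flink.connector.jdbc.split.JdbcParameterValuesProvider;
import java.io.Serializable;

public class AuthorSplitProvider implements JdbcParameterValuesProvider {

    private final String[] authors; // one author per desired split

    public AuthorSplitProvider(String... authors) {
        this.authors = authors;
    }

    @Override
    public Serializable[][] getParameterValues() {
        // one parameter row per author -> one input split per author
        Serializable[][] params = new Serializable[authors.length][1];
        for (int i = 0; i < authors.length; i++) {
            params[i][0] = authors[i]; // bound to the query's "author = ?" placeholder
        }
        return params;
    }
}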
Use of org.apache.flink.connector.jdbc.split.JdbcParameterValuesProvider in project flink by apache.
From class JdbcInputFormatTest, method testJdbcInputFormatWithParallelismAndGenericSplitting.
@Test
public void testJdbcInputFormatWithParallelismAndGenericSplitting() throws IOException {
    Serializable[][] queryParameters = new String[2][1];
    queryParameters[0] = new String[] {TEST_DATA[3].author};
    queryParameters[1] = new String[] {TEST_DATA[0].author};
    JdbcParameterValuesProvider paramProvider =
            new JdbcGenericParameterValuesProvider(queryParameters);
    jdbcInputFormat =
            JdbcInputFormat.buildJdbcInputFormat()
                    .setDrivername(DERBY_EBOOKSHOP_DB.getDriverClass())
                    .setDBUrl(DERBY_EBOOKSHOP_DB.getUrl())
                    .setQuery(SELECT_ALL_BOOKS_SPLIT_BY_AUTHOR)
                    .setRowTypeInfo(ROW_TYPE_INFO)
                    .setParametersProvider(paramProvider)
                    .setResultSetType(ResultSet.TYPE_SCROLL_INSENSITIVE)
                    .finish();
    jdbcInputFormat.openInputFormat();
    InputSplit[] splits = jdbcInputFormat.createInputSplits(1);
    // this query exploits parallelism: one split per queryParameters row
    Assert.assertEquals(queryParameters.length, splits.length);
    verifySplit(splits[0], TEST_DATA[3].id);
    verifySplit(splits[1], TEST_DATA[0].id + TEST_DATA[1].id);
    jdbcInputFormat.closeInputFormat();
}
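Both generic-splitting tests bind one String per split into SELECT_ALL_BOOKS_SPLIT_BY_AUTHOR, whose definition lives in the test fixtures and is not part of this listing. A query of the following hypothetical shape would match the single-parameter rows above; the column names are illustrative, chosen to match the five-field rows the tests read back:

// Hypothetical shape of the split query: exactly one '?' per parameter column.
private static final String SPLIT_BY_AUTHOR_QUERY_SKETCH =
        "SELECT id, title, author, price, qty FROM books WHERE author = ?";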
Use of org.apache.flink.connector.jdbc.split.JdbcParameterValuesProvider in project flink by apache.
From class JdbcInputFormatTest, method testJdbcInputFormatWithParallelismAndNumericColumnSplitting.
@Test
public void testJdbcInputFormatWithParallelismAndNumericColumnSplitting() throws IOException {
    final int fetchSize = 1;
    final long min = TEST_DATA[0].id;
    final long max = TEST_DATA[TEST_DATA.length - fetchSize].id;
    JdbcParameterValuesProvider paramProvider =
            new JdbcNumericBetweenParametersProvider(min, max).ofBatchSize(fetchSize);
    jdbcInputFormat =
            JdbcInputFormat.buildJdbcInputFormat()
                    .setDrivername(DERBY_EBOOKSHOP_DB.getDriverClass())
                    .setDBUrl(DERBY_EBOOKSHOP_DB.getUrl())
                    .setQuery(SELECT_ALL_BOOKS_SPLIT_BY_ID)
                    .setRowTypeInfo(ROW_TYPE_INFO)
                    .setParametersProvider(paramProvider)
                    .setResultSetType(ResultSet.TYPE_SCROLL_INSENSITIVE)
                    .finish();
    jdbcInputFormat.openInputFormat();
    InputSplit[] splits = jdbcInputFormat.createInputSplits(1);
    // this query exploits parallelism: one split per id
    Assert.assertEquals(TEST_DATA.length, splits.length);
    int recordCount = 0;
    Row row = new Row(5);
    for (InputSplit split : splits) {
        jdbcInputFormat.open(split);
        while (!jdbcInputFormat.reachedEnd()) {
            Row next = jdbcInputFormat.nextRecord(row);
            assertEquals(TEST_DATA[recordCount], next);
            recordCount++;
        }
        jdbcInputFormat.close();
    }
    jdbcInputFormat.closeInputFormat();
    Assert.assertEquals(TEST_DATA.length, recordCount);
}
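With a batch size of 1 the provider emits one [id, id] pair per id, so the number of splits scales with the row count. When a fixed number of splits is wanted instead, JdbcNumericBetweenParametersProvider also offers ofBatchNum. A minimal sketch reusing min and max from the test above; the 4 is an arbitrary illustrative value:

// Divide [min, max] into a fixed number of roughly equal ranges,
// rather than fixing the size of each range.
JdbcParameterValuesProvider paramProvider =
        new JdbcNumericBetweenParametersProvider(min, max).ofBatchNum(4);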
Use of org.apache.flink.connector.jdbc.split.JdbcParameterValuesProvider in project flink by apache.
From class JdbcRowDataInputFormatTest, method testJdbcInputFormatWithParallelismAndNumericColumnSplitting.
@Test
public void testJdbcInputFormatWithParallelismAndNumericColumnSplitting() throws IOException {
    final int fetchSize = 1;
    final long min = TEST_DATA[0].id;
    final long max = TEST_DATA[TEST_DATA.length - fetchSize].id;
    JdbcParameterValuesProvider paramProvider =
            new JdbcNumericBetweenParametersProvider(min, max).ofBatchSize(fetchSize);
    inputFormat =
            JdbcRowDataInputFormat.builder()
                    .setDrivername(DERBY_EBOOKSHOP_DB.getDriverClass())
                    .setDBUrl(DERBY_EBOOKSHOP_DB.getUrl())
                    .setQuery(SELECT_ALL_BOOKS_SPLIT_BY_ID)
                    .setParametersProvider(paramProvider)
                    .setResultSetType(ResultSet.TYPE_SCROLL_INSENSITIVE)
                    .setRowConverter(dialect.getRowConverter(rowType))
                    .build();
    inputFormat.openInputFormat();
    InputSplit[] splits = inputFormat.createInputSplits(1);
    // this query exploits parallelism: one split per id
    Assert.assertEquals(TEST_DATA.length, splits.length);
    int recordCount = 0;
    RowData row = new GenericRowData(5);
    for (InputSplit split : splits) {
        inputFormat.open(split);
        while (!inputFormat.reachedEnd()) {
            RowData next = inputFormat.nextRecord(row);
            assertEquals(TEST_DATA[recordCount], next);
            recordCount++;
        }
        inputFormat.close();
    }
    inputFormat.closeInputFormat();
    Assert.assertEquals(TEST_DATA.length, recordCount);
}
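Outside the test harness, the format is handed to the Flink runtime rather than driven by hand through open/nextRecord/close. A minimal sketch for the Row-based variant, assuming the batch ExecutionEnvironment.createInput API; the driver, URL, query string, and id range below are hypothetical placeholders, where the tests read these from their Derby fixtures:

import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.typeutils.RowTypeInfo;
import org.apache.flink.connector.jdbc.JdbcInputFormat;
import org.apache.flink.connector.jdbc.split.JdbcNumericBetweenParametersProvider;
import org.apache.flink.types.Row;
import java.sql.ResultSet;

public class JdbcReadJobSketch {
    public static void main(String[] args) throws Exception {
        JdbcInputFormat format =
                JdbcInputFormat.buildJdbcInputFormat()
                        .setDrivername("org.apache.derby.jdbc.EmbeddedDriver")  // hypothetical
                        .setDBUrl("jdbc:derby:memory:ebookshop")                // hypothetical
                        .setQuery("SELECT id, title, author, price, qty FROM books"
                                + " WHERE id BETWEEN ? AND ?")
                        .setRowTypeInfo(new RowTypeInfo(
                                BasicTypeInfo.INT_TYPE_INFO,
                                BasicTypeInfo.STRING_TYPE_INFO,
                                BasicTypeInfo.STRING_TYPE_INFO,
                                BasicTypeInfo.DOUBLE_TYPE_INFO,
                                BasicTypeInfo.INT_TYPE_INFO))
                        .setParametersProvider(
                                new JdbcNumericBetweenParametersProvider(1001, 1010).ofBatchSize(2))
                        .setResultSetType(ResultSet.TYPE_SCROLL_INSENSITIVE)
                        .finish();

        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        // createInput drives openInputFormat/open/nextRecord/close per split in parallel
        DataSet<Row> books = env.createInput(format);
        books.print();
    }
}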