Search in sources :

Example 1 with JdbcParameterValuesProvider

Use of org.apache.flink.connector.jdbc.split.JdbcParameterValuesProvider in the Apache Flink project.

From class JdbcRowDataInputFormatTest, method testJdbcInputFormatWithoutParallelismAndNumericColumnSplitting.

@Test
public void testJdbcInputFormatWithoutParallelismAndNumericColumnSplitting() throws IOException {
    final long min = TEST_DATA[0].id;
    final long max = TEST_DATA[TEST_DATA.length - 1].id;
    // Choose a batch size larger than the whole id range so the provider
    // generates exactly one split covering all rows.
    final long fetchSize = max + 1;
    // Fixed: renamed misspelled local "pramProvider" -> "paramProvider" for
    // consistency with the sibling tests.
    JdbcParameterValuesProvider paramProvider =
            new JdbcNumericBetweenParametersProvider(min, max).ofBatchSize(fetchSize);
    inputFormat =
            JdbcRowDataInputFormat.builder()
                    .setDrivername(DERBY_EBOOKSHOP_DB.getDriverClass())
                    .setDBUrl(DERBY_EBOOKSHOP_DB.getUrl())
                    .setQuery(SELECT_ALL_BOOKS_SPLIT_BY_ID)
                    .setParametersProvider(paramProvider)
                    .setResultSetType(ResultSet.TYPE_SCROLL_INSENSITIVE)
                    .setRowConverter(dialect.getRowConverter(rowType))
                    .build();
    inputFormat.openInputFormat();
    InputSplit[] splits = inputFormat.createInputSplits(1);
    // Assert that a single split was generated.
    Assert.assertEquals(1, splits.length);
    int recordCount = 0;
    RowData row = new GenericRowData(5);
    for (InputSplit split : splits) {
        inputFormat.open(split);
        while (!inputFormat.reachedEnd()) {
            RowData next = inputFormat.nextRecord(row);
            // Unified on Assert.assertEquals (was a mix of static and
            // qualified assertEquals calls within the same method).
            Assert.assertEquals(TEST_DATA[recordCount], next);
            recordCount++;
        }
        inputFormat.close();
    }
    inputFormat.closeInputFormat();
    // Every test row must have been read exactly once.
    Assert.assertEquals(TEST_DATA.length, recordCount);
}
Also used : GenericRowData(org.apache.flink.table.data.GenericRowData) RowData(org.apache.flink.table.data.RowData) GenericRowData(org.apache.flink.table.data.GenericRowData) JdbcNumericBetweenParametersProvider(org.apache.flink.connector.jdbc.split.JdbcNumericBetweenParametersProvider) InputSplit(org.apache.flink.core.io.InputSplit) JdbcParameterValuesProvider(org.apache.flink.connector.jdbc.split.JdbcParameterValuesProvider) Test(org.junit.Test)

Example 2 with JdbcParameterValuesProvider

Use of org.apache.flink.connector.jdbc.split.JdbcParameterValuesProvider in the Apache Flink project.

From class JdbcRowDataInputFormatTest, method testJdbcInputFormatWithParallelismAndGenericSplitting.

@Test
public void testJdbcInputFormatWithParallelismAndGenericSplitting() throws IOException {
    // One parameter row per desired split; each row holds a single author value.
    Serializable[][] splitParams = new String[2][1];
    splitParams[0] = new String[] { TEST_DATA[3].author };
    splitParams[1] = new String[] { TEST_DATA[0].author };
    JdbcParameterValuesProvider paramProvider = new JdbcGenericParameterValuesProvider(splitParams);
    inputFormat =
            JdbcRowDataInputFormat.builder()
                    .setDrivername(DERBY_EBOOKSHOP_DB.getDriverClass())
                    .setDBUrl(DERBY_EBOOKSHOP_DB.getUrl())
                    .setQuery(SELECT_ALL_BOOKS_SPLIT_BY_AUTHOR)
                    .setParametersProvider(paramProvider)
                    .setResultSetType(ResultSet.TYPE_SCROLL_INSENSITIVE)
                    .setRowConverter(dialect.getRowConverter(rowType))
                    .build();
    inputFormat.openInputFormat();
    InputSplit[] splits = inputFormat.createInputSplits(1);
    // Generic splitting yields one split per parameter row, regardless of the hint.
    Assert.assertEquals(splitParams.length, splits.length);
    verifySplit(splits[0], TEST_DATA[3].id);
    verifySplit(splits[1], TEST_DATA[0].id + TEST_DATA[1].id);
    inputFormat.closeInputFormat();
}
Also used : JdbcGenericParameterValuesProvider(org.apache.flink.connector.jdbc.split.JdbcGenericParameterValuesProvider) InputSplit(org.apache.flink.core.io.InputSplit) JdbcParameterValuesProvider(org.apache.flink.connector.jdbc.split.JdbcParameterValuesProvider) Test(org.junit.Test)

Example 3 with JdbcParameterValuesProvider

Use of org.apache.flink.connector.jdbc.split.JdbcParameterValuesProvider in the Apache Flink project.

From class JdbcInputFormatTest, method testJdbcInputFormatWithParallelismAndGenericSplitting.

@Test
public void testJdbcInputFormatWithParallelismAndGenericSplitting() throws IOException {
    // Two parameter rows -> two splits; each row binds one author to the query.
    Serializable[][] splitParams = new String[2][1];
    splitParams[0] = new String[] { TEST_DATA[3].author };
    splitParams[1] = new String[] { TEST_DATA[0].author };
    JdbcParameterValuesProvider paramProvider = new JdbcGenericParameterValuesProvider(splitParams);
    jdbcInputFormat =
            JdbcInputFormat.buildJdbcInputFormat()
                    .setDrivername(DERBY_EBOOKSHOP_DB.getDriverClass())
                    .setDBUrl(DERBY_EBOOKSHOP_DB.getUrl())
                    .setQuery(SELECT_ALL_BOOKS_SPLIT_BY_AUTHOR)
                    .setRowTypeInfo(ROW_TYPE_INFO)
                    .setParametersProvider(paramProvider)
                    .setResultSetType(ResultSet.TYPE_SCROLL_INSENSITIVE)
                    .finish();
    jdbcInputFormat.openInputFormat();
    InputSplit[] splits = jdbcInputFormat.createInputSplits(1);
    // Generic splitting yields one split per parameter row, regardless of the hint.
    Assert.assertEquals(splitParams.length, splits.length);
    verifySplit(splits[0], TEST_DATA[3].id);
    verifySplit(splits[1], TEST_DATA[0].id + TEST_DATA[1].id);
    jdbcInputFormat.closeInputFormat();
}
Also used : JdbcGenericParameterValuesProvider(org.apache.flink.connector.jdbc.split.JdbcGenericParameterValuesProvider) InputSplit(org.apache.flink.core.io.InputSplit) JdbcParameterValuesProvider(org.apache.flink.connector.jdbc.split.JdbcParameterValuesProvider) Test(org.junit.Test)

Example 4 with JdbcParameterValuesProvider

Use of org.apache.flink.connector.jdbc.split.JdbcParameterValuesProvider in the Apache Flink project.

From class JdbcInputFormatTest, method testJdbcInputFormatWithParallelismAndNumericColumnSplitting.

@Test
public void testJdbcInputFormatWithParallelismAndNumericColumnSplitting() throws IOException {
    // Batch size of 1 -> one split per id in [min, max].
    final int fetchSize = 1;
    final long min = TEST_DATA[0].id;
    final long max = TEST_DATA[TEST_DATA.length - fetchSize].id;
    // Fixed: renamed misspelled local "pramProvider" -> "paramProvider" for
    // consistency with the sibling tests.
    JdbcParameterValuesProvider paramProvider =
            new JdbcNumericBetweenParametersProvider(min, max).ofBatchSize(fetchSize);
    jdbcInputFormat =
            JdbcInputFormat.buildJdbcInputFormat()
                    .setDrivername(DERBY_EBOOKSHOP_DB.getDriverClass())
                    .setDBUrl(DERBY_EBOOKSHOP_DB.getUrl())
                    .setQuery(SELECT_ALL_BOOKS_SPLIT_BY_ID)
                    .setRowTypeInfo(ROW_TYPE_INFO)
                    .setParametersProvider(paramProvider)
                    .setResultSetType(ResultSet.TYPE_SCROLL_INSENSITIVE)
                    .finish();
    jdbcInputFormat.openInputFormat();
    InputSplit[] splits = jdbcInputFormat.createInputSplits(1);
    // This query exploits parallelism: one split for every id.
    Assert.assertEquals(TEST_DATA.length, splits.length);
    int recordCount = 0;
    Row row = new Row(5);
    for (InputSplit split : splits) {
        jdbcInputFormat.open(split);
        while (!jdbcInputFormat.reachedEnd()) {
            Row next = jdbcInputFormat.nextRecord(row);
            // Unified on Assert.assertEquals (was a mix of static and
            // qualified assertEquals calls within the same method).
            Assert.assertEquals(TEST_DATA[recordCount], next);
            recordCount++;
        }
        jdbcInputFormat.close();
    }
    jdbcInputFormat.closeInputFormat();
    // Every test row must have been read exactly once across all splits.
    Assert.assertEquals(TEST_DATA.length, recordCount);
}
Also used : Row(org.apache.flink.types.Row) JdbcNumericBetweenParametersProvider(org.apache.flink.connector.jdbc.split.JdbcNumericBetweenParametersProvider) InputSplit(org.apache.flink.core.io.InputSplit) JdbcParameterValuesProvider(org.apache.flink.connector.jdbc.split.JdbcParameterValuesProvider) Test(org.junit.Test)

Example 5 with JdbcParameterValuesProvider

Use of org.apache.flink.connector.jdbc.split.JdbcParameterValuesProvider in the Apache Flink project.

From class JdbcRowDataInputFormatTest, method testJdbcInputFormatWithParallelismAndNumericColumnSplitting.

@Test
public void testJdbcInputFormatWithParallelismAndNumericColumnSplitting() throws IOException {
    // Batch size of 1 -> one split per id in [min, max].
    final int fetchSize = 1;
    final long min = TEST_DATA[0].id;
    final long max = TEST_DATA[TEST_DATA.length - fetchSize].id;
    // Fixed: renamed misspelled local "pramProvider" -> "paramProvider" for
    // consistency with the sibling tests.
    JdbcParameterValuesProvider paramProvider =
            new JdbcNumericBetweenParametersProvider(min, max).ofBatchSize(fetchSize);
    inputFormat =
            JdbcRowDataInputFormat.builder()
                    .setDrivername(DERBY_EBOOKSHOP_DB.getDriverClass())
                    .setDBUrl(DERBY_EBOOKSHOP_DB.getUrl())
                    .setQuery(SELECT_ALL_BOOKS_SPLIT_BY_ID)
                    .setParametersProvider(paramProvider)
                    .setResultSetType(ResultSet.TYPE_SCROLL_INSENSITIVE)
                    .setRowConverter(dialect.getRowConverter(rowType))
                    .build();
    inputFormat.openInputFormat();
    InputSplit[] splits = inputFormat.createInputSplits(1);
    // This query exploits parallelism: one split for every id.
    Assert.assertEquals(TEST_DATA.length, splits.length);
    int recordCount = 0;
    RowData row = new GenericRowData(5);
    for (InputSplit split : splits) {
        inputFormat.open(split);
        while (!inputFormat.reachedEnd()) {
            RowData next = inputFormat.nextRecord(row);
            // Unified on Assert.assertEquals (was a mix of static and
            // qualified assertEquals calls within the same method).
            Assert.assertEquals(TEST_DATA[recordCount], next);
            recordCount++;
        }
        inputFormat.close();
    }
    inputFormat.closeInputFormat();
    // Every test row must have been read exactly once across all splits.
    Assert.assertEquals(TEST_DATA.length, recordCount);
}
Also used : GenericRowData(org.apache.flink.table.data.GenericRowData) RowData(org.apache.flink.table.data.RowData) GenericRowData(org.apache.flink.table.data.GenericRowData) JdbcNumericBetweenParametersProvider(org.apache.flink.connector.jdbc.split.JdbcNumericBetweenParametersProvider) InputSplit(org.apache.flink.core.io.InputSplit) JdbcParameterValuesProvider(org.apache.flink.connector.jdbc.split.JdbcParameterValuesProvider) Test(org.junit.Test)

Aggregations

JdbcParameterValuesProvider (org.apache.flink.connector.jdbc.split.JdbcParameterValuesProvider)6 InputSplit (org.apache.flink.core.io.InputSplit)6 Test (org.junit.Test)6 JdbcNumericBetweenParametersProvider (org.apache.flink.connector.jdbc.split.JdbcNumericBetweenParametersProvider)4 JdbcGenericParameterValuesProvider (org.apache.flink.connector.jdbc.split.JdbcGenericParameterValuesProvider)2 GenericRowData (org.apache.flink.table.data.GenericRowData)2 RowData (org.apache.flink.table.data.RowData)2 Row (org.apache.flink.types.Row)2