Search in sources:

Example 6 with JdbcDmlOptions

Use of org.apache.flink.connector.jdbc.internal.options.JdbcDmlOptions in project flink by apache.

From the class JdbcOutputFormatTest, method testFlushWithBatchSizeEqualsZero.

@Test
public void testFlushWithBatchSizeEqualsZero() throws SQLException, IOException {
    JdbcConnectorOptions jdbcOptions = JdbcConnectorOptions.builder()
            .setDriverName(DERBY_EBOOKSHOP_DB.getDriverClass())
            .setDBUrl(DERBY_EBOOKSHOP_DB.getUrl())
            .setTableName(OUTPUT_TABLE_2)
            .build();
    JdbcDmlOptions dmlOptions = JdbcDmlOptions.builder()
            .withTableName(jdbcOptions.getTableName())
            .withDialect(jdbcOptions.getDialect())
            .withFieldNames(fieldNames)
            .build();
    JdbcExecutionOptions executionOptions = JdbcExecutionOptions.builder().withBatchSize(0).build();
    outputFormat = new JdbcOutputFormatBuilder()
            .setJdbcOptions(jdbcOptions)
            .setFieldDataTypes(fieldDataTypes)
            .setJdbcDmlOptions(dmlOptions)
            .setJdbcExecutionOptions(executionOptions)
            .setRowDataTypeInfo(rowDataTypeInfo)
            .build();
    setRuntimeContext(outputFormat, true);
    try (Connection dbConn = DriverManager.getConnection(DERBY_EBOOKSHOP_DB.getUrl());
        PreparedStatement statement = dbConn.prepareStatement(SELECT_ALL_NEWBOOKS_2)) {
        outputFormat.open(0, 1);
        for (int i = 0; i < 2; ++i) {
            outputFormat.writeRecord(buildGenericData(TEST_DATA[i].id, TEST_DATA[i].title, TEST_DATA[i].author, TEST_DATA[i].price, TEST_DATA[i].qty));
        }
        try (ResultSet resultSet = statement.executeQuery()) {
            assertFalse(resultSet.next());
        }
    } finally {
        outputFormat.close();
    }
}
Also used: JdbcExecutionOptions(org.apache.flink.connector.jdbc.JdbcExecutionOptions) JdbcConnectorOptions(org.apache.flink.connector.jdbc.internal.options.JdbcConnectorOptions) JdbcDmlOptions(org.apache.flink.connector.jdbc.internal.options.JdbcDmlOptions) Connection(java.sql.Connection) ResultSet(java.sql.ResultSet) PreparedStatement(java.sql.PreparedStatement) Test(org.junit.Test)
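
For context, the behaviour this test exercises is driven entirely by JdbcExecutionOptions: with a batch size of zero, no size-based flush ever fires, so the two written records stay buffered and the SELECT sees an empty table. A minimal sketch of the builder is shown below; the interval and retry values are illustrative and not taken from the test above.

// Sketch: JdbcExecutionOptions controls when buffered records are written out.
// withBatchSize(0) disables size-based flushing, so records remain buffered
// until flush() or close() is called -- which is exactly what the test asserts.
JdbcExecutionOptions executionOptions = JdbcExecutionOptions.builder()
        .withBatchSize(0)        // no size-based flush
        .withBatchIntervalMs(0L) // no time-based flush (illustrative value)
        .withMaxRetries(3)       // illustrative retry count
        .build();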

Example 7 with JdbcDmlOptions

Use of org.apache.flink.connector.jdbc.internal.options.JdbcDmlOptions in project flink by apache.

From the class JdbcOutputFormatTest, method testInvalidDriver.

@Test
public void testInvalidDriver() {
    String expectedMsg = "unable to open JDBC writer";
    try {
        JdbcConnectorOptions jdbcOptions = JdbcConnectorOptions.builder()
                .setDriverName("org.apache.derby.jdbc.idontexist")
                .setDBUrl(DERBY_EBOOKSHOP_DB.getUrl())
                .setTableName(INPUT_TABLE)
                .build();
        JdbcDmlOptions dmlOptions = JdbcDmlOptions.builder()
                .withTableName(jdbcOptions.getTableName())
                .withDialect(jdbcOptions.getDialect())
                .withFieldNames(fieldNames)
                .build();
        outputFormat = new JdbcOutputFormatBuilder()
                .setJdbcOptions(jdbcOptions)
                .setFieldDataTypes(fieldDataTypes)
                .setJdbcDmlOptions(dmlOptions)
                .setJdbcExecutionOptions(JdbcExecutionOptions.builder().build())
                .build();
        outputFormat.open(0, 1);
        fail("Expected exception is not thrown.");
    } catch (Exception e) {
        assertTrue(findThrowable(e, IOException.class).isPresent());
        assertTrue(findThrowableWithMessage(e, expectedMsg).isPresent());
    }
}
Also used: JdbcConnectorOptions(org.apache.flink.connector.jdbc.internal.options.JdbcConnectorOptions) JdbcDmlOptions(org.apache.flink.connector.jdbc.internal.options.JdbcDmlOptions) IOException(java.io.IOException) SQLException(java.sql.SQLException) Test(org.junit.Test)
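
The try/fail/catch idiom above can also be written with JUnit's assertThrows; a minimal sketch, assuming JUnit 4.13+ on the classpath (which the original test does not state) and the same outputFormat and findThrowable helpers (from org.apache.flink.util.ExceptionUtils) used above.

// Sketch: the same assertion expressed with org.junit.Assert.assertThrows.
Exception e = assertThrows(Exception.class, () -> outputFormat.open(0, 1));
assertTrue(findThrowable(e, IOException.class).isPresent());
assertTrue(findThrowableWithMessage(e, "unable to open JDBC writer").isPresent());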

Example 8 with JdbcDmlOptions

Use of org.apache.flink.connector.jdbc.internal.options.JdbcDmlOptions in project flink by apache.

From the class JdbcOutputFormatBuilder, method createBufferReduceExecutor.

private static JdbcBatchStatementExecutor<RowData> createBufferReduceExecutor(JdbcDmlOptions opt, RuntimeContext ctx, TypeInformation<RowData> rowDataTypeInfo, LogicalType[] fieldTypes) {
    checkArgument(opt.getKeyFields().isPresent());
    JdbcDialect dialect = opt.getDialect();
    String tableName = opt.getTableName();
    String[] pkNames = opt.getKeyFields().get();
    int[] pkFields = Arrays.stream(pkNames).mapToInt(Arrays.asList(opt.getFieldNames())::indexOf).toArray();
    LogicalType[] pkTypes = Arrays.stream(pkFields).mapToObj(f -> fieldTypes[f]).toArray(LogicalType[]::new);
    final TypeSerializer<RowData> typeSerializer = rowDataTypeInfo.createSerializer(ctx.getExecutionConfig());
    final Function<RowData, RowData> valueTransform = ctx.getExecutionConfig().isObjectReuseEnabled() ? typeSerializer::copy : Function.identity();
    return new TableBufferReducedStatementExecutor(
            createUpsertRowExecutor(dialect, tableName, opt.getFieldNames(), fieldTypes, pkFields, pkNames, pkTypes),
            createDeleteExecutor(dialect, tableName, pkNames, pkTypes),
            createRowKeyExtractor(fieldTypes, pkFields),
            valueTransform);
}
Also used: DataType(org.apache.flink.table.types.DataType) Arrays(java.util.Arrays) RuntimeContext(org.apache.flink.api.common.functions.RuntimeContext) JdbcExecutionOptions(org.apache.flink.connector.jdbc.JdbcExecutionOptions) JdbcDmlOptions(org.apache.flink.connector.jdbc.internal.options.JdbcDmlOptions) JdbcRowConverter(org.apache.flink.connector.jdbc.converter.JdbcRowConverter) RowType(org.apache.flink.table.types.logical.RowType) Function(java.util.function.Function) FieldNamedPreparedStatement(org.apache.flink.connector.jdbc.statement.FieldNamedPreparedStatement) RowData.createFieldGetter(org.apache.flink.table.data.RowData.createFieldGetter) TableSimpleStatementExecutor(org.apache.flink.connector.jdbc.internal.executor.TableSimpleStatementExecutor) GenericRowData(org.apache.flink.table.data.GenericRowData) JdbcBatchStatementExecutor(org.apache.flink.connector.jdbc.internal.executor.JdbcBatchStatementExecutor) TableBufferReducedStatementExecutor(org.apache.flink.connector.jdbc.internal.executor.TableBufferReducedStatementExecutor) TableBufferedStatementExecutor(org.apache.flink.connector.jdbc.internal.executor.TableBufferedStatementExecutor) SimpleJdbcConnectionProvider(org.apache.flink.connector.jdbc.internal.connection.SimpleJdbcConnectionProvider) Preconditions.checkNotNull(org.apache.flink.util.Preconditions.checkNotNull) TypeInformation(org.apache.flink.api.common.typeinfo.TypeInformation) TypeSerializer(org.apache.flink.api.common.typeutils.TypeSerializer) RowData(org.apache.flink.table.data.RowData) JdbcOutputFormat(org.apache.flink.connector.jdbc.internal.JdbcOutputFormat) JdbcConnectorOptions(org.apache.flink.connector.jdbc.internal.options.JdbcConnectorOptions) Serializable(java.io.Serializable) LogicalType(org.apache.flink.table.types.logical.LogicalType) Preconditions.checkArgument(org.apache.flink.util.Preconditions.checkArgument) TableInsertOrUpdateStatementExecutor(org.apache.flink.connector.jdbc.internal.executor.TableInsertOrUpdateStatementExecutor) JdbcDialect(org.apache.flink.connector.jdbc.dialect.JdbcDialect)
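
The densest step in this method is the primary-key index lookup, which maps each key name onto its position in the full field-name array. The following standalone sketch shows the same Arrays.asList(...)::indexOf idiom with hypothetical field and key names that are not taken from the Flink sources.

import java.util.Arrays;

public class PkIndexSketch {
    public static void main(String[] args) {
        // Hypothetical table layout and primary key, for illustration only.
        String[] fieldNames = {"id", "title", "author", "price", "qty"};
        String[] pkNames = {"id"};

        // Same idiom as in createBufferReduceExecutor: for every key name,
        // find its index inside the full field-name array.
        int[] pkFields = Arrays.stream(pkNames)
                .mapToInt(Arrays.asList(fieldNames)::indexOf)
                .toArray();

        System.out.println(Arrays.toString(pkFields)); // prints [0]
    }
}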

Example 9 with JdbcDmlOptions

Use of org.apache.flink.connector.jdbc.internal.options.JdbcDmlOptions in project flink by apache.

From the class JdbcTableOutputFormatTest, method testJdbcOutputFormat.

@Test
public void testJdbcOutputFormat() throws Exception {
    JdbcConnectorOptions options = JdbcConnectorOptions.builder()
            .setDBUrl(getDbMetadata().getUrl())
            .setTableName(OUTPUT_TABLE)
            .build();
    JdbcDmlOptions dmlOptions = JdbcDmlOptions.builder()
            .withTableName(options.getTableName())
            .withDialect(options.getDialect())
            .withFieldNames(fieldNames)
            .withKeyFields(keyFields)
            .build();
    format = new TableJdbcUpsertOutputFormat(
            new SimpleJdbcConnectionProvider(options), dmlOptions, JdbcExecutionOptions.defaults());
    RuntimeContext context = Mockito.mock(RuntimeContext.class);
    ExecutionConfig config = Mockito.mock(ExecutionConfig.class);
    doReturn(config).when(context).getExecutionConfig();
    doReturn(true).when(config).isObjectReuseEnabled();
    format.setRuntimeContext(context);
    format.open(0, 1);
    for (TestEntry entry : TEST_DATA) {
        format.writeRecord(Tuple2.of(true, toRow(entry)));
    }
    format.flush();
    check(Arrays.stream(TEST_DATA).map(JdbcDataTestBase::toRow).toArray(Row[]::new));
    // override
    for (TestEntry entry : TEST_DATA) {
        format.writeRecord(Tuple2.of(true, toRow(entry)));
    }
    format.flush();
    check(Arrays.stream(TEST_DATA).map(JdbcDataTestBase::toRow).toArray(Row[]::new));
    // delete
    for (int i = 0; i < TEST_DATA.length / 2; i++) {
        format.writeRecord(Tuple2.of(false, toRow(TEST_DATA[i])));
    }
    Row[] expected = new Row[TEST_DATA.length - TEST_DATA.length / 2];
    for (int i = TEST_DATA.length / 2; i < TEST_DATA.length; i++) {
        expected[i - TEST_DATA.length / 2] = toRow(TEST_DATA[i]);
    }
    format.flush();
    check(expected);
}
Also used: JdbcConnectorOptions(org.apache.flink.connector.jdbc.internal.options.JdbcConnectorOptions) SimpleJdbcConnectionProvider(org.apache.flink.connector.jdbc.internal.connection.SimpleJdbcConnectionProvider) JdbcDmlOptions(org.apache.flink.connector.jdbc.internal.options.JdbcDmlOptions) TestEntry(org.apache.flink.connector.jdbc.JdbcTestFixture.TestEntry) ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) Row(org.apache.flink.types.Row) RuntimeContext(org.apache.flink.api.common.functions.RuntimeContext) JdbcDataTestBase(org.apache.flink.connector.jdbc.JdbcDataTestBase) Test(org.junit.Test)
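
The index-shifting loop that builds the expected rows for the delete check can also be written as a single stream pipeline; a minimal sketch, assuming the same TEST_DATA fixture and toRow helper used by the test.

// Sketch: after deleting the first half of TEST_DATA, the expected result
// is simply the second half, mapped to rows.
Row[] expected = Arrays.stream(TEST_DATA)
        .skip(TEST_DATA.length / 2)
        .map(JdbcDataTestBase::toRow)
        .toArray(Row[]::new);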

Example 10 with JdbcDmlOptions

Use of org.apache.flink.connector.jdbc.internal.options.JdbcDmlOptions in project flink by apache.

From the class JdbcTableOutputFormatTest, method testUpsertFormatCloseBeforeOpen.

@Test
public void testUpsertFormatCloseBeforeOpen() throws Exception {
    JdbcConnectorOptions options = JdbcConnectorOptions.builder().setDBUrl(getDbMetadata().getUrl()).setTableName(OUTPUT_TABLE).build();
    JdbcDmlOptions dmlOptions = JdbcDmlOptions.builder().withTableName(options.getTableName()).withDialect(options.getDialect()).withFieldNames(fieldNames).withKeyFields(keyFields).build();
    format = new TableJdbcUpsertOutputFormat(new SimpleJdbcConnectionProvider(options), dmlOptions, JdbcExecutionOptions.defaults());
    // FLINK-17544: There should be no NPE thrown from this method
    format.close();
}
Also used: JdbcConnectorOptions(org.apache.flink.connector.jdbc.internal.options.JdbcConnectorOptions) SimpleJdbcConnectionProvider(org.apache.flink.connector.jdbc.internal.connection.SimpleJdbcConnectionProvider) JdbcDmlOptions(org.apache.flink.connector.jdbc.internal.options.JdbcDmlOptions) Test(org.junit.Test)
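
FLINK-17544 is about making close() safe when open() was never called. The defensive pattern the test guards is sketched below with a hypothetical, simplified writer; this is not the actual TableJdbcUpsertOutputFormat code.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

// Hypothetical simplified writer illustrating the pattern behind FLINK-17544:
// close() null-checks every resource that open() would have created, so
// calling close() before open() is a no-op instead of a NullPointerException.
class SafeCloseWriter implements AutoCloseable {
    private Connection connection;       // assigned in open(), omitted here
    private PreparedStatement statement; // assigned in open(), omitted here

    @Override
    public void close() {
        if (statement != null) {
            try {
                statement.close();
            } catch (SQLException ignored) {
                // closing must never fail the shutdown path
            }
            statement = null;
        }
        if (connection != null) {
            try {
                connection.close();
            } catch (SQLException ignored) {
                // ignore on close
            }
            connection = null;
        }
    }
}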

Aggregations

JdbcConnectorOptions (org.apache.flink.connector.jdbc.internal.options.JdbcConnectorOptions): 15
JdbcDmlOptions (org.apache.flink.connector.jdbc.internal.options.JdbcDmlOptions): 15
Test (org.junit.Test): 14
JdbcExecutionOptions (org.apache.flink.connector.jdbc.JdbcExecutionOptions): 6
IOException (java.io.IOException): 5
SQLException (java.sql.SQLException): 5
TestEntry (org.apache.flink.connector.jdbc.JdbcTestFixture.TestEntry): 5
Connection (java.sql.Connection): 4
PreparedStatement (java.sql.PreparedStatement): 4
ResultSet (java.sql.ResultSet): 4
RowData (org.apache.flink.table.data.RowData): 4
SimpleJdbcConnectionProvider (org.apache.flink.connector.jdbc.internal.connection.SimpleJdbcConnectionProvider): 3
DynamicTableSink (org.apache.flink.table.connector.sink.DynamicTableSink): 3
RuntimeContext (org.apache.flink.api.common.functions.RuntimeContext): 2
Serializable (java.io.Serializable): 1
Arrays (java.util.Arrays): 1
Function (java.util.function.Function): 1
ExecutionConfig (org.apache.flink.api.common.ExecutionConfig): 1
TypeInformation (org.apache.flink.api.common.typeinfo.TypeInformation): 1
TypeSerializer (org.apache.flink.api.common.typeutils.TypeSerializer): 1