
Example 1 with JdbcBatchStatementExecutor

Use of org.apache.flink.connector.jdbc.internal.executor.JdbcBatchStatementExecutor in the project flink by apache.

From the class JdbcOutputFormatBuilder, the method createBufferReduceExecutor:

private static JdbcBatchStatementExecutor<RowData> createBufferReduceExecutor(
        JdbcDmlOptions opt, RuntimeContext ctx, TypeInformation<RowData> rowDataTypeInfo, LogicalType[] fieldTypes) {
    checkArgument(opt.getKeyFields().isPresent());
    JdbcDialect dialect = opt.getDialect();
    String tableName = opt.getTableName();
    String[] pkNames = opt.getKeyFields().get();
    // resolve the primary-key names to their positions and types in the full field list
    int[] pkFields = Arrays.stream(pkNames).mapToInt(Arrays.asList(opt.getFieldNames())::indexOf).toArray();
    LogicalType[] pkTypes = Arrays.stream(pkFields).mapToObj(f -> fieldTypes[f]).toArray(LogicalType[]::new);
    final TypeSerializer<RowData> typeSerializer = rowDataTypeInfo.createSerializer(ctx.getExecutionConfig());
    // copy incoming rows when object reuse is enabled, otherwise pass them through unchanged
    final Function<RowData, RowData> valueTransform =
            ctx.getExecutionConfig().isObjectReuseEnabled() ? typeSerializer::copy : Function.identity();
    // reduce buffered changes per key, then delegate to dedicated upsert and delete executors
    return new TableBufferReducedStatementExecutor(
            createUpsertRowExecutor(dialect, tableName, opt.getFieldNames(), fieldTypes, pkFields, pkNames, pkTypes),
            createDeleteExecutor(dialect, tableName, pkNames, pkTypes),
            createRowKeyExtractor(fieldTypes, pkFields),
            valueTransform);
}
Also used : DataType(org.apache.flink.table.types.DataType) Arrays(java.util.Arrays) RuntimeContext(org.apache.flink.api.common.functions.RuntimeContext) JdbcExecutionOptions(org.apache.flink.connector.jdbc.JdbcExecutionOptions) JdbcDmlOptions(org.apache.flink.connector.jdbc.internal.options.JdbcDmlOptions) JdbcRowConverter(org.apache.flink.connector.jdbc.converter.JdbcRowConverter) RowType(org.apache.flink.table.types.logical.RowType) Function(java.util.function.Function) FieldNamedPreparedStatement(org.apache.flink.connector.jdbc.statement.FieldNamedPreparedStatement) RowData.createFieldGetter(org.apache.flink.table.data.RowData.createFieldGetter) TableSimpleStatementExecutor(org.apache.flink.connector.jdbc.internal.executor.TableSimpleStatementExecutor) GenericRowData(org.apache.flink.table.data.GenericRowData) JdbcBatchStatementExecutor(org.apache.flink.connector.jdbc.internal.executor.JdbcBatchStatementExecutor) TableBufferReducedStatementExecutor(org.apache.flink.connector.jdbc.internal.executor.TableBufferReducedStatementExecutor) TableBufferedStatementExecutor(org.apache.flink.connector.jdbc.internal.executor.TableBufferedStatementExecutor) SimpleJdbcConnectionProvider(org.apache.flink.connector.jdbc.internal.connection.SimpleJdbcConnectionProvider) Preconditions.checkNotNull(org.apache.flink.util.Preconditions.checkNotNull) TypeInformation(org.apache.flink.api.common.typeinfo.TypeInformation) TypeSerializer(org.apache.flink.api.common.typeutils.TypeSerializer) RowData(org.apache.flink.table.data.RowData) JdbcOutputFormat(org.apache.flink.connector.jdbc.internal.JdbcOutputFormat) JdbcConnectorOptions(org.apache.flink.connector.jdbc.internal.options.JdbcConnectorOptions) Serializable(java.io.Serializable) LogicalType(org.apache.flink.table.types.logical.LogicalType) Preconditions.checkArgument(org.apache.flink.util.Preconditions.checkArgument) TableInsertOrUpdateStatementExecutor(org.apache.flink.connector.jdbc.internal.executor.TableInsertOrUpdateStatementExecutor) JdbcDialect(org.apache.flink.connector.jdbc.dialect.JdbcDialect)
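
The two stream pipelines in createBufferReduceExecutor (resolving primary-key names to field positions, and choosing between a defensive copy and an identity transform depending on object reuse) are easier to see in isolation. The following is a minimal, self-contained sketch of the same idioms using plain Java types; the field names, key names, and the StringBuilder stand-in for TypeSerializer#copy are invented for illustration and are not part of the Flink API.

import java.util.Arrays;
import java.util.function.Function;

public class PkFieldIndexSketch {

    public static void main(String[] args) {
        // hypothetical table layout, for illustration only
        String[] fieldNames = {"id", "name", "ts"};
        String[] pkNames = {"id", "ts"};

        // same idiom as createBufferReduceExecutor: resolve key names to positions
        int[] pkFields = Arrays.stream(pkNames)
                .mapToInt(Arrays.asList(fieldNames)::indexOf)
                .toArray();
        System.out.println(Arrays.toString(pkFields)); // prints [0, 2]

        // same idiom as valueTransform: copy when objects are reused, else pass through
        boolean objectReuseEnabled = true;
        Function<StringBuilder, StringBuilder> valueTransform = objectReuseEnabled
                ? sb -> new StringBuilder(sb) // stand-in for TypeSerializer#copy
                : Function.identity();

        StringBuilder reused = new StringBuilder("row-1");
        StringBuilder safeCopy = valueTransform.apply(reused);
        reused.setLength(0); // mutating the reused object does not affect the copy
        System.out.println(safeCopy); // prints row-1
    }
}

Running the sketch prints [0, 2] and row-1, showing why the copy is needed when upstream operators reuse the same row object between invocations.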

Example 2 with JdbcBatchStatementExecutor

Use of org.apache.flink.connector.jdbc.internal.executor.JdbcBatchStatementExecutor in the project flink by apache.

From the class JdbcTableOutputFormatTest, the method testDeleteExecutorUpdatedOnReconnect:

/**
 * Test that the delete executor in {@link TableJdbcUpsertOutputFormat} is updated when {@link
 * JdbcOutputFormat#attemptFlush()} fails.
 */
@Test
public void testDeleteExecutorUpdatedOnReconnect() throws Exception {
    // first fail flush from the main executor
    boolean[] exceptionThrown = { false };
    // then record whether the delete executor was updated
    // and check it on the next flush attempt
    boolean[] deleteExecutorPrepared = { false };
    boolean[] deleteExecuted = { false };
    format = new TableJdbcUpsertOutputFormat(new SimpleJdbcConnectionProvider(JdbcConnectorOptions.builder().setDBUrl(getDbMetadata().getUrl()).setTableName(OUTPUT_TABLE).build()) {

        @Override
        public boolean isConnectionValid() throws SQLException {
            // trigger reconnect and re-prepare on flush failure
            return false;
        }
    }, JdbcExecutionOptions.builder()
            .withMaxRetries(1)
            // disable periodic flush
            .withBatchIntervalMs(Long.MAX_VALUE)
            .build(), ctx -> new JdbcBatchStatementExecutor<Row>() {

        @Override
        public void executeBatch() throws SQLException {
            if (!exceptionThrown[0]) {
                exceptionThrown[0] = true;
                throw new SQLException();
            }
        }

        @Override
        public void prepareStatements(Connection connection) {
        }

        @Override
        public void addToBatch(Row record) {
        }

        @Override
        public void closeStatements() {
        }
    }, ctx -> new JdbcBatchStatementExecutor<Row>() {

        @Override
        public void prepareStatements(Connection connection) {
            if (exceptionThrown[0]) {
                deleteExecutorPrepared[0] = true;
            }
        }

        @Override
        public void addToBatch(Row record) {
        }

        @Override
        public void executeBatch() {
            deleteExecuted[0] = true;
        }

        @Override
        public void closeStatements() {
        }
    });
    RuntimeContext context = Mockito.mock(RuntimeContext.class);
    ExecutionConfig config = Mockito.mock(ExecutionConfig.class);
    doReturn(config).when(context).getExecutionConfig();
    doReturn(true).when(config).isObjectReuseEnabled();
    format.setRuntimeContext(context);
    format.open(0, 1);
    // false = delete
    format.writeRecord(Tuple2.of(false, toRow(TEST_DATA[0])));
    format.flush();
    assertTrue("Delete should be executed", deleteExecuted[0]);
    assertTrue("Delete executor should be prepared" + exceptionThrown[0], deleteExecutorPrepared[0]);
}
Also used : Arrays(java.util.Arrays) TEST_DATA(org.apache.flink.connector.jdbc.JdbcTestFixture.TEST_DATA) TestEntry(org.apache.flink.connector.jdbc.JdbcTestFixture.TestEntry) RuntimeContext(org.apache.flink.api.common.functions.RuntimeContext) JdbcExecutionOptions(org.apache.flink.connector.jdbc.JdbcExecutionOptions) Connection(java.sql.Connection) Tuple2(org.apache.flink.api.java.tuple.Tuple2) JdbcDmlOptions(org.apache.flink.connector.jdbc.internal.options.JdbcDmlOptions) ArrayList(java.util.ArrayList) SQLException(java.sql.SQLException) JdbcBatchStatementExecutor(org.apache.flink.connector.jdbc.internal.executor.JdbcBatchStatementExecutor) ResultSet(java.sql.ResultSet) After(org.junit.After) Assert.assertArrayEquals(org.junit.Assert.assertArrayEquals) SimpleJdbcConnectionProvider(org.apache.flink.connector.jdbc.internal.connection.SimpleJdbcConnectionProvider) Mockito.doReturn(org.mockito.Mockito.doReturn) Before(org.junit.Before) Assert.assertTrue(org.junit.Assert.assertTrue) Test(org.junit.Test) PreparedStatement(java.sql.PreparedStatement) JdbcConnectorOptions(org.apache.flink.connector.jdbc.internal.options.JdbcConnectorOptions) JdbcDataTestBase(org.apache.flink.connector.jdbc.JdbcDataTestBase) Mockito(org.mockito.Mockito) List(java.util.List) ExecutionConfig(org.apache.flink.api.common.ExecutionConfig) Statement(java.sql.Statement) Row(org.apache.flink.types.Row) OUTPUT_TABLE(org.apache.flink.connector.jdbc.JdbcTestFixture.OUTPUT_TABLE) DriverManager(java.sql.DriverManager)
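
The anonymous executors in the test above only stub out the interface to observe reconnect behavior. Below is a minimal sketch of what a working JdbcBatchStatementExecutor<Row> might look like, assuming the four methods exercised in the test (prepareStatements, addToBatch, executeBatch, closeStatements) may each throw SQLException; the table name and column layout are invented for illustration and are not taken from the Flink sources.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

import org.apache.flink.connector.jdbc.internal.executor.JdbcBatchStatementExecutor;
import org.apache.flink.types.Row;

/**
 * A minimal custom executor that batches plain INSERTs for a hypothetical
 * two-column table t_output(id, title).
 */
public class SimpleInsertExecutor implements JdbcBatchStatementExecutor<Row> {

    private transient PreparedStatement statement;

    @Override
    public void prepareStatements(Connection connection) throws SQLException {
        // (re)create the statement; called again after the output format reconnects
        statement = connection.prepareStatement("INSERT INTO t_output (id, title) VALUES (?, ?)");
    }

    @Override
    public void addToBatch(Row record) throws SQLException {
        statement.setObject(1, record.getField(0));
        statement.setObject(2, record.getField(1));
        statement.addBatch();
    }

    @Override
    public void executeBatch() throws SQLException {
        // submit everything buffered since the last flush
        statement.executeBatch();
    }

    @Override
    public void closeStatements() throws SQLException {
        if (statement != null) {
            statement.close();
            statement = null;
        }
    }
}

A class like this could be returned from the executor factory lambdas (ctx -> ...) that the test passes to TableJdbcUpsertOutputFormat; on flush failure the format would reconnect and call prepareStatements again, which is exactly the behavior the test asserts for the delete executor.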

Aggregations

Arrays (java.util.Arrays): 2
RuntimeContext (org.apache.flink.api.common.functions.RuntimeContext): 2
JdbcExecutionOptions (org.apache.flink.connector.jdbc.JdbcExecutionOptions): 2
SimpleJdbcConnectionProvider (org.apache.flink.connector.jdbc.internal.connection.SimpleJdbcConnectionProvider): 2
JdbcBatchStatementExecutor (org.apache.flink.connector.jdbc.internal.executor.JdbcBatchStatementExecutor): 2
JdbcConnectorOptions (org.apache.flink.connector.jdbc.internal.options.JdbcConnectorOptions): 2
JdbcDmlOptions (org.apache.flink.connector.jdbc.internal.options.JdbcDmlOptions): 2
Serializable (java.io.Serializable): 1
Connection (java.sql.Connection): 1
DriverManager (java.sql.DriverManager): 1
PreparedStatement (java.sql.PreparedStatement): 1
ResultSet (java.sql.ResultSet): 1
SQLException (java.sql.SQLException): 1
Statement (java.sql.Statement): 1
ArrayList (java.util.ArrayList): 1
List (java.util.List): 1
Function (java.util.function.Function): 1
ExecutionConfig (org.apache.flink.api.common.ExecutionConfig): 1
TypeInformation (org.apache.flink.api.common.typeinfo.TypeInformation): 1
TypeSerializer (org.apache.flink.api.common.typeutils.TypeSerializer): 1