Example usage of org.apache.flink.connector.jdbc.internal.options.JdbcDmlOptions in the Apache Flink project.
Source: class JdbcOutputFormatTest, method testFlushWithBatchSizeEqualsZero.
/** Verifies that with a batch size of 0 no records are flushed to the database automatically. */
@Test
public void testFlushWithBatchSizeEqualsZero() throws SQLException, IOException {
    final JdbcConnectorOptions connectorOptions =
            JdbcConnectorOptions.builder()
                    .setDriverName(DERBY_EBOOKSHOP_DB.getDriverClass())
                    .setDBUrl(DERBY_EBOOKSHOP_DB.getUrl())
                    .setTableName(OUTPUT_TABLE_2)
                    .build();
    final JdbcDmlOptions dml =
            JdbcDmlOptions.builder()
                    .withTableName(connectorOptions.getTableName())
                    .withDialect(connectorOptions.getDialect())
                    .withFieldNames(fieldNames)
                    .build();
    final JdbcExecutionOptions execOptions =
            JdbcExecutionOptions.builder().withBatchSize(0).build();
    outputFormat =
            new JdbcOutputFormatBuilder()
                    .setJdbcOptions(connectorOptions)
                    .setFieldDataTypes(fieldDataTypes)
                    .setJdbcDmlOptions(dml)
                    .setJdbcExecutionOptions(execOptions)
                    .setRowDataTypeInfo(rowDataTypeInfo)
                    .build();
    setRuntimeContext(outputFormat, true);
    try (Connection connection = DriverManager.getConnection(DERBY_EBOOKSHOP_DB.getUrl());
            PreparedStatement selectAll = connection.prepareStatement(SELECT_ALL_NEWBOOKS_2)) {
        outputFormat.open(0, 1);
        int written = 0;
        while (written < 2) {
            outputFormat.writeRecord(
                    buildGenericData(
                            TEST_DATA[written].id,
                            TEST_DATA[written].title,
                            TEST_DATA[written].author,
                            TEST_DATA[written].price,
                            TEST_DATA[written].qty));
            ++written;
        }
        // Nothing may have reached the table yet: batch size 0 disables automatic flushing.
        try (ResultSet rs = selectAll.executeQuery()) {
            assertFalse(rs.next());
        }
    } finally {
        outputFormat.close();
    }
}
Example usage of org.apache.flink.connector.jdbc.internal.options.JdbcDmlOptions in the Apache Flink project.
Source: class JdbcOutputFormatTest, method testInvalidDriver.
/** Opening the output format with a non-existent JDBC driver class must fail with an IOException. */
@Test
public void testInvalidDriver() {
    final String expectedMsg = "unable to open JDBC writer";
    try {
        JdbcConnectorOptions connectorOptions =
                JdbcConnectorOptions.builder()
                        .setDriverName("org.apache.derby.jdbc.idontexist")
                        .setDBUrl(DERBY_EBOOKSHOP_DB.getUrl())
                        .setTableName(INPUT_TABLE)
                        .build();
        JdbcDmlOptions dml =
                JdbcDmlOptions.builder()
                        .withTableName(connectorOptions.getTableName())
                        .withDialect(connectorOptions.getDialect())
                        .withFieldNames(fieldNames)
                        .build();
        outputFormat =
                new JdbcOutputFormatBuilder()
                        .setJdbcOptions(connectorOptions)
                        .setFieldDataTypes(fieldDataTypes)
                        .setJdbcDmlOptions(dml)
                        .setJdbcExecutionOptions(JdbcExecutionOptions.builder().build())
                        .build();
        outputFormat.open(0, 1);
        fail("Expected exception is not thrown.");
    } catch (Exception e) {
        // The driver-loading failure must surface as an IOException with the expected message.
        assertTrue(findThrowable(e, IOException.class).isPresent());
        assertTrue(findThrowableWithMessage(e, expectedMsg).isPresent());
    }
}
Example usage of org.apache.flink.connector.jdbc.internal.options.JdbcDmlOptions in the Apache Flink project.
Source: class JdbcOutputFormatBuilder, method createBufferReduceExecutor.
/**
 * Creates a batch statement executor that buffers upsert/delete changes and reduces them by
 * primary key before flushing, so only the last pending change per key reaches the database.
 *
 * @param opt DML options; must carry key fields (the primary key) for the reduce-by-key logic
 * @param ctx runtime context, used to read the execution config (object-reuse flag)
 * @param rowDataTypeInfo type information used to create a serializer for defensive row copies
 * @param fieldTypes logical types of all fields, in field order
 * @return a buffer-reducing executor combining an upsert executor and a delete executor
 * @throws IllegalArgumentException if no key fields are configured
 */
private static JdbcBatchStatementExecutor<RowData> createBufferReduceExecutor(
        JdbcDmlOptions opt,
        RuntimeContext ctx,
        TypeInformation<RowData> rowDataTypeInfo,
        LogicalType[] fieldTypes) {
    // Fail fast with a diagnostic: the reduce step groups pending changes by primary key.
    checkArgument(
            opt.getKeyFields().isPresent(),
            "Key fields must be configured to create a buffer-reduce executor.");
    JdbcDialect dialect = opt.getDialect();
    String tableName = opt.getTableName();
    String[] pkNames = opt.getKeyFields().get();
    // Map each key field name to its position within the full field list.
    int[] pkFields =
            Arrays.stream(pkNames)
                    .mapToInt(Arrays.asList(opt.getFieldNames())::indexOf)
                    .toArray();
    LogicalType[] pkTypes =
            Arrays.stream(pkFields).mapToObj(f -> fieldTypes[f]).toArray(LogicalType[]::new);
    final TypeSerializer<RowData> typeSerializer =
            rowDataTypeInfo.createSerializer(ctx.getExecutionConfig());
    // With object reuse enabled, rows must be copied before buffering; otherwise the
    // identity transform suffices since each incoming row is a distinct object.
    final Function<RowData, RowData> valueTransform =
            ctx.getExecutionConfig().isObjectReuseEnabled()
                    ? typeSerializer::copy
                    : Function.identity();
    return new TableBufferReducedStatementExecutor(
            createUpsertRowExecutor(
                    dialect, tableName, opt.getFieldNames(), fieldTypes, pkFields, pkNames, pkTypes),
            createDeleteExecutor(dialect, tableName, pkNames, pkTypes),
            createRowKeyExtractor(fieldTypes, pkFields),
            valueTransform);
}
Example usage of org.apache.flink.connector.jdbc.internal.options.JdbcDmlOptions in the Apache Flink project.
Source: class JdbcTableOutputFormatTest, method testJdbcOutputFormat.
/** End-to-end check of TableJdbcUpsertOutputFormat: insert, overwrite, then delete records. */
@Test
public void testJdbcOutputFormat() throws Exception {
    JdbcConnectorOptions connOptions =
            JdbcConnectorOptions.builder()
                    .setDBUrl(getDbMetadata().getUrl())
                    .setTableName(OUTPUT_TABLE)
                    .build();
    JdbcDmlOptions dml =
            JdbcDmlOptions.builder()
                    .withTableName(connOptions.getTableName())
                    .withDialect(connOptions.getDialect())
                    .withFieldNames(fieldNames)
                    .withKeyFields(keyFields)
                    .build();
    format =
            new TableJdbcUpsertOutputFormat(
                    new SimpleJdbcConnectionProvider(connOptions),
                    dml,
                    JdbcExecutionOptions.defaults());
    // Mock a runtime context with object reuse enabled (forces defensive copies).
    RuntimeContext context = Mockito.mock(RuntimeContext.class);
    ExecutionConfig config = Mockito.mock(ExecutionConfig.class);
    doReturn(config).when(context).getExecutionConfig();
    doReturn(true).when(config).isObjectReuseEnabled();
    format.setRuntimeContext(context);
    format.open(0, 1);

    // Initial upsert of every test record.
    for (int i = 0; i < TEST_DATA.length; i++) {
        format.writeRecord(Tuple2.of(true, toRow(TEST_DATA[i])));
    }
    format.flush();
    check(Arrays.stream(TEST_DATA).map(JdbcDataTestBase::toRow).toArray(Row[]::new));

    // override: upserting the same keys again must leave the table contents unchanged.
    for (int i = 0; i < TEST_DATA.length; i++) {
        format.writeRecord(Tuple2.of(true, toRow(TEST_DATA[i])));
    }
    format.flush();
    check(Arrays.stream(TEST_DATA).map(JdbcDataTestBase::toRow).toArray(Row[]::new));

    // delete: remove the first half, expect only the second half to remain.
    int half = TEST_DATA.length / 2;
    for (int i = 0; i < half; i++) {
        format.writeRecord(Tuple2.of(false, toRow(TEST_DATA[i])));
    }
    Row[] remaining =
            Arrays.stream(TEST_DATA).skip(half).map(JdbcDataTestBase::toRow).toArray(Row[]::new);
    format.flush();
    check(remaining);
}
Example usage of org.apache.flink.connector.jdbc.internal.options.JdbcDmlOptions in the Apache Flink project.
Source: class JdbcTableOutputFormatTest, method testUpsertFormatCloseBeforeOpen.
/** FLINK-17544: closing the upsert format before it was ever opened must not throw an NPE. */
@Test
public void testUpsertFormatCloseBeforeOpen() throws Exception {
    JdbcConnectorOptions connOptions =
            JdbcConnectorOptions.builder()
                    .setDBUrl(getDbMetadata().getUrl())
                    .setTableName(OUTPUT_TABLE)
                    .build();
    JdbcDmlOptions dml =
            JdbcDmlOptions.builder()
                    .withTableName(connOptions.getTableName())
                    .withDialect(connOptions.getDialect())
                    .withFieldNames(fieldNames)
                    .withKeyFields(keyFields)
                    .build();
    format =
            new TableJdbcUpsertOutputFormat(
                    new SimpleJdbcConnectionProvider(connOptions),
                    dml,
                    JdbcExecutionOptions.defaults());
    // Close without a preceding open(); this must be a safe no-op.
    format.close();
}
End of aggregated usage examples for JdbcDmlOptions.