Usage of org.apache.flink.connector.jdbc.JdbcTestFixture.TestEntry in the Apache Flink project.
Class JdbcOutputFormatTest, method testJdbcOutputFormat:
@Test
public void testJdbcOutputFormat() throws IOException, SQLException {
    // Connector options pointing at the embedded Derby test database and the output table.
    JdbcConnectorOptions jdbcOptions =
            JdbcConnectorOptions.builder()
                    .setDriverName(DERBY_EBOOKSHOP_DB.getDriverClass())
                    .setDBUrl(DERBY_EBOOKSHOP_DB.getUrl())
                    .setTableName(OUTPUT_TABLE)
                    .build();
    // Plain append DML (no key fields), writing all configured columns.
    JdbcDmlOptions dmlOptions =
            JdbcDmlOptions.builder()
                    .withTableName(jdbcOptions.getTableName())
                    .withDialect(jdbcOptions.getDialect())
                    .withFieldNames(fieldNames)
                    .build();
    outputFormat =
            new JdbcOutputFormatBuilder()
                    .setJdbcOptions(jdbcOptions)
                    .setFieldDataTypes(fieldDataTypes)
                    .setJdbcDmlOptions(dmlOptions)
                    .setJdbcExecutionOptions(JdbcExecutionOptions.builder().build())
                    .setRowDataTypeInfo(rowDataTypeInfo)
                    .build();
    // Fix: the format was previously initialized and opened twice in a row;
    // a single setRuntimeContext/open pair is sufficient, and the duplicate
    // open leaked the connection acquired by the first call.
    setRuntimeContext(outputFormat, true);
    outputFormat.open(0, 1);
    // Write every fixture entry, then close to flush any pending batch.
    for (TestEntry entry : TEST_DATA) {
        outputFormat.writeRecord(
                buildGenericData(entry.id, entry.title, entry.author, entry.price, entry.qty));
    }
    outputFormat.close();
    // Read the table back and verify each column of each inserted row, in order.
    try (Connection dbConn = DriverManager.getConnection(DERBY_EBOOKSHOP_DB.getUrl());
            PreparedStatement statement = dbConn.prepareStatement(SELECT_ALL_NEWBOOKS);
            ResultSet resultSet = statement.executeQuery()) {
        int recordCount = 0;
        while (resultSet.next()) {
            assertEquals(TEST_DATA[recordCount].id, resultSet.getObject("id"));
            assertEquals(TEST_DATA[recordCount].title, resultSet.getObject("title"));
            assertEquals(TEST_DATA[recordCount].author, resultSet.getObject("author"));
            assertEquals(TEST_DATA[recordCount].price, resultSet.getObject("price"));
            assertEquals(TEST_DATA[recordCount].qty, resultSet.getObject("qty"));
            recordCount++;
        }
        // All fixture rows must have been written exactly once.
        assertEquals(TEST_DATA.length, recordCount);
    }
}
Usage of org.apache.flink.connector.jdbc.JdbcTestFixture.TestEntry in the Apache Flink project.
Class JdbcRowOutputFormatTest, method testJdbcOutputFormat:
@Test
public void testJdbcOutputFormat() throws IOException, SQLException {
    // Assemble the row output format against the Derby bookshop test database.
    jdbcOutputFormat =
            JdbcRowOutputFormat.buildJdbcOutputFormat()
                    .setDrivername(DERBY_EBOOKSHOP_DB.getDriverClass())
                    .setDBUrl(DERBY_EBOOKSHOP_DB.getUrl())
                    .setQuery(String.format(INSERT_TEMPLATE, OUTPUT_TABLE))
                    .finish();
    setRuntimeContext(jdbcOutputFormat, true);
    jdbcOutputFormat.open(0, 1);

    // Write every fixture entry, then close to flush any pending batch.
    for (TestEntry entry : TEST_DATA) {
        jdbcOutputFormat.writeRecord(toRow(entry));
    }
    jdbcOutputFormat.close();

    // Read the table back and compare each column against the fixture, row by row.
    try (Connection connection = DriverManager.getConnection(DERBY_EBOOKSHOP_DB.getUrl());
            PreparedStatement stmt = connection.prepareStatement(SELECT_ALL_NEWBOOKS);
            ResultSet rs = stmt.executeQuery()) {
        int rowIndex = 0;
        while (rs.next()) {
            TestEntry expected = TEST_DATA[rowIndex];
            assertEquals(expected.id, rs.getObject("id"));
            assertEquals(expected.title, rs.getObject("title"));
            assertEquals(expected.author, rs.getObject("author"));
            assertEquals(expected.price, rs.getObject("price"));
            assertEquals(expected.qty, rs.getObject("qty"));
            rowIndex++;
        }
        // Every fixture row must be present exactly once.
        assertEquals(TEST_DATA.length, rowIndex);
    }
}
Usage of org.apache.flink.connector.jdbc.JdbcTestFixture.TestEntry in the Apache Flink project.
Class JdbcTableOutputFormatTest, method testJdbcOutputFormat:
@Test
public void testJdbcOutputFormat() throws Exception {
    // Connector + DML options for an upsert format keyed on keyFields.
    JdbcConnectorOptions connectorOptions =
            JdbcConnectorOptions.builder()
                    .setDBUrl(getDbMetadata().getUrl())
                    .setTableName(OUTPUT_TABLE)
                    .build();
    JdbcDmlOptions dmlOptions =
            JdbcDmlOptions.builder()
                    .withTableName(connectorOptions.getTableName())
                    .withDialect(connectorOptions.getDialect())
                    .withFieldNames(fieldNames)
                    .withKeyFields(keyFields)
                    .build();
    format =
            new TableJdbcUpsertOutputFormat(
                    new SimpleJdbcConnectionProvider(connectorOptions),
                    dmlOptions,
                    JdbcExecutionOptions.defaults());

    // Mock a runtime context that reports object reuse as enabled.
    RuntimeContext ctx = Mockito.mock(RuntimeContext.class);
    ExecutionConfig executionConfig = Mockito.mock(ExecutionConfig.class);
    doReturn(executionConfig).when(ctx).getExecutionConfig();
    doReturn(true).when(executionConfig).isObjectReuseEnabled();
    format.setRuntimeContext(ctx);
    format.open(0, 1);

    Row[] allRows = Arrays.stream(TEST_DATA).map(JdbcDataTestBase::toRow).toArray(Row[]::new);

    // Initial insert of every fixture entry.
    for (TestEntry entry : TEST_DATA) {
        format.writeRecord(Tuple2.of(true, toRow(entry)));
    }
    format.flush();
    check(allRows);

    // Upserting the same rows again must leave the table contents unchanged.
    for (TestEntry entry : TEST_DATA) {
        format.writeRecord(Tuple2.of(true, toRow(entry)));
    }
    format.flush();
    check(allRows);

    // Delete the first half of the rows; only the second half should remain.
    int half = TEST_DATA.length / 2;
    for (int i = 0; i < half; i++) {
        format.writeRecord(Tuple2.of(false, toRow(TEST_DATA[i])));
    }
    format.flush();
    check(
            Arrays.stream(TEST_DATA, half, TEST_DATA.length)
                    .map(JdbcDataTestBase::toRow)
                    .toArray(Row[]::new));
}
Usage of org.apache.flink.connector.jdbc.JdbcTestFixture.TestEntry in the Apache Flink project.
Class JdbcITCase, method selectBooks:
private List<TestEntry> selectBooks() throws SQLException {
    // Reads all rows of the input table in a read-only, read-committed session
    // and maps them to TestEntry fixtures.
    List<TestEntry> books = new ArrayList<>();
    String query = "select id, title, author, price, qty from " + INPUT_TABLE;
    try (Connection connection = DriverManager.getConnection(getDbMetadata().getUrl())) {
        connection.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED);
        connection.setReadOnly(true);
        try (Statement statement = connection.createStatement();
                ResultSet rs = statement.executeQuery(query)) {
            while (rs.next()) {
                // Each column may be SQL NULL, so read every value through getNullable.
                books.add(
                        new TestEntry(
                                getNullable(rs, r -> r.getInt(1)),
                                getNullable(rs, r -> r.getString(2)),
                                getNullable(rs, r -> r.getString(3)),
                                getNullable(rs, r -> r.getDouble(4)),
                                getNullable(rs, r -> r.getInt(5))));
            }
        }
    }
    return books;
}
Usage of org.apache.flink.connector.jdbc.JdbcTestFixture.TestEntry in the Apache Flink project.
Class JdbcXaSinkTestBase, method buildAndInit:
static JdbcXaSinkFunction<TestEntry> buildAndInit(int batchInterval, XaFacade xaFacade, XaSinkStateHandler state) throws Exception {
    // Builds a fully initialized XA sink: constructed, state-initialized
    // (non-restored), and opened with an empty configuration.
    JdbcXaSinkFunction<TestEntry> xaSink =
            buildSink(new SemanticXidGenerator(), xaFacade, state, batchInterval);
    xaSink.initializeState(buildInitCtx(false));
    xaSink.open(new Configuration());
    return xaSink;
}
Aggregations