Example usage from the Apache Beam project: class JdbcIOTest, method testCustomFluentBackOffConfiguration (illustrating java.sql usage, including JDBCType.NULL).
@Test
public void testCustomFluentBackOffConfiguration() throws Exception {
  String tableName = DatabaseTestHelper.getTestTableName("UT_FLUENT_BACKOFF");
  DatabaseTestHelper.createTable(DATA_SOURCE, tableName);
  // Hold a lock on the table from a dedicated connection so that the pipeline's writes
  // time out. try-with-resources guarantees the connection (and therefore the lock from
  // the uncommitted insert below) is released even when an assertion fails, instead of
  // leaking and poisoning later tests against the shared DataSource.
  try (Connection connection = DATA_SOURCE.getConnection()) {
    // Switch the table to table-level locking and take an exclusive lock.
    try (Statement lockStatement = connection.createStatement()) {
      lockStatement.execute("ALTER TABLE " + tableName + " LOCKSIZE TABLE");
      lockStatement.execute("LOCK TABLE " + tableName + " IN EXCLUSIVE MODE");
    }
    // Start a first transaction; the uncommitted insert keeps the exclusive lock held
    // while the pipeline runs.
    connection.setAutoCommit(false);
    try (PreparedStatement insertStatement =
        connection.prepareStatement("insert into " + tableName + " values(?, ?)")) {
      insertStatement.setInt(1, 1);
      insertStatement.setString(2, "TEST");
      insertStatement.execute();
    }
    pipeline
        .apply(Create.of(Collections.singletonList(KV.of(1, "TEST"))))
        .apply(
            JdbcIO.<KV<Integer, String>>write()
                .withDataSourceConfiguration(DATA_SOURCE_CONFIGURATION)
                .withStatement(String.format("insert into %s values(?, ?)", tableName))
                // Retry only on Derby's lock-timeout SQL state.
                .withRetryStrategy((JdbcIO.RetryStrategy) e -> "40XL1".equals(e.getSQLState()))
                .withRetryConfiguration(
                    JdbcIO.RetryConfiguration.create(2, null, Duration.standardSeconds(1)))
                .withPreparedStatementSetter(
                    (element, statement) -> {
                      statement.setInt(1, element.getKey());
                      statement.setString(2, element.getValue());
                    }));
    PipelineExecutionException exception =
        assertThrows(PipelineExecutionException.class, () -> pipeline.run().waitUntilFinish());
    // Finally commit the original connection, now that the pipeline has failed due to deadlock.
    connection.commit();
    assertThat(
        exception.getMessage(),
        containsString(
            "java.sql.BatchUpdateException: A lock could not be obtained within the time requested"));
  }
  // Verify that the pipeline retried the write twice, but encountered a deadlock every time.
  expectedLogs.verifyLogRecords(
      new TypeSafeMatcher<Iterable<LogRecord>>() {
        @Override
        public void describeTo(Description description) {}

        @Override
        protected boolean matchesSafely(Iterable<LogRecord> logRecords) {
          int count = 0;
          for (LogRecord logRecord : logRecords) {
            if (logRecord.getMessage().contains("Deadlock detected, retrying")) {
              count += 1;
            }
          }
          // Max retries will be 2 + the original deadlock error.
          return count == 3;
        }
      });
  // Since the pipeline was unable to write, only the row from insertStatement was written.
  assertRowCount(DATA_SOURCE, tableName, 1);
}
Example usage from the Apache Beam project: class JdbcIOTest, method testWriteWithWriteResults (illustrating java.sql usage, including JDBCType.NULL).
@Test
public void testWriteWithWriteResults() throws Exception {
  String tableName = DatabaseTestHelper.getTestTableName("UT_WRITE");
  DatabaseTestHelper.createTable(DATA_SOURCE, tableName);
  try {
    ArrayList<KV<Integer, String>> rows = getDataToWrite(EXPECTED_ROW_COUNT);
    PCollection<KV<Integer, String>> input = pipeline.apply(Create.of(rows));
    // Map each per-statement ResultSet to a TestDto; a missing/empty ResultSet maps
    // to the EMPTY_RESULT sentinel.
    PCollection<JdbcTestHelper.TestDto> results =
        input.apply(
            getJdbcWrite(tableName)
                .withWriteResults(
                    resultSet ->
                        (resultSet != null && resultSet.next())
                            ? new JdbcTestHelper.TestDto(resultSet.getInt(1))
                            : new JdbcTestHelper.TestDto(JdbcTestHelper.TestDto.EMPTY_RESULT)));
    results.setCoder(JdbcTestHelper.TEST_DTO_CODER);
    PAssert.thatSingleton(results.apply(Count.globally())).isEqualTo((long) EXPECTED_ROW_COUNT);
    // Every written row is expected to yield an EMPTY_RESULT dto.
    List<JdbcTestHelper.TestDto> expected = new ArrayList<>();
    int remaining = EXPECTED_ROW_COUNT;
    while (remaining-- > 0) {
      expected.add(new JdbcTestHelper.TestDto(JdbcTestHelper.TestDto.EMPTY_RESULT));
    }
    PAssert.that(results).containsInAnyOrder(expected);
    pipeline.run();
    assertRowCount(DATA_SOURCE, tableName, EXPECTED_ROW_COUNT);
  } finally {
    DatabaseTestHelper.deleteTable(DATA_SOURCE, tableName);
  }
}
Aggregations