Use of org.apache.flink.connector.jdbc.internal.options.JdbcConnectorOptions in project flink by apache.
From the class JdbcDynamicTableFactoryTest, method testJdbcLookupPropertiesWithExcludeEmptyResult.
@Test
public void testJdbcLookupPropertiesWithExcludeEmptyResult() {
    Map<String, String> properties = getAllOptions();
    properties.put("lookup.cache.max-rows", "1000");
    properties.put("lookup.cache.ttl", "10s");
    properties.put("lookup.max-retries", "10");
    properties.put("lookup.cache.caching-missing-key", "true");
    DynamicTableSource actual = createTableSource(SCHEMA, properties);
    JdbcConnectorOptions options =
            JdbcConnectorOptions.builder().setDBUrl("jdbc:derby:memory:mydb").setTableName("mytable").build();
    JdbcLookupOptions lookupOptions =
            JdbcLookupOptions.builder()
                    .setCacheMaxSize(1000)
                    .setCacheExpireMs(10_000)
                    .setMaxRetryTimes(10)
                    .setCacheMissingKey(true)
                    .build();
    JdbcDynamicTableSource expected =
            new JdbcDynamicTableSource(
                    options, JdbcReadOptions.builder().build(), lookupOptions, SCHEMA.toPhysicalRowDataType());
    assertEquals(expected, actual);
}
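All of the factory tests on this page start from a shared getAllOptions() helper whose body is not part of the excerpt. The sketch below is an assumption: it infers the base options (connector, URL, table name) from the JdbcConnectorOptions expected in the assertion above.
// Hypothetical reconstruction of getAllOptions(); the exact contents are an assumption.
private static Map<String, String> getAllOptions() {
    Map<String, String> options = new HashMap<>(); // java.util.Map / java.util.HashMap
    options.put("connector", "jdbc");
    options.put("url", "jdbc:derby:memory:mydb");  // matches the expected setDBUrl(...) above
    options.put("table-name", "mytable");          // matches the expected setTableName(...) above
    return options;
}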
Use of org.apache.flink.connector.jdbc.internal.options.JdbcConnectorOptions in project flink by apache.
From the class JdbcDynamicTableFactoryTest, method testJdbcLookupProperties.
@Test
public void testJdbcLookupProperties() {
    Map<String, String> properties = getAllOptions();
    properties.put("lookup.cache.max-rows", "1000");
    properties.put("lookup.cache.ttl", "10s");
    properties.put("lookup.max-retries", "10");
    DynamicTableSource actual = createTableSource(SCHEMA, properties);
    JdbcConnectorOptions options =
            JdbcConnectorOptions.builder().setDBUrl("jdbc:derby:memory:mydb").setTableName("mytable").build();
    JdbcLookupOptions lookupOptions =
            JdbcLookupOptions.builder().setCacheMaxSize(1000).setCacheExpireMs(10_000).setMaxRetryTimes(10).build();
    JdbcDynamicTableSource expected =
            new JdbcDynamicTableSource(
                    options, JdbcReadOptions.builder().build(), lookupOptions, SCHEMA.toPhysicalRowDataType());
    assertEquals(expected, actual);
}
Use of org.apache.flink.connector.jdbc.internal.options.JdbcConnectorOptions in project flink by apache.
From the class JdbcDynamicTableFactoryTest, method testJdbcCommonProperties.
@Test
public void testJdbcCommonProperties() {
    Map<String, String> properties = getAllOptions();
    properties.put("driver", "org.apache.derby.jdbc.EmbeddedDriver");
    properties.put("username", "user");
    properties.put("password", "pass");
    properties.put("connection.max-retry-timeout", "120s");

    // validation for source
    DynamicTableSource actualSource = createTableSource(SCHEMA, properties);
    JdbcConnectorOptions options =
            JdbcConnectorOptions.builder()
                    .setDBUrl("jdbc:derby:memory:mydb")
                    .setTableName("mytable")
                    .setDriverName("org.apache.derby.jdbc.EmbeddedDriver")
                    .setUsername("user")
                    .setPassword("pass")
                    .setConnectionCheckTimeoutSeconds(120)
                    .build();
    JdbcLookupOptions lookupOptions =
            JdbcLookupOptions.builder().setCacheMaxSize(-1).setCacheExpireMs(10_000).setMaxRetryTimes(3).build();
    JdbcDynamicTableSource expectedSource =
            new JdbcDynamicTableSource(
                    options, JdbcReadOptions.builder().build(), lookupOptions, SCHEMA.toPhysicalRowDataType());
    assertEquals(expectedSource, actualSource);

    // validation for sink
    DynamicTableSink actualSink = createTableSink(SCHEMA, properties);
    // default flush configurations
    JdbcExecutionOptions executionOptions =
            JdbcExecutionOptions.builder().withBatchSize(100).withBatchIntervalMs(1000).withMaxRetries(3).build();
    JdbcDmlOptions dmlOptions =
            JdbcDmlOptions.builder()
                    .withTableName(options.getTableName())
                    .withDialect(options.getDialect())
                    .withFieldNames(SCHEMA.getColumnNames().toArray(new String[0]))
                    .withKeyFields("bbb", "aaa")
                    .build();
    JdbcDynamicTableSink expectedSink =
            new JdbcDynamicTableSink(options, executionOptions, dmlOptions, SCHEMA.toPhysicalRowDataType());
    assertEquals(expectedSink, actualSink);
}
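The "default flush configurations" asserted above (batch size 100, flush interval 1000 ms, 3 retries) correspond to the connector's sink options. As a minimal sketch, assuming the standard Flink JDBC option names, setting them explicitly should produce an equivalent sink:
// Assumed option keys: sink.buffer-flush.max-rows, sink.buffer-flush.interval, sink.max-retries.
Map<String, String> sinkProperties = getAllOptions();
sinkProperties.put("sink.buffer-flush.max-rows", "100");
sinkProperties.put("sink.buffer-flush.interval", "1s");
sinkProperties.put("sink.max-retries", "3");
DynamicTableSink explicitSink = createTableSink(SCHEMA, sinkProperties);
// Expected to equal a sink built with the default JdbcExecutionOptions shown above.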
Use of org.apache.flink.connector.jdbc.internal.options.JdbcConnectorOptions in project flink by apache.
From the class JdbcDynamicTableFactoryTest, method testJDBCSinkWithParallelism.
@Test
public void testJDBCSinkWithParallelism() {
    Map<String, String> properties = getAllOptions();
    properties.put("sink.parallelism", "2");
    DynamicTableSink actual = createTableSink(SCHEMA, properties);
    JdbcConnectorOptions options =
            JdbcConnectorOptions.builder().setDBUrl("jdbc:derby:memory:mydb").setTableName("mytable").setParallelism(2).build();
    JdbcExecutionOptions executionOptions =
            JdbcExecutionOptions.builder().withBatchSize(100).withBatchIntervalMs(1000).withMaxRetries(3).build();
    JdbcDmlOptions dmlOptions =
            JdbcDmlOptions.builder()
                    .withTableName(options.getTableName())
                    .withDialect(options.getDialect())
                    .withFieldNames(SCHEMA.getColumnNames().toArray(new String[0]))
                    .withKeyFields("bbb", "aaa")
                    .build();
    JdbcDynamicTableSink expected =
            new JdbcDynamicTableSink(options, executionOptions, dmlOptions, SCHEMA.toPhysicalRowDataType());
    assertEquals(expected, actual);
}
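Here the 'sink.parallelism' table option surfaces as setParallelism(2) on the connector options. A minimal follow-up check, assuming JdbcConnectorOptions exposes a getParallelism() accessor, could be:
// Assumption: getParallelism() returns the configured sink parallelism as an Integer.
assertEquals(Integer.valueOf(2), options.getParallelism());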
Use of org.apache.flink.connector.jdbc.internal.options.JdbcConnectorOptions in project flink by apache.
From the class JdbcOutputFormatTest, method testJdbcOutputFormat.
@Test
public void testJdbcOutputFormat() throws IOException, SQLException {
    JdbcConnectorOptions jdbcOptions =
            JdbcConnectorOptions.builder().setDriverName(DERBY_EBOOKSHOP_DB.getDriverClass()).setDBUrl(DERBY_EBOOKSHOP_DB.getUrl()).setTableName(OUTPUT_TABLE).build();
    JdbcDmlOptions dmlOptions =
            JdbcDmlOptions.builder().withTableName(jdbcOptions.getTableName()).withDialect(jdbcOptions.getDialect()).withFieldNames(fieldNames).build();
    outputFormat =
            new JdbcOutputFormatBuilder()
                    .setJdbcOptions(jdbcOptions)
                    .setFieldDataTypes(fieldDataTypes)
                    .setJdbcDmlOptions(dmlOptions)
                    .setJdbcExecutionOptions(JdbcExecutionOptions.builder().build())
                    .setRowDataTypeInfo(rowDataTypeInfo)
                    .build();
    setRuntimeContext(outputFormat, true);
    outputFormat.open(0, 1);
    for (TestEntry entry : TEST_DATA) {
        outputFormat.writeRecord(buildGenericData(entry.id, entry.title, entry.author, entry.price, entry.qty));
    }
    outputFormat.close();

    try (Connection dbConn = DriverManager.getConnection(DERBY_EBOOKSHOP_DB.getUrl());
            PreparedStatement statement = dbConn.prepareStatement(SELECT_ALL_NEWBOOKS);
            ResultSet resultSet = statement.executeQuery()) {
        int recordCount = 0;
        while (resultSet.next()) {
            assertEquals(TEST_DATA[recordCount].id, resultSet.getObject("id"));
            assertEquals(TEST_DATA[recordCount].title, resultSet.getObject("title"));
            assertEquals(TEST_DATA[recordCount].author, resultSet.getObject("author"));
            assertEquals(TEST_DATA[recordCount].price, resultSet.getObject("price"));
            assertEquals(TEST_DATA[recordCount].qty, resultSet.getObject("qty"));
            recordCount++;
        }
        assertEquals(TEST_DATA.length, recordCount);
    }
}
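The buildGenericData(...) helper used in the write loop is not included in the excerpt. A minimal sketch, assuming it simply packs the raw test fields into Flink's internal GenericRowData representation (wrapping strings as StringData), might look like this:
// Hypothetical helper; uses org.apache.flink.table.data.{RowData, GenericRowData, StringData}.
private static RowData buildGenericData(Object... fields) {
    GenericRowData row = new GenericRowData(fields.length);
    for (int i = 0; i < fields.length; i++) {
        Object value = fields[i];
        // Flink's internal row format stores strings as StringData rather than java.lang.String.
        row.setField(i, value instanceof String ? StringData.fromString((String) value) : value);
    }
    return row;
}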