Use of org.apache.flink.connector.jdbc.internal.options.JdbcConnectorOptions in project flink by splunk: class JdbcDynamicTableFactoryTest, method testJdbcLookupProperties.
@Test
public void testJdbcLookupProperties() {
    Map<String, String> properties = getAllOptions();
    properties.put("lookup.cache.max-rows", "1000");
    properties.put("lookup.cache.ttl", "10s");
    properties.put("lookup.max-retries", "10");

    DynamicTableSource actual = createTableSource(SCHEMA, properties);

    JdbcConnectorOptions options = JdbcConnectorOptions.builder()
            .setDBUrl("jdbc:derby:memory:mydb").setTableName("mytable").build();
    JdbcLookupOptions lookupOptions = JdbcLookupOptions.builder()
            .setCacheMaxSize(1000).setCacheExpireMs(10_000).setMaxRetryTimes(10).build();
    JdbcDynamicTableSource expected = new JdbcDynamicTableSource(
            options, JdbcReadOptions.builder().build(), lookupOptions, SCHEMA.toPhysicalRowDataType());

    assertEquals(expected, actual);
}
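The getAllOptions() helper is not shown in this excerpt. Judging from the connector options the tests expect (URL jdbc:derby:memory:mydb, table mytable), it presumably supplies the base properties required by the JDBC table factory. A minimal sketch, assuming the standard 'connector', 'url', and 'table-name' keys; the real helper may set more:

private Map<String, String> getAllOptions() {
    // Hypothetical reconstruction; the actual helper is not part of this excerpt.
    // Assumes java.util.Map and java.util.HashMap are imported in the test class.
    Map<String, String> options = new HashMap<>();
    options.put("connector", "jdbc");
    options.put("url", "jdbc:derby:memory:mydb");
    options.put("table-name", "mytable");
    return options;
}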
Use of JdbcConnectorOptions in project flink by splunk: class JdbcDynamicTableFactoryTest, method testJdbcLookupPropertiesWithExcludeEmptyResult.
@Test
public void testJdbcLookupPropertiesWithExcludeEmptyResult() {
    Map<String, String> properties = getAllOptions();
    properties.put("lookup.cache.max-rows", "1000");
    properties.put("lookup.cache.ttl", "10s");
    properties.put("lookup.max-retries", "10");
    properties.put("lookup.cache.caching-missing-key", "true");

    DynamicTableSource actual = createTableSource(SCHEMA, properties);

    JdbcConnectorOptions options = JdbcConnectorOptions.builder()
            .setDBUrl("jdbc:derby:memory:mydb").setTableName("mytable").build();
    JdbcLookupOptions lookupOptions = JdbcLookupOptions.builder()
            .setCacheMaxSize(1000)
            .setCacheExpireMs(10_000)
            .setMaxRetryTimes(10)
            .setCacheMissingKey(true)
            .build();
    JdbcDynamicTableSource expected = new JdbcDynamicTableSource(
            options, JdbcReadOptions.builder().build(), lookupOptions, SCHEMA.toPhysicalRowDataType());

    assertEquals(expected, actual);
}
Use of JdbcConnectorOptions in project flink by splunk: class JdbcDynamicTableFactoryTest, method testJdbcCommonProperties.
@Test
public void testJdbcCommonProperties() {
    Map<String, String> properties = getAllOptions();
    properties.put("driver", "org.apache.derby.jdbc.EmbeddedDriver");
    properties.put("username", "user");
    properties.put("password", "pass");
    properties.put("connection.max-retry-timeout", "120s");

    // validation for source
    DynamicTableSource actualSource = createTableSource(SCHEMA, properties);
    JdbcConnectorOptions options = JdbcConnectorOptions.builder()
            .setDBUrl("jdbc:derby:memory:mydb")
            .setTableName("mytable")
            .setDriverName("org.apache.derby.jdbc.EmbeddedDriver")
            .setUsername("user")
            .setPassword("pass")
            .setConnectionCheckTimeoutSeconds(120)
            .build();
    JdbcLookupOptions lookupOptions = JdbcLookupOptions.builder()
            .setCacheMaxSize(-1).setCacheExpireMs(10_000).setMaxRetryTimes(3).build();
    JdbcDynamicTableSource expectedSource = new JdbcDynamicTableSource(
            options, JdbcReadOptions.builder().build(), lookupOptions, SCHEMA.toPhysicalRowDataType());
    assertEquals(expectedSource, actualSource);

    // validation for sink
    DynamicTableSink actualSink = createTableSink(SCHEMA, properties);
    // default flush configurations
    JdbcExecutionOptions executionOptions = JdbcExecutionOptions.builder()
            .withBatchSize(100).withBatchIntervalMs(1000).withMaxRetries(3).build();
    JdbcDmlOptions dmlOptions = JdbcDmlOptions.builder()
            .withTableName(options.getTableName())
            .withDialect(options.getDialect())
            .withFieldNames(SCHEMA.getColumnNames().toArray(new String[0]))
            .withKeyFields("bbb", "aaa")
            .build();
    JdbcDynamicTableSink expectedSink = new JdbcDynamicTableSink(
            options, executionOptions, dmlOptions, SCHEMA.toPhysicalRowDataType());
    assertEquals(expectedSink, actualSink);
}
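For context, the property keys exercised above are the same ones a user would place in the WITH clause of a JDBC table DDL, which is what the dynamic table factory ultimately parses. A minimal sketch of that usage through a TableEnvironment; the column list is purely illustrative and is not taken from the SCHEMA fixture used by the tests:

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class JdbcDdlExample {
    public static void main(String[] args) {
        TableEnvironment tEnv =
                TableEnvironment.create(EnvironmentSettings.newInstance().inStreamingMode().build());
        // Placeholder columns; only the WITH options mirror the properties tested above.
        tEnv.executeSql(
                "CREATE TABLE mytable (\n"
                        + "  aaa STRING,\n"
                        + "  bbb STRING\n"
                        + ") WITH (\n"
                        + "  'connector' = 'jdbc',\n"
                        + "  'url' = 'jdbc:derby:memory:mydb',\n"
                        + "  'table-name' = 'mytable',\n"
                        + "  'driver' = 'org.apache.derby.jdbc.EmbeddedDriver',\n"
                        + "  'username' = 'user',\n"
                        + "  'password' = 'pass',\n"
                        + "  'connection.max-retry-timeout' = '120s'\n"
                        + ")");
    }
}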
Use of JdbcConnectorOptions in project flink by splunk: class JdbcDynamicTableFactoryTest, method testJdbcSinkProperties.
@Test
public void testJdbcSinkProperties() {
    Map<String, String> properties = getAllOptions();
    properties.put("sink.buffer-flush.max-rows", "1000");
    properties.put("sink.buffer-flush.interval", "2min");
    properties.put("sink.max-retries", "5");

    DynamicTableSink actual = createTableSink(SCHEMA, properties);

    JdbcConnectorOptions options = JdbcConnectorOptions.builder()
            .setDBUrl("jdbc:derby:memory:mydb").setTableName("mytable").build();
    JdbcExecutionOptions executionOptions = JdbcExecutionOptions.builder()
            .withBatchSize(1000).withBatchIntervalMs(120_000).withMaxRetries(5).build();
    JdbcDmlOptions dmlOptions = JdbcDmlOptions.builder()
            .withTableName(options.getTableName())
            .withDialect(options.getDialect())
            .withFieldNames(SCHEMA.getColumnNames().toArray(new String[0]))
            .withKeyFields("bbb", "aaa")
            .build();
    JdbcDynamicTableSink expected = new JdbcDynamicTableSink(
            options, executionOptions, dmlOptions, SCHEMA.toPhysicalRowDataType());

    assertEquals(expected, actual);
}
Use of JdbcConnectorOptions in project flink by splunk: class JdbcOutputFormatTest, method testExceptionOnInvalidType.
@Test
public void testExceptionOnInvalidType() {
    try {
        JdbcConnectorOptions jdbcOptions = JdbcConnectorOptions.builder()
                .setDriverName(DERBY_EBOOKSHOP_DB.getDriverClass())
                .setDBUrl(DERBY_EBOOKSHOP_DB.getUrl())
                .setTableName(OUTPUT_TABLE)
                .build();
        JdbcDmlOptions dmlOptions = JdbcDmlOptions.builder()
                .withTableName(jdbcOptions.getTableName())
                .withDialect(jdbcOptions.getDialect())
                .withFieldNames(fieldNames)
                .build();
        outputFormat = new JdbcOutputFormatBuilder()
                .setJdbcOptions(jdbcOptions)
                .setFieldDataTypes(fieldDataTypes)
                .setJdbcDmlOptions(dmlOptions)
                .setJdbcExecutionOptions(JdbcExecutionOptions.builder().build())
                .setRowDataTypeInfo(rowDataTypeInfo)
                .build();
        setRuntimeContext(outputFormat, false);
        outputFormat.open(0, 1);
        TestEntry entry = TEST_DATA[0];
        // The 0L in the fourth field deliberately does not match that column's declared type, so the write should fail.
        RowData row = buildGenericData(entry.id, entry.title, entry.author, 0L, entry.qty);
        outputFormat.writeRecord(row);
        outputFormat.close();
        fail("Expected exception is not thrown.");
    } catch (Exception e) {
        assertTrue(findThrowable(e, ClassCastException.class).isPresent());
    }
}