Use of org.apache.flink.table.connector.source.DynamicTableSource in project flink by apache.
Class HBaseDynamicTableFactoryTest, method testLookupOptions.
@Test
public void testLookupOptions() {
    Map<String, String> options = getAllOptions();
    options.put("lookup.cache.max-rows", "1000");
    options.put("lookup.cache.ttl", "10s");
    options.put("lookup.max-retries", "10");

    ResolvedSchema schema =
            ResolvedSchema.of(
                    Column.physical(ROWKEY, STRING()),
                    Column.physical(FAMILY1, ROW(FIELD(COL1, DOUBLE()), FIELD(COL2, INT()))));

    DynamicTableSource source = createTableSource(schema, options);
    HBaseLookupOptions actual = ((HBaseDynamicTableSource) source).getLookupOptions();
    HBaseLookupOptions expected =
            HBaseLookupOptions.builder()
                    .setCacheMaxSize(1000)
                    .setCacheExpireMs(10_000)
                    .setMaxRetryTimes(10)
                    .build();
    assertEquals(expected, actual);
}
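For context, a minimal sketch of what a getAllOptions()-style helper might return here; the helper itself is not shown in this snippet. The keys 'connector', 'table-name', and 'zookeeper.quorum' are standard HBase connector options, while the concrete values (connector identifier, ZooKeeper address) are assumptions.

// Hypothetical helper: the base connector options that the lookup settings above extend.
private static Map<String, String> getAllOptions() {
    Map<String, String> options = new HashMap<>();
    options.put("connector", "hbase-2.2");              // assumed HBase 2.x connector identifier
    options.put("table-name", "testHBastTable");        // table name asserted elsewhere in this test class
    options.put("zookeeper.quorum", "localhost:2181");  // assumed ZooKeeper address
    return options;
}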
Use of org.apache.flink.table.connector.source.DynamicTableSource in project flink by apache.
Class HiveDynamicTableFactory, method createDynamicTableSource.
@Override
public DynamicTableSource createDynamicTableSource(Context context) {
    final ReadableConfig configuration =
            Configuration.fromMap(context.getCatalogTable().getOptions());
    final boolean isHiveTable = HiveCatalog.isHiveTable(context.getCatalogTable().getOptions());

    // We don't support temporary Hive tables yet.
    if (!isHiveTable || context.isTemporary()) {
        DynamicTableSource source =
                FactoryUtil.createDynamicTableSource(
                        null,
                        context.getObjectIdentifier(),
                        context.getCatalogTable(),
                        context.getConfiguration(),
                        context.getClassLoader(),
                        context.isTemporary());
        if (source instanceof RequireCatalogLock) {
            ((RequireCatalogLock) source).setLockFactory(HiveCatalogLock.createFactory(hiveConf));
        }
        return source;
    }

    final CatalogTable catalogTable = Preconditions.checkNotNull(context.getCatalogTable());
    final boolean isStreamingSource = configuration.get(STREAMING_SOURCE_ENABLE);
    final boolean includeAllPartition =
            STREAMING_SOURCE_PARTITION_INCLUDE
                    .defaultValue()
                    .equals(configuration.get(STREAMING_SOURCE_PARTITION_INCLUDE));
    final JobConf jobConf = JobConfUtils.createJobConfWithCredentials(hiveConf);

    if (isStreamingSource && includeAllPartition) {
        // Hive table source without lookup ability.
        return new HiveTableSource(
                jobConf,
                context.getConfiguration(),
                context.getObjectIdentifier().toObjectPath(),
                catalogTable);
    } else {
        // Hive table source with both scan and lookup ability.
        return new HiveLookupTableSource(
                jobConf,
                context.getConfiguration(),
                context.getObjectIdentifier().toObjectPath(),
                catalogTable);
    }
}
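The branch taken above is driven entirely by two table options. A minimal sketch, assuming the keys 'streaming-source.enable' and 'streaming-source.partition.include' back the STREAMING_SOURCE_ENABLE and STREAMING_SOURCE_PARTITION_INCLUDE constants (check HiveOptions for the exact keys in your Flink version):

// Illustrative option map for a Hive table (keys assumed, see note above).
Map<String, String> hiveTableOptions = new HashMap<>();
hiveTableOptions.put("streaming-source.enable", "true");            // isStreamingSource == true
hiveTableOptions.put("streaming-source.partition.include", "all");  // the default value, so includeAllPartition == true
// With this combination the factory returns a HiveTableSource (scan only);
// any other combination falls through to HiveLookupTableSource.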
Use of org.apache.flink.table.connector.source.DynamicTableSource in project flink by apache.
Class HBaseDynamicTableFactoryTest, method testTableSourceFactory.
@SuppressWarnings("rawtypes")
@Test
public void testTableSourceFactory() {
    ResolvedSchema schema =
            ResolvedSchema.of(
                    Column.physical(FAMILY1, ROW(FIELD(COL1, INT()))),
                    Column.physical(FAMILY2, ROW(FIELD(COL1, INT()), FIELD(COL2, BIGINT()))),
                    Column.physical(ROWKEY, BIGINT()),
                    Column.physical(
                            FAMILY3,
                            ROW(FIELD(COL1, DOUBLE()), FIELD(COL2, BOOLEAN()), FIELD(COL3, STRING()))),
                    Column.physical(
                            FAMILY4,
                            ROW(
                                    FIELD(COL1, DECIMAL(10, 3)),
                                    FIELD(COL2, TIMESTAMP(3)),
                                    FIELD(COL3, DATE()),
                                    FIELD(COL4, TIME()))));

    DynamicTableSource source = createTableSource(schema, getAllOptions());
    assertTrue(source instanceof HBaseDynamicTableSource);
    HBaseDynamicTableSource hbaseSource = (HBaseDynamicTableSource) source;

    // The row key is the third physical column, hence lookup key index 2.
    int[][] lookupKey = {{2}};
    LookupTableSource.LookupRuntimeProvider lookupProvider =
            hbaseSource.getLookupRuntimeProvider(new LookupRuntimeProviderContext(lookupKey));
    assertTrue(lookupProvider instanceof TableFunctionProvider);
    TableFunction tableFunction = ((TableFunctionProvider) lookupProvider).createTableFunction();
    assertTrue(tableFunction instanceof HBaseRowDataLookupFunction);
    assertEquals("testHBastTable", ((HBaseRowDataLookupFunction) tableFunction).getHTableName());

    HBaseTableSchema hbaseSchema = hbaseSource.getHBaseTableSchema();
    assertEquals(2, hbaseSchema.getRowKeyIndex());
    assertEquals(Optional.of(Types.LONG), hbaseSchema.getRowKeyTypeInfo());
    assertArrayEquals(new String[] {"f1", "f2", "f3", "f4"}, hbaseSchema.getFamilyNames());
    assertArrayEquals(new String[] {"c1"}, hbaseSchema.getQualifierNames("f1"));
    assertArrayEquals(new String[] {"c1", "c2"}, hbaseSchema.getQualifierNames("f2"));
    assertArrayEquals(new String[] {"c1", "c2", "c3"}, hbaseSchema.getQualifierNames("f3"));
    assertArrayEquals(new String[] {"c1", "c2", "c3", "c4"}, hbaseSchema.getQualifierNames("f4"));
    assertArrayEquals(new DataType[] {INT()}, hbaseSchema.getQualifierDataTypes("f1"));
    assertArrayEquals(new DataType[] {INT(), BIGINT()}, hbaseSchema.getQualifierDataTypes("f2"));
    assertArrayEquals(
            new DataType[] {DOUBLE(), BOOLEAN(), STRING()}, hbaseSchema.getQualifierDataTypes("f3"));
    assertArrayEquals(
            new DataType[] {DECIMAL(10, 3), TIMESTAMP(3), DATE(), TIME()},
            hbaseSchema.getQualifierDataTypes("f4"));
}
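In practice the same source is created through SQL DDL rather than a direct factory call. A minimal sketch of a CREATE TABLE statement matching the schema asserted above; the column name 'rowkey', the connector identifier 'hbase-2.2', and the ZooKeeper address are assumptions, since the test constants are not spelled out in this snippet.

TableEnvironment tableEnv =
        TableEnvironment.create(EnvironmentSettings.newInstance().inStreamingMode().build());
// The row key is declared as the third column, so a lookup join on it resolves to key index 2.
tableEnv.executeSql(
        "CREATE TABLE testHBastTable (\n"
                + "  f1 ROW<c1 INT>,\n"
                + "  f2 ROW<c1 INT, c2 BIGINT>,\n"
                + "  rowkey BIGINT,\n"
                + "  f3 ROW<c1 DOUBLE, c2 BOOLEAN, c3 STRING>,\n"
                + "  f4 ROW<c1 DECIMAL(10, 3), c2 TIMESTAMP(3), c3 DATE, c4 TIME>\n"
                + ") WITH (\n"
                + "  'connector' = 'hbase-2.2',\n"
                + "  'table-name' = 'testHBastTable',\n"
                + "  'zookeeper.quorum' = 'localhost:2181'\n"
                + ")");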
Use of org.apache.flink.table.connector.source.DynamicTableSource in project flink by apache.
Class RegistryAvroFormatFactoryTest, method testDeserializationSchema.
@Test
public void testDeserializationSchema() {
    final AvroRowDataDeserializationSchema expectedDeser =
            new AvroRowDataDeserializationSchema(
                    ConfluentRegistryAvroDeserializationSchema.forGeneric(
                            AvroSchemaConverter.convertToSchema(ROW_TYPE), REGISTRY_URL),
                    AvroToRowDataConverters.createRowConverter(ROW_TYPE),
                    InternalTypeInfo.of(ROW_TYPE));

    final DynamicTableSource actualSource = createTableSource(SCHEMA, getDefaultOptions());
    assertThat(actualSource, instanceOf(TestDynamicTableFactory.DynamicTableSourceMock.class));
    TestDynamicTableFactory.DynamicTableSourceMock scanSourceMock =
            (TestDynamicTableFactory.DynamicTableSourceMock) actualSource;
    DeserializationSchema<RowData> actualDeser =
            scanSourceMock.valueFormat.createRuntimeDecoder(
                    ScanRuntimeProviderContext.INSTANCE, SCHEMA.toPhysicalRowDataType());
    assertEquals(expectedDeser, actualDeser);
}
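For context, a sketch of what a getDefaultOptions()-style helper typically provides for this test; the exact option keys of the avro-confluent format vary between Flink versions, so every key and value below is an assumption to be checked against RegistryAvroFormatFactory and TestDynamicTableFactory.

// Hypothetical helper (not shown above): options selecting the mock connector and the Confluent Avro value format.
private static Map<String, String> getDefaultOptions() {
    Map<String, String> options = new HashMap<>();
    options.put("connector", "test-connector");                       // assumed identifier of TestDynamicTableFactory
    options.put("target", "MyTarget");                                 // assumed required option of the test connector
    options.put("format", "avro-confluent");                           // the format under test
    options.put("avro-confluent.schema-registry.url", REGISTRY_URL);   // assumed key; version dependent
    return options;
}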