Use of org.apache.flink.table.connector.RequireCatalogLock in project flink by apache.

Example from the class HiveDynamicTableFactory, method createDynamicTableSource:
@Override
public DynamicTableSource createDynamicTableSource(Context context) {
    final ReadableConfig configuration =
            Configuration.fromMap(context.getCatalogTable().getOptions());
    final boolean isHiveTable = HiveCatalog.isHiveTable(context.getCatalogTable().getOptions());

    // we don't support temporary hive tables yet
    if (!isHiveTable || context.isTemporary()) {
        DynamicTableSource source = FactoryUtil.createDynamicTableSource(
                null, context.getObjectIdentifier(), context.getCatalogTable(),
                context.getConfiguration(), context.getClassLoader(), context.isTemporary());
        if (source instanceof RequireCatalogLock) {
            ((RequireCatalogLock) source).setLockFactory(HiveCatalogLock.createFactory(hiveConf));
        }
        return source;
    }

    final CatalogTable catalogTable = Preconditions.checkNotNull(context.getCatalogTable());
    final boolean isStreamingSource = configuration.get(STREAMING_SOURCE_ENABLE);
    final boolean includeAllPartition = STREAMING_SOURCE_PARTITION_INCLUDE.defaultValue()
            .equals(configuration.get(STREAMING_SOURCE_PARTITION_INCLUDE));
    final JobConf jobConf = JobConfUtils.createJobConfWithCredentials(hiveConf);

    // hive table source that has no lookup ability
    if (isStreamingSource && includeAllPartition) {
        return new HiveTableSource(jobConf, context.getConfiguration(),
                context.getObjectIdentifier().toObjectPath(), catalogTable);
    } else {
        // hive table source that has scan and lookup ability
        return new HiveLookupTableSource(jobConf, context.getConfiguration(),
                context.getObjectIdentifier().toObjectPath(), catalogTable);
    }
}
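For illustration, here is a minimal sketch (not part of the Flink sources) of a connector-side source that opts in to catalog locking. The factory above only calls setLockFactory on sources that implement RequireCatalogLock. The class name LockAwareTableSource and its field are hypothetical, and the CatalogLock.Factory parameter type and its org.apache.flink.table.catalog package are assumptions inferred from the HiveCatalogLock.createFactory(hiveConf) call shown above; check them against your Flink version.

// Hypothetical sketch, not from the Flink code base: a scan source that accepts the
// catalog lock factory injected by HiveDynamicTableFactory above.
import org.apache.flink.table.catalog.CatalogLock;
import org.apache.flink.table.connector.ChangelogMode;
import org.apache.flink.table.connector.RequireCatalogLock;
import org.apache.flink.table.connector.source.DynamicTableSource;
import org.apache.flink.table.connector.source.ScanTableSource;

public class LockAwareTableSource implements ScanTableSource, RequireCatalogLock {

    // lock factory handed over by the catalog/factory; may stay null for other catalogs
    private CatalogLock.Factory lockFactory;

    @Override
    public void setLockFactory(CatalogLock.Factory lockFactory) {
        this.lockFactory = lockFactory;
    }

    @Override
    public ChangelogMode getChangelogMode() {
        return ChangelogMode.insertOnly();
    }

    @Override
    public ScanRuntimeProvider getScanRuntimeProvider(ScanContext runtimeProviderContext) {
        // a real connector would build its runtime source here and could wrap
        // metastore-touching work in a lock created via lockFactory.create()
        throw new UnsupportedOperationException("sketch only");
    }

    @Override
    public DynamicTableSource copy() {
        LockAwareTableSource copy = new LockAwareTableSource();
        copy.lockFactory = lockFactory;
        return copy;
    }

    @Override
    public String asSummaryString() {
        return "LockAwareTableSource";
    }
}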
Example from the class HiveDynamicTableFactory, method createDynamicTableSink:
@Override
public DynamicTableSink createDynamicTableSink(Context context) {
    final boolean isHiveTable = HiveCatalog.isHiveTable(context.getCatalogTable().getOptions());

    // we don't support temporary hive tables yet
    if (!isHiveTable || context.isTemporary()) {
        DynamicTableSink sink = FactoryUtil.createDynamicTableSink(
                null, context.getObjectIdentifier(), context.getCatalogTable(),
                context.getConfiguration(), context.getClassLoader(), context.isTemporary());
        if (sink instanceof RequireCatalogLock) {
            ((RequireCatalogLock) sink).setLockFactory(HiveCatalogLock.createFactory(hiveConf));
        }
        return sink;
    }

    final Integer configuredParallelism = Configuration.fromMap(context.getCatalogTable().getOptions())
            .get(FileSystemConnectorOptions.SINK_PARALLELISM);
    final JobConf jobConf = JobConfUtils.createJobConfWithCredentials(hiveConf);

    return new HiveTableSink(context.getConfiguration(), jobConf, context.getObjectIdentifier(),
            context.getCatalogTable(), configuredParallelism);
}
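For context, a minimal usage sketch showing where the two factory methods above are exercised: once a HiveCatalog is registered and selected, reads against its tables go through createDynamicTableSource (HiveTableSource or HiveLookupTableSource) and writes through createDynamicTableSink (HiveTableSink). The catalog name, table names, and hive-conf directory below are placeholders, not taken from the Flink sources.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.catalog.hive.HiveCatalog;

public class HiveFactoryUsageSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv = TableEnvironment.create(
                EnvironmentSettings.newInstance().inStreamingMode().build());

        // placeholder names and hive-conf directory; point these at a real metastore setup
        HiveCatalog hiveCatalog = new HiveCatalog("myhive", "default", "/path/to/hive-conf");
        tEnv.registerCatalog("myhive", hiveCatalog);
        tEnv.useCatalog("myhive");

        // resolved through HiveDynamicTableFactory: the SELECT side uses the source
        // method shown above, the INSERT side uses the sink method
        tEnv.executeSql("INSERT INTO orders_copy SELECT * FROM orders");
    }
}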