Use of org.apache.flink.configuration.ReadableConfig in project flink by apache.
The class HiveDynamicTableFactory, method createDynamicTableSource.
@Override
public DynamicTableSource createDynamicTableSource(Context context) {
    final ReadableConfig configuration =
            Configuration.fromMap(context.getCatalogTable().getOptions());
    final boolean isHiveTable = HiveCatalog.isHiveTable(context.getCatalogTable().getOptions());
    // we don't support temporary hive tables yet
    if (!isHiveTable || context.isTemporary()) {
        DynamicTableSource source =
                FactoryUtil.createDynamicTableSource(
                        null,
                        context.getObjectIdentifier(),
                        context.getCatalogTable(),
                        context.getConfiguration(),
                        context.getClassLoader(),
                        context.isTemporary());
        if (source instanceof RequireCatalogLock) {
            ((RequireCatalogLock) source).setLockFactory(HiveCatalogLock.createFactory(hiveConf));
        }
        return source;
    }
    final CatalogTable catalogTable = Preconditions.checkNotNull(context.getCatalogTable());
    final boolean isStreamingSource = configuration.get(STREAMING_SOURCE_ENABLE);
    final boolean includeAllPartition =
            STREAMING_SOURCE_PARTITION_INCLUDE
                    .defaultValue()
                    .equals(configuration.get(STREAMING_SOURCE_PARTITION_INCLUDE));
    final JobConf jobConf = JobConfUtils.createJobConfWithCredentials(hiveConf);
    // hive table source that does not have lookup ability
    if (isStreamingSource && includeAllPartition) {
        return new HiveTableSource(
                jobConf,
                context.getConfiguration(),
                context.getObjectIdentifier().toObjectPath(),
                catalogTable);
    } else {
        // hive table source that has scan and lookup ability
        return new HiveLookupTableSource(
                jobConf,
                context.getConfiguration(),
                context.getObjectIdentifier().toObjectPath(),
                catalogTable);
    }
}
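The two STREAMING_SOURCE_* flags above are read from the catalog table's string options via Configuration.fromMap. As a minimal sketch of how such options resolve through a ReadableConfig (not the connector's own code; the option keys and defaults are re-declared here as assumptions mirroring the Hive connector's documented options):

// Sketch: catalog-table options are plain strings; Configuration.fromMap(...)
// exposes them through typed ConfigOptions, which is what the factory relies on.
import java.util.HashMap;
import java.util.Map;
import org.apache.flink.configuration.ConfigOption;
import org.apache.flink.configuration.ConfigOptions;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.ReadableConfig;

public class HiveStreamingOptionSketch {
    // Assumed re-declarations of the connector's STREAMING_SOURCE_* options.
    static final ConfigOption<Boolean> STREAMING_SOURCE_ENABLE =
            ConfigOptions.key("streaming-source.enable").booleanType().defaultValue(false);
    static final ConfigOption<String> STREAMING_SOURCE_PARTITION_INCLUDE =
            ConfigOptions.key("streaming-source.partition.include").stringType().defaultValue("all");

    public static void main(String[] args) {
        Map<String, String> tableOptions = new HashMap<>();
        tableOptions.put("streaming-source.enable", "true");

        ReadableConfig configuration = Configuration.fromMap(tableOptions);
        // true: the string value is parsed against the boolean option
        boolean isStreamingSource = configuration.get(STREAMING_SOURCE_ENABLE);
        // true: the key is absent, so get(...) returns the default ("all")
        boolean includeAllPartition =
                STREAMING_SOURCE_PARTITION_INCLUDE.defaultValue()
                        .equals(configuration.get(STREAMING_SOURCE_PARTITION_INCLUDE));
        System.out.println(isStreamingSource + " / " + includeAllPartition);
    }
}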
Use of org.apache.flink.configuration.ReadableConfig in project flink by apache.
The class HBase2DynamicTableFactory, method createDynamicTableSource.
@Override
public DynamicTableSource createDynamicTableSource(Context context) {
    TableFactoryHelper helper = createTableFactoryHelper(this, context);
    helper.validateExcept(PROPERTIES_PREFIX);
    final ReadableConfig tableOptions = helper.getOptions();
    validatePrimaryKey(context.getPhysicalRowDataType(), context.getPrimaryKeyIndexes());
    String tableName = tableOptions.get(TABLE_NAME);
    Configuration hbaseConf = getHBaseConfiguration(tableOptions);
    HBaseLookupOptions lookupOptions = getHBaseLookupOptions(tableOptions);
    String nullStringLiteral = tableOptions.get(NULL_STRING_LITERAL);
    HBaseTableSchema hbaseSchema = HBaseTableSchema.fromDataType(context.getPhysicalRowDataType());
    return new HBaseDynamicTableSource(
            hbaseConf, tableName, hbaseSchema, nullStringLiteral, lookupOptions);
}
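For context, a hedged usage sketch of where the options behind helper.getOptions() typically come from: a SQL DDL for the HBase connector. The identifier 'hbase-2.2' and the option keys below follow the connector's documented options, so treat them as assumptions rather than a verbatim excerpt.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class HBaseSourceDdlSketch {
    public static void main(String[] args) {
        TableEnvironment env = TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        // The WITH clause below is what ends up in helper.getOptions() as a ReadableConfig.
        env.executeSql(
                "CREATE TABLE hbase_source (\n"
                        + "  rowkey STRING,\n"
                        + "  cf ROW<q STRING>,\n"
                        + "  PRIMARY KEY (rowkey) NOT ENFORCED\n"
                        + ") WITH (\n"
                        + "  'connector' = 'hbase-2.2',\n"             // assumed factory identifier
                        + "  'table-name' = 'my_table',\n"             // read via TABLE_NAME above
                        + "  'zookeeper.quorum' = 'localhost:2181',\n" // consumed by getHBaseConfiguration
                        + "  'null-string-literal' = 'null'\n"         // read via NULL_STRING_LITERAL above
                        + ")");
    }
}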
Use of org.apache.flink.configuration.ReadableConfig in project flink by apache.
The class CsvFileFormatFactory, method buildCsvSchema.
private static CsvSchema buildCsvSchema(RowType rowType, ReadableConfig options) {
    final CsvSchema csvSchema = CsvRowSchemaConverter.convert(rowType);
    final CsvSchema.Builder csvBuilder = csvSchema.rebuild();
    // format properties
    options.getOptional(FIELD_DELIMITER)
            .map(s -> StringEscapeUtils.unescapeJava(s).charAt(0))
            .ifPresent(csvBuilder::setColumnSeparator);
    if (options.get(DISABLE_QUOTE_CHARACTER)) {
        csvBuilder.disableQuoteChar();
    } else {
        options.getOptional(QUOTE_CHARACTER)
                .map(s -> s.charAt(0))
                .ifPresent(csvBuilder::setQuoteChar);
    }
    options.getOptional(ALLOW_COMMENTS).ifPresent(csvBuilder::setAllowComments);
    options.getOptional(ARRAY_ELEMENT_DELIMITER).ifPresent(csvBuilder::setArrayElementSeparator);
    options.getOptional(ESCAPE_CHARACTER)
            .map(s -> s.charAt(0))
            .ifPresent(csvBuilder::setEscapeChar);
    options.getOptional(NULL_LITERAL).ifPresent(csvBuilder::setNullValue);
    return csvBuilder.build();
}
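The getOptional(...).map(...).ifPresent(...) chain above applies an optional format setting only when the user actually set it. A minimal, self-contained sketch of the same pattern; the option keys, types, and defaults here are illustrative assumptions, not the factory's real definitions.

import org.apache.flink.configuration.ConfigOption;
import org.apache.flink.configuration.ConfigOptions;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.ReadableConfig;

public class CsvOptionLookupSketch {
    // Assumed option definitions, for illustration only.
    static final ConfigOption<String> FIELD_DELIMITER =
            ConfigOptions.key("field-delimiter").stringType().defaultValue(",");
    static final ConfigOption<Boolean> DISABLE_QUOTE_CHARACTER =
            ConfigOptions.key("disable-quote-character").booleanType().defaultValue(false);

    public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.set(FIELD_DELIMITER, "|");
        ReadableConfig options = conf;

        // Key present: the mapped char is handed to the consumer (prints '|').
        options.getOptional(FIELD_DELIMITER)
                .map(s -> s.charAt(0))
                .ifPresent(c -> System.out.println("column separator: " + c));
        // Key absent: get(...) falls back to the declared default (false).
        System.out.println("quoting disabled: " + options.get(DISABLE_QUOTE_CHARACTER));
    }
}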
Use of org.apache.flink.configuration.ReadableConfig in project flink by apache.
The class CsvFileFormatFactory, method createEncodingFormat.
@Override
public EncodingFormat<Factory<RowData>> createEncodingFormat(
        DynamicTableFactory.Context context, ReadableConfig formatOptions) {
    return new EncodingFormat<BulkWriter.Factory<RowData>>() {

        @Override
        public BulkWriter.Factory<RowData> createRuntimeEncoder(
                DynamicTableSink.Context context, DataType physicalDataType) {
            final RowType rowType = (RowType) physicalDataType.getLogicalType();
            final CsvSchema schema = buildCsvSchema(rowType, formatOptions);
            final RowDataToCsvConverter converter =
                    RowDataToCsvConverters.createRowConverter(rowType);
            final CsvMapper mapper = new CsvMapper();
            final ObjectNode container = mapper.createObjectNode();
            final RowDataToCsvConverter.RowDataToCsvFormatConverterContext converterContext =
                    new RowDataToCsvConverter.RowDataToCsvFormatConverterContext(mapper, container);
            return out -> CsvBulkWriter.forSchema(mapper, schema, converter, converterContext, out);
        }

        @Override
        public ChangelogMode getChangelogMode() {
            return ChangelogMode.insertOnly();
        }
    };
}
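This EncodingFormat is typically discovered by a file-based connector; the formatOptions handed to createEncodingFormat are the table options with the format prefix stripped by the factory helper. A hedged DDL sketch of how such options would be supplied from SQL; the path and exact option keys are assumptions based on the documented CSV format options.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class CsvFileSinkDdlSketch {
    public static void main(String[] args) {
        TableEnvironment env = TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        env.executeSql(
                "CREATE TABLE csv_sink (\n"
                        + "  id BIGINT,\n"
                        + "  name STRING\n"
                        + ") WITH (\n"
                        + "  'connector' = 'filesystem',\n"
                        + "  'path' = 'file:///tmp/csv-out',\n" // assumed output location
                        + "  'format' = 'csv',\n"               // selects the CSV format factory
                        + "  'csv.field-delimiter' = ';'\n"     // arrives in formatOptions without the csv. prefix
                        + ")");
    }
}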
Use of org.apache.flink.configuration.ReadableConfig in project flink by apache.
The class SocketDynamicTableFactory, method createDynamicTableSource.
@Override
public DynamicTableSource createDynamicTableSource(Context context) {
// either implement your custom validation logic here ...
// or use the provided helper utility
final FactoryUtil.TableFactoryHelper helper = FactoryUtil.createTableFactoryHelper(this, context);
// discover a suitable decoding format
final DecodingFormat<DeserializationSchema<RowData>> decodingFormat = helper.discoverDecodingFormat(DeserializationFormatFactory.class, FactoryUtil.FORMAT);
// validate all options
helper.validate();
// get the validated options
final ReadableConfig options = helper.getOptions();
final String hostname = options.get(HOSTNAME);
final int port = options.get(PORT);
final byte byteDelimiter = (byte) (int) options.get(BYTE_DELIMITER);
// derive the produced data type (excluding computed columns) from the catalog table
final DataType producedDataType = context.getCatalogTable().getResolvedSchema().toPhysicalRowDataType();
// create and return dynamic table source
return new SocketDynamicTableSource(hostname, port, byteDelimiter, decodingFormat, producedDataType);
}
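The HOSTNAME, PORT and BYTE_DELIMITER constants read from the validated ReadableConfig above are ConfigOption definitions. A sketch of how they are typically declared; the keys and defaults below follow the ConfigOptions builder pattern and are assumptions, not the connector's verbatim definitions.

import org.apache.flink.configuration.ConfigOption;
import org.apache.flink.configuration.ConfigOptions;

public final class SocketConnectorOptions {
    public static final ConfigOption<String> HOSTNAME =
            ConfigOptions.key("hostname").stringType().noDefaultValue();
    public static final ConfigOption<Integer> PORT =
            ConfigOptions.key("port").intType().noDefaultValue();
    // default 10 = '\n', matching the (byte) cast applied in the factory
    public static final ConfigOption<Integer> BYTE_DELIMITER =
            ConfigOptions.key("byte-delimiter").intType().defaultValue(10);

    private SocketConnectorOptions() {}
}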