Example usage of org.apache.flink.table.connector.source.abilities.SupportsReadingMetadata in the Apache Flink project.
From the matches method of the PushProjectIntoTableSourceScanRule class:
/**
 * Matches when the scan's underlying table source can still accept a push-down: either a
 * projection push-down (for physical columns) that has not been applied yet, or a metadata
 * projection that has not been applied yet and that the source explicitly allows.
 */
@Override
public boolean matches(RelOptRuleCall call) {
    final LogicalTableScan tableScan = call.rel(1);
    final TableSourceTable table = tableScan.getTable().unwrap(TableSourceTable.class);
    if (table == null) {
        // Not a TableSourceTable-backed scan; nothing to push down.
        return false;
    }
    final DynamicTableSource tableSource = table.tableSource();

    if (supportsProjectionPushDown(tableSource)) {
        // Applicable only while no projection has been pushed into the source yet.
        final boolean alreadyPushed =
                Arrays.stream(table.abilitySpecs())
                        .anyMatch(ProjectPushDownSpec.class::isInstance);
        return !alreadyPushed;
    }

    if (!supportsMetadata(tableSource)) {
        return false;
    }
    // Metadata-only case: applicable while metadata has not been applied yet and the
    // source opts into metadata projection.
    final boolean metadataApplied =
            Arrays.stream(table.abilitySpecs())
                    .anyMatch(ReadingMetadataSpec.class::isInstance);
    return !metadataApplied
            && ((SupportsReadingMetadata) tableSource).supportsMetadataProjection();
}
Example usage of org.apache.flink.table.connector.source.abilities.SupportsReadingMetadata in the Apache Flink project.
From the validateAndApplyMetadata method of the DynamicSourceUtils class:
/**
 * Validates all declared metadata columns of the resolved schema against the readable
 * metadata offered by the given source and, if valid, applies them to the source.
 *
 * <p>For each metadata column this checks that (1) its metadata key (explicit key, or the
 * column name as fallback) is listed by {@code listReadableMetadata()}, and (2) the declared
 * column type is castable from the metadata type provided by the connector. Finally, it
 * calls {@code applyReadableMetadata} with the required keys and the produced row type.
 *
 * @param tableDebugName human-readable table identifier used in error messages
 * @param schema resolved schema possibly containing metadata columns
 * @param source the table source that must implement {@link SupportsReadingMetadata}
 *     whenever metadata columns are declared
 */
private static void validateAndApplyMetadata(String tableDebugName, ResolvedSchema schema, DynamicTableSource source) {
final List<MetadataColumn> metadataColumns = extractMetadataColumns(schema);
// Nothing to validate or apply when the schema declares no metadata columns.
if (metadataColumns.isEmpty()) {
return;
}
if (!(source instanceof SupportsReadingMetadata)) {
// NOTE(review): this message reads "Table '%s'" but passes source.asSummaryString()
// while every other message in this method uses tableDebugName — confirm intended.
throw new ValidationException(String.format("Table '%s' declares metadata columns, but the underlying %s doesn't implement " + "the %s interface. Therefore, metadata cannot be read from the given source.", source.asSummaryString(), DynamicTableSource.class.getSimpleName(), SupportsReadingMetadata.class.getSimpleName()));
}
final SupportsReadingMetadata metadataSource = (SupportsReadingMetadata) source;
// Keys the connector can serve, mapped to the data type it produces for each key.
final Map<String, DataType> metadataMap = metadataSource.listReadableMetadata();
metadataColumns.forEach(c -> {
// An explicit metadata key takes precedence; otherwise the column name is the key.
final String metadataKey = c.getMetadataKey().orElse(c.getName());
final LogicalType metadataType = c.getDataType().getLogicalType();
final DataType expectedMetadataDataType = metadataMap.get(metadataKey);
// check that metadata key is valid
if (expectedMetadataDataType == null) {
throw new ValidationException(String.format("Invalid metadata key '%s' in column '%s' of table '%s'. " + "The %s class '%s' supports the following metadata keys for reading:\n%s", metadataKey, c.getName(), tableDebugName, DynamicTableSource.class.getSimpleName(), source.getClass().getName(), String.join("\n", metadataMap.keySet())));
}
// check that types are compatible
if (!supportsExplicitCast(expectedMetadataDataType.getLogicalType(), metadataType)) {
// Tailor the message: mention the key separately only when it differs from the name.
if (metadataKey.equals(c.getName())) {
throw new ValidationException(String.format("Invalid data type for metadata column '%s' of table '%s'. " + "The column cannot be declared as '%s' because the type must be " + "castable from metadata type '%s'.", c.getName(), tableDebugName, expectedMetadataDataType.getLogicalType(), metadataType));
} else {
throw new ValidationException(String.format("Invalid data type for metadata column '%s' with metadata key '%s' of table '%s'. " + "The column cannot be declared as '%s' because the type must be " + "castable from metadata type '%s'.", c.getName(), metadataKey, tableDebugName, expectedMetadataDataType.getLogicalType(), metadataType));
}
}
});
// Push the full set of required keys plus the produced row data type into the source.
metadataSource.applyReadableMetadata(createRequiredMetadataKeys(schema, source), TypeConversions.fromLogicalToDataType(createProducedType(schema, source)));
}
Example usage of org.apache.flink.table.connector.source.abilities.SupportsReadingMetadata in the Apache Flink project.
From the apply method of the ReadingMetadataSpec class:
/**
 * Re-applies the recorded metadata keys and produced type to the given table source,
 * e.g. when restoring a plan. Fails if the source does not implement
 * {@link SupportsReadingMetadata}.
 */
@Override
public void apply(DynamicTableSource tableSource, SourceAbilityContext context) {
    if (!(tableSource instanceof SupportsReadingMetadata)) {
        throw new TableException(
                String.format(
                        "%s does not support SupportsReadingMetadata.",
                        tableSource.getClass().getName()));
    }
    // The produced type must have been captured when this spec was created.
    checkArgument(getProducedType().isPresent());
    final DataType producedDataType =
            TypeConversions.fromLogicalToDataType(getProducedType().get());
    ((SupportsReadingMetadata) tableSource).applyReadableMetadata(metadataKeys, producedDataType);
}
Example usage of org.apache.flink.table.connector.source.abilities.SupportsReadingMetadata in the Apache Flink project.
From the pushMetadataProjection method of the DynamicSourceUtils class:
/**
 * Creates a projection that reorders physical and metadata columns according to the given
 * schema. Each metadata column is cast to its declared data type so that computed columns
 * (handled in a later step and therefore skipped here) can access it.
 *
 * @see SupportsReadingMetadata
 */
private static void pushMetadataProjection(FlinkRelBuilder relBuilder, ResolvedSchema schema) {
    final RexBuilder rexBuilder = relBuilder.getRexBuilder();
    // Computed columns are excluded from this projection entirely.
    final List<Column> projectedColumns =
            schema.getColumns().stream()
                    .filter(column -> !(column instanceof ComputedColumn))
                    .collect(Collectors.toList());
    final List<String> fieldNames =
            projectedColumns.stream().map(Column::getName).collect(Collectors.toList());
    final List<RexNode> fieldNodes =
            projectedColumns.stream()
                    .map(column -> {
                        final RelDataType fieldType =
                                relBuilder
                                        .getTypeFactory()
                                        .createFieldTypeFromLogicalType(
                                                column.getDataType().getLogicalType());
                        if (!(column instanceof MetadataColumn)) {
                            // Physical column: reference it unchanged.
                            return relBuilder.field(column.getName());
                        }
                        final MetadataColumn metadataColumn = (MetadataColumn) column;
                        final String metadataKey =
                                metadataColumn.getMetadataKey().orElse(metadataColumn.getName());
                        // Cast from the connector-provided metadata field to the declared type.
                        return rexBuilder.makeAbstractCast(fieldType, relBuilder.field(metadataKey));
                    })
                    .collect(Collectors.toList());
    relBuilder.projectNamed(fieldNodes, fieldNames, true);
}
Aggregations