Use of org.apache.flink.table.planner.plan.abilities.source.FilterPushDownSpec in project flink by apache: the resolveFiltersAndCreateTableSourceTable method of the class PushFilterIntoSourceScanRuleBase.
/**
* Resolves filters using the underlying source's {@link SupportsFilterPushDown} and creates a
* new {@link TableSourceTable} with the supplied predicates.
*
* @param convertiblePredicates Predicates to resolve
* @param oldTableSourceTable TableSourceTable to copy
* @param scan Underlying table scan to push to
* @param relBuilder Builder to push the scan to
* @return A tuple consisting of the resolved filters and the newly created {@link
* TableSourceTable}
*/
protected Tuple2<SupportsFilterPushDown.Result, TableSourceTable>
        resolveFiltersAndCreateTableSourceTable(
                RexNode[] convertiblePredicates,
                TableSourceTable oldTableSourceTable,
                TableScan scan,
                RelBuilder relBuilder) {
    // record the number of predicates before applyFilters, for updating the statistics
    int originPredicatesSize = convertiblePredicates.length;
    // copy the DynamicTableSource and push the predicates into the copy
    DynamicTableSource newTableSource = oldTableSourceTable.tableSource().copy();
    SupportsFilterPushDown.Result result =
            FilterPushDownSpec.apply(
                    Arrays.asList(convertiblePredicates),
                    newTableSource,
                    SourceAbilityContext.from(scan));
    relBuilder.push(scan);
    List<RexNode> acceptedPredicates =
            convertExpressionToRexNode(result.getAcceptedFilters(), relBuilder);
    FilterPushDownSpec filterPushDownSpec = new FilterPushDownSpec(acceptedPredicates);
    // record the number of remaining predicates after applyFilters, for updating the statistics
    int updatedPredicatesSize = result.getRemainingFilters().size();
    // copy the table with the new statistic, the new table source, and the source ability specs
    TableSourceTable newTableSourceTable =
            oldTableSourceTable.copy(
                    newTableSource,
                    getNewFlinkStatistic(
                            oldTableSourceTable, originPredicatesSize, updatedPredicatesSize),
                    new SourceAbilitySpec[] {filterPushDownSpec});
    return new Tuple2<>(result, newTableSourceTable);
}
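For context, a concrete rule extending PushFilterIntoSourceScanRuleBase would typically call this helper from its onMatch and replace the matched scan with one over the returned TableSourceTable. The following is a minimal sketch, not Flink's actual rule: splitConvertiblePredicates is a hypothetical stand-in for the planner's real predicate classification, and the remaining filters are handled conservatively by keeping the original Filter on top. Imports mirror the snippet above, plus the Calcite Filter, LogicalTableScan, and RelOptRuleCall classes.
// Illustrative sketch only, assumed to live inside a subclass of
// PushFilterIntoSourceScanRuleBase.
public void onMatch(RelOptRuleCall call) {
    Filter filter = call.rel(0);
    LogicalTableScan scan = call.rel(1);
    TableSourceTable oldTable = scan.getTable().unwrap(TableSourceTable.class);

    // hypothetical helper: predicates the source may be able to accept
    RexNode[] convertible = splitConvertiblePredicates(filter.getCondition());

    Tuple2<SupportsFilterPushDown.Result, TableSourceTable> pushdown =
            resolveFiltersAndCreateTableSourceTable(
                    convertible, oldTable, scan, call.builder());

    // rebuild the scan over the new TableSourceTable
    LogicalTableScan newScan =
            LogicalTableScan.create(scan.getCluster(), pushdown.f1, scan.getHints());

    // conservative: keep the original Filter on top; a production rule would
    // instead rebuild it from pushdown.f0.getRemainingFilters()
    call.transformTo(filter.copy(filter.getTraitSet(), newScan, filter.getCondition()));
}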
Use of org.apache.flink.table.planner.plan.abilities.source.FilterPushDownSpec in project flink by apache: the testDynamicTableSinkSpecSerde method of the class DynamicTableSourceSpecSerdeTest.
public static Stream<DynamicTableSourceSpec> testDynamicTableSinkSpecSerde() {
Map<String, String> options1 = new HashMap<>();
options1.put("connector", FileSystemTableFactory.IDENTIFIER);
options1.put("format", TestCsvFormatFactory.IDENTIFIER);
options1.put("path", "/tmp");
final ResolvedSchema resolvedSchema1 =
        new ResolvedSchema(
                Collections.singletonList(Column.physical("a", DataTypes.BIGINT())),
                Collections.emptyList(),
                null);
final CatalogTable catalogTable1 =
        CatalogTable.of(
                Schema.newBuilder().fromResolvedSchema(resolvedSchema1).build(),
                null,
                Collections.emptyList(),
                options1);
DynamicTableSourceSpec spec1 =
        new DynamicTableSourceSpec(
                ContextResolvedTable.temporary(
                        ObjectIdentifier.of(
                                DEFAULT_BUILTIN_CATALOG, DEFAULT_BUILTIN_DATABASE, "MyTable"),
                        new ResolvedCatalogTable(catalogTable1, resolvedSchema1)),
                null);
Map<String, String> options2 = new HashMap<>();
options2.put("connector", TestValuesTableFactory.IDENTIFIER);
options2.put("disable-lookup", "true");
options2.put("enable-watermark-push-down", "true");
options2.put("filterable-fields", "b");
options2.put("bounded", "false");
options2.put("readable-metadata", "m1:INT, m2:STRING");
final ResolvedSchema resolvedSchema2 =
        new ResolvedSchema(
                Arrays.asList(
                        Column.physical("a", DataTypes.BIGINT()),
                        Column.physical("b", DataTypes.INT()),
                        Column.physical("c", DataTypes.STRING()),
                        Column.physical("p", DataTypes.STRING()),
                        Column.metadata("m1", DataTypes.INT(), null, false),
                        Column.metadata("m2", DataTypes.STRING(), null, false),
                        Column.physical("ts", DataTypes.TIMESTAMP(3))),
                Collections.emptyList(),
                null);
final CatalogTable catalogTable2 =
        CatalogTable.of(
                Schema.newBuilder().fromResolvedSchema(resolvedSchema2).build(),
                null,
                Collections.emptyList(),
                options2);
FlinkTypeFactory factory = FlinkTypeFactory.INSTANCE();
RexBuilder rexBuilder = new RexBuilder(factory);
DynamicTableSourceSpec spec2 =
        new DynamicTableSourceSpec(
                ContextResolvedTable.temporary(
                        ObjectIdentifier.of(
                                DEFAULT_BUILTIN_CATALOG, DEFAULT_BUILTIN_DATABASE, "MyTable"),
                        new ResolvedCatalogTable(catalogTable2, resolvedSchema2)),
                Arrays.asList(
                        new ProjectPushDownSpec(
                                new int[][] {{0}, {1}, {4}, {6}},
                                RowType.of(
                                        new LogicalType[] {
                                                new BigIntType(), new IntType(),
                                                new IntType(), new TimestampType(3)},
                                        new String[] {"a", "b", "m1", "ts"})),
                        new ReadingMetadataSpec(
                                Arrays.asList("m1", "m2"),
                                RowType.of(
                                        new LogicalType[] {
                                                new BigIntType(), new IntType(),
                                                new IntType(), new TimestampType(3)},
                                        new String[] {"a", "b", "m1", "ts"})),
                        new FilterPushDownSpec(
                                Collections.singletonList(
                                        // b >= 10
                                        rexBuilder.makeCall(
                                                SqlStdOperatorTable.GREATER_THAN_OR_EQUAL,
                                                rexBuilder.makeInputRef(
                                                        factory.createSqlType(SqlTypeName.INTEGER),
                                                        1),
                                                rexBuilder.makeExactLiteral(
                                                        new BigDecimal(10))))),
                        new WatermarkPushDownSpec(
                                rexBuilder.makeCall(
                                        SqlStdOperatorTable.MINUS,
                                        rexBuilder.makeInputRef(
                                                factory.createSqlType(SqlTypeName.TIMESTAMP, 3), 3),
                                        rexBuilder.makeIntervalLiteral(
                                                BigDecimal.valueOf(1000),
                                                new SqlIntervalQualifier(
                                                        TimeUnit.SECOND, 2,
                                                        TimeUnit.SECOND, 6,
                                                        SqlParserPos.ZERO))),
                                5000,
                                RowType.of(
                                        new BigIntType(), new IntType(), new IntType(),
                                        new TimestampType(false, TimestampKind.ROWTIME, 3))),
                        new SourceWatermarkSpec(
                                true,
                                RowType.of(
                                        new BigIntType(), new IntType(), new IntType(),
                                        new TimestampType(false, TimestampKind.ROWTIME, 3))),
                        new LimitPushDownSpec(100),
                        new PartitionPushDownSpec(
                                Arrays.asList(
                                        new HashMap<String, String>() {
                                            {
                                                put("p", "A");
                                            }
                                        },
                                        new HashMap<String, String>() {
                                            {
                                                put("p", "B");
                                            }
                                        }))));
return Stream.of(spec1, spec2);
}
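Distilled from spec2 above, the following minimal sketch isolates the FilterPushDownSpec construction: the predicate b >= 10 is encoded as a Calcite RexNode, where input reference 1 points at column b (INT) of the source's row type and the literal is built from a BigDecimal. The method name is illustrative; imports mirror those used in the test.
// Minimal sketch: build the predicate "b >= 10" and wrap it in a
// FilterPushDownSpec, mirroring the spec2 construction above.
static FilterPushDownSpec bGreaterOrEqualTen() {
    FlinkTypeFactory factory = FlinkTypeFactory.INSTANCE();
    RexBuilder rexBuilder = new RexBuilder(factory);
    RexNode predicate =
            rexBuilder.makeCall(
                    SqlStdOperatorTable.GREATER_THAN_OR_EQUAL,
                    // input ref 1 = column "b" (INT) in the source row type
                    rexBuilder.makeInputRef(factory.createSqlType(SqlTypeName.INTEGER), 1),
                    rexBuilder.makeExactLiteral(new BigDecimal(10)));
    return new FilterPushDownSpec(Collections.singletonList(predicate));
}
Encoding the predicate as a RexNode is what lets the spec be persisted in a compiled plan and re-applied to the restored DynamicTableSource, which is the round trip this serde test exercises.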