use of org.apache.flink.table.connector.source.DynamicTableSource in project flink by apache.
the class FilterPushDownSpec method apply.
public static SupportsFilterPushDown.Result apply(
        List<RexNode> predicates, DynamicTableSource tableSource, SourceAbilityContext context) {
    if (tableSource instanceof SupportsFilterPushDown) {
        RexNodeToExpressionConverter converter =
                new RexNodeToExpressionConverter(
                        new RexBuilder(FlinkTypeFactory.INSTANCE()),
                        context.getSourceRowType().getFieldNames().toArray(new String[0]),
                        context.getFunctionCatalog(),
                        context.getCatalogManager(),
                        TimeZone.getTimeZone(context.getTableConfig().getLocalTimeZone()));
        List<Expression> filters =
                predicates.stream()
                        .map(p -> {
                            scala.Option<ResolvedExpression> expr = p.accept(converter);
                            if (expr.isDefined()) {
                                return expr.get();
                            } else {
                                throw new TableException(String.format(
                                        "%s can not be converted to Expression, please make sure %s can accept %s.",
                                        p.toString(), tableSource.getClass().getSimpleName(), p.toString()));
                            }
                        })
                        .collect(Collectors.toList());
        ExpressionResolver resolver =
                ExpressionResolver.resolverFor(
                                context.getTableConfig(),
                                name -> Optional.empty(),
                                context.getFunctionCatalog().asLookup(str -> {
                                    throw new TableException(
                                            "We should not need to lookup any expressions at this point");
                                }),
                                context.getCatalogManager().getDataTypeFactory(),
                                (sqlExpression, inputRowType, outputType) -> {
                                    throw new TableException(
                                            "SQL expression parsing is not supported at this location.");
                                })
                        .build();
        return ((SupportsFilterPushDown) tableSource).applyFilters(resolver.resolve(filters));
    } else {
        throw new TableException(String.format(
                "%s does not support SupportsFilterPushDown.", tableSource.getClass().getName()));
    }
}
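For context, the connector side of this applyFilters call can look roughly like the sketch below: a DynamicTableSource opts into push-down by also implementing SupportsFilterPushDown and reports which of the resolved predicates it accepted. This is a minimal, hypothetical sketch; the class name FilteringTableSource and its policy of accepting every filter are illustrative, not taken from Flink.
import java.util.ArrayList;
import java.util.List;

import org.apache.flink.table.connector.ChangelogMode;
import org.apache.flink.table.connector.source.DynamicTableSource;
import org.apache.flink.table.connector.source.ScanTableSource;
import org.apache.flink.table.connector.source.abilities.SupportsFilterPushDown;
import org.apache.flink.table.expressions.ResolvedExpression;

public class FilteringTableSource implements ScanTableSource, SupportsFilterPushDown {

    // predicates this source promises to evaluate itself (illustrative policy: accept all)
    private final List<ResolvedExpression> pushedFilters = new ArrayList<>();

    @Override
    public Result applyFilters(List<ResolvedExpression> filters) {
        pushedFilters.addAll(filters);
        // first argument: filters handled by the source; second: filters the planner must
        // keep in a Filter node above the scan
        return Result.of(new ArrayList<>(filters), new ArrayList<>());
    }

    @Override
    public ChangelogMode getChangelogMode() {
        return ChangelogMode.insertOnly();
    }

    @Override
    public ScanRuntimeProvider getScanRuntimeProvider(ScanContext runtimeProviderContext) {
        throw new UnsupportedOperationException("runtime provider omitted in this sketch");
    }

    @Override
    public DynamicTableSource copy() {
        FilteringTableSource copy = new FilteringTableSource();
        copy.pushedFilters.addAll(pushedFilters);
        return copy;
    }

    @Override
    public String asSummaryString() {
        return "FilteringTableSource";
    }
}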
use of org.apache.flink.table.connector.source.DynamicTableSource in project flink by apache.
the class FactoryUtilTest method testDiscoveryForSeparateSourceSinkFactory.
@Test
public void testDiscoveryForSeparateSourceSinkFactory() {
    final Map<String, String> options = createAllOptions();
    // the "test" source and sink factories are not implemented in a single factory class;
    // see TestDynamicTableSinkFactory and TestDynamicTableSourceFactory
    options.put("connector", "test");

    final DynamicTableSource actualSource = createTableSource(SCHEMA, options);
    final DynamicTableSource expectedSource =
            new DynamicTableSourceMock(
                    "MyTarget", null, new DecodingFormatMock(",", false), new DecodingFormatMock("|", true));
    assertThat(actualSource).isEqualTo(expectedSource);

    final DynamicTableSink actualSink = createTableSink(SCHEMA, options);
    final DynamicTableSink expectedSink =
            new DynamicTableSinkMock(
                    "MyTarget", 1000L, new EncodingFormatMock(","), new EncodingFormatMock("|"));
    assertThat(actualSink).isEqualTo(expectedSink);
}
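For reference, a separate source factory of this kind can look roughly like the following sketch. It is hypothetical (MyTableSourceFactory and the TARGET option are illustrative names, not Flink classes); the real TestDynamicTableSourceFactory lives in Flink's test sources. FactoryUtil matches the 'connector' option against factoryIdentifier() of the factories registered in META-INF/services/org.apache.flink.table.factories.Factory.
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

import org.apache.flink.configuration.ConfigOption;
import org.apache.flink.configuration.ConfigOptions;
import org.apache.flink.table.connector.source.DynamicTableSource;
import org.apache.flink.table.factories.DynamicTableSourceFactory;
import org.apache.flink.table.factories.FactoryUtil;

public class MyTableSourceFactory implements DynamicTableSourceFactory {

    // illustrative option; not one of Flink's built-in options
    public static final ConfigOption<String> TARGET =
            ConfigOptions.key("target").stringType().noDefaultValue();

    @Override
    public String factoryIdentifier() {
        return "my-connector"; // matched against the 'connector' option
    }

    @Override
    public Set<ConfigOption<?>> requiredOptions() {
        Set<ConfigOption<?>> options = new HashSet<>();
        options.add(TARGET);
        return options;
    }

    @Override
    public Set<ConfigOption<?>> optionalOptions() {
        return Collections.emptySet();
    }

    @Override
    public DynamicTableSource createDynamicTableSource(Context context) {
        FactoryUtil.TableFactoryHelper helper =
                FactoryUtil.createTableFactoryHelper(this, context);
        helper.validate(); // fails on missing required or unrecognized options
        String target = helper.getOptions().get(TARGET);
        // a real factory would construct and return its connector-specific DynamicTableSource here
        throw new UnsupportedOperationException("source construction omitted in this sketch: " + target);
    }
}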
use of org.apache.flink.table.connector.source.DynamicTableSource in project flink by apache.
the class FactoryUtilTest method testAlternativeValueFormat.
@Test
public void testAlternativeValueFormat() {
    final Map<String, String> options = createAllOptions();
    options.remove("value.format");
    options.remove("value.test-format.delimiter");
    options.remove("value.test-format.fail-on-missing");
    options.put("format", "test-format");
    options.put("test-format.delimiter", ";");
    options.put("test-format.fail-on-missing", "true");

    final DynamicTableSource actualSource = createTableSource(SCHEMA, options);
    final DynamicTableSource expectedSource =
            new DynamicTableSourceMock(
                    "MyTarget", null, new DecodingFormatMock(",", false), new DecodingFormatMock(";", true));
    assertThat(actualSource).isEqualTo(expectedSource);

    final DynamicTableSink actualSink = createTableSink(SCHEMA, options);
    final DynamicTableSink expectedSink =
            new DynamicTableSinkMock(
                    "MyTarget", 1000L, new EncodingFormatMock(","), new EncodingFormatMock(";"));
    assertThat(actualSink).isEqualTo(expectedSink);
}
use of org.apache.flink.table.connector.source.DynamicTableSource in project flink by apache.
the class FactoryUtilTest method testOptionalFormat.
@Test
public void testOptionalFormat() {
    final Map<String, String> options = createAllOptions();
    options.remove("key.format");
    options.remove("key.test-format.delimiter");

    final DynamicTableSource actualSource = createTableSource(SCHEMA, options);
    final DynamicTableSource expectedSource =
            new DynamicTableSourceMock(
                    "MyTarget", null, null, new DecodingFormatMock("|", true));
    assertThat(actualSource).isEqualTo(expectedSource);

    final DynamicTableSink actualSink = createTableSink(SCHEMA, options);
    final DynamicTableSink expectedSink =
            new DynamicTableSinkMock("MyTarget", 1000L, null, new EncodingFormatMock("|"));
    assertThat(actualSink).isEqualTo(expectedSink);
}
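The last two tests exercise how a factory is expected to resolve key and value formats: the generic 'format' key may stand in for 'value.format', and the key format may be absent entirely. Below is a hedged sketch of such discovery logic, as it could be written inside a factory's createDynamicTableSource. KEY_FORMAT and VALUE_FORMAT are illustrative options mirroring the keys used above, FactoryUtil.FORMAT is the real generic 'format' option, and the fallback order is an assumption of this sketch, not Flink's exact test implementation.
import java.util.Optional;

import org.apache.flink.api.common.serialization.DeserializationSchema;
import org.apache.flink.configuration.ConfigOption;
import org.apache.flink.configuration.ConfigOptions;
import org.apache.flink.table.connector.format.DecodingFormat;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.factories.DeserializationFormatFactory;
import org.apache.flink.table.factories.FactoryUtil;

class FormatDiscoverySketch {

    // illustrative options mirroring the keys removed/added in the tests above
    static final ConfigOption<String> KEY_FORMAT =
            ConfigOptions.key("key.format").stringType().noDefaultValue();
    static final ConfigOption<String> VALUE_FORMAT =
            ConfigOptions.key("value.format").stringType().noDefaultValue();

    // The key format is optional: an absent option yields null, which is why the
    // expected mocks in testOptionalFormat carry a null key format.
    static DecodingFormat<DeserializationSchema<RowData>> keyFormat(
            FactoryUtil.TableFactoryHelper helper) {
        Optional<DecodingFormat<DeserializationSchema<RowData>>> format =
                helper.discoverOptionalDecodingFormat(DeserializationFormatFactory.class, KEY_FORMAT);
        return format.orElse(null);
    }

    // Either spelling yields a value format: try the prefixed 'value.format' key and
    // otherwise fall back to the generic 'format' key, which is how 'format' = 'test-format'
    // in testAlternativeValueFormat still produces a value format.
    static DecodingFormat<DeserializationSchema<RowData>> valueFormat(
            FactoryUtil.TableFactoryHelper helper) {
        return helper.discoverOptionalDecodingFormat(DeserializationFormatFactory.class, VALUE_FORMAT)
                .orElseGet(() ->
                        helper.discoverDecodingFormat(
                                DeserializationFormatFactory.class, FactoryUtil.FORMAT));
    }
}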
use of org.apache.flink.table.connector.source.DynamicTableSource in project flink by apache.
the class DynamicSourceUtils method convertDataStreamToRel.
/**
* Converts a given {@link DataStream} to a {@link RelNode}. It adds helper projections if
* necessary.
*/
public static RelNode convertDataStreamToRel(
        boolean isBatchMode, ReadableConfig config, FlinkRelBuilder relBuilder,
        ContextResolvedTable contextResolvedTable, DataStream<?> dataStream,
        DataType physicalDataType, boolean isTopLevelRecord, ChangelogMode changelogMode) {
    final DynamicTableSource tableSource = new ExternalDynamicSource<>(
            contextResolvedTable.getIdentifier(), dataStream, physicalDataType, isTopLevelRecord, changelogMode);
    final FlinkStatistic statistic =
            FlinkStatistic.unknown(contextResolvedTable.getResolvedSchema()).build();
    return convertSourceToRel(
            isBatchMode, config, relBuilder, contextResolvedTable, statistic, Collections.emptyList(), tableSource);
}
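From the user's side, the public entry point that leads into this conversion is StreamTableEnvironment.fromDataStream, which hands the stream to the planner so it can be wrapped in an ExternalDynamicSource as shown above. A minimal sketch using the standard Java Table API bridge; the stream contents and variable names are illustrative.
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

public class DataStreamToTableExample {

    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // an insert-only stream; the planner converts it to a RelNode via the
        // DataStream-to-table conversion shown in the snippet above
        DataStream<String> words = env.fromElements("hello", "world");

        Table table = tableEnv.fromDataStream(words);
        table.printSchema();
    }
}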