Use of org.apache.flink.table.catalog.ResolvedSchema in project flink by apache.
The class FlinkCalciteCatalogReaderTest, method testGetFlinkPreparingTableBase.
@Test
public void testGetFlinkPreparingTableBase() {
    // Mock a CatalogSchemaTable backed by an empty resolved schema.
    final ObjectIdentifier objectIdentifier = ObjectIdentifier.of("a", "b", "c");
    final ResolvedSchema schema =
            new ResolvedSchema(Collections.emptyList(), Collections.emptyList(), null);
    final CatalogTable catalogTable = ConnectorCatalogTable.source(
            new TestTableSource(true, TableSchema.fromResolvedSchema(schema)), true);
    final ResolvedCatalogTable resolvedCatalogTable = new ResolvedCatalogTable(catalogTable, schema);
    CatalogSchemaTable mockTable = new CatalogSchemaTable(
            ContextResolvedTable.permanent(
                    objectIdentifier, CatalogManagerMocks.createEmptyCatalog(), resolvedCatalogTable),
            FlinkStatistic.UNKNOWN(),
            true);
    rootSchemaPlus.add(tableMockName, mockTable);

    // The Calcite catalog reader should resolve the mock to Flink's planner-side table.
    Prepare.PreparingTable preparingTable = catalogReader.getTable(Collections.singletonList(tableMockName));
    assertTrue(preparingTable instanceof FlinkPreparingTableBase);
}
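For comparison, here is a minimal standalone sketch of building a non-empty ResolvedSchema directly; the column names and types are illustrative assumptions, not taken from the test above:

import java.util.Arrays;
import java.util.Collections;
import org.apache.flink.table.catalog.Column;
import org.apache.flink.table.catalog.ResolvedSchema;
import static org.apache.flink.table.api.DataTypes.INT;
import static org.apache.flink.table.api.DataTypes.STRING;

// Two physical columns, no watermark specs, no primary key (hypothetical names).
final ResolvedSchema schema = new ResolvedSchema(
        Arrays.asList(Column.physical("id", INT()), Column.physical("name", STRING())),
        Collections.emptyList(),
        null);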
Use of org.apache.flink.table.catalog.ResolvedSchema in project flink by apache.
The class DataTypeUtilsTest, method testExpandRowType.
@Test
public void testExpandRowType() {
    DataType dataType = ROW(
            FIELD("f0", INT()),
            FIELD("f1", STRING()),
            FIELD("f2", TIMESTAMP(5).bridgedTo(Timestamp.class)),
            FIELD("f3", TIMESTAMP(3)));
    ResolvedSchema schema = DataTypeUtils.expandCompositeTypeToSchema(dataType);
    assertThat(schema).isEqualTo(ResolvedSchema.of(
            Column.physical("f0", INT()),
            Column.physical("f1", STRING()),
            Column.physical("f2", TIMESTAMP(5).bridgedTo(Timestamp.class)),
            Column.physical("f3", TIMESTAMP(3).bridgedTo(LocalDateTime.class))));
}
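The expanded schema can be read back through the ResolvedSchema accessors. A small sketch, assuming the same DataTypeUtils from org.apache.flink.table.types.utils as in the test above; the field names are hypothetical:

import org.apache.flink.table.catalog.ResolvedSchema;
import org.apache.flink.table.types.DataType;
import org.apache.flink.table.types.utils.DataTypeUtils;
import static org.apache.flink.table.api.DataTypes.*;

// Expand a composite ROW type and inspect its parts.
final DataType row = ROW(FIELD("id", INT()), FIELD("name", STRING()));
final ResolvedSchema expanded = DataTypeUtils.expandCompositeTypeToSchema(row);
expanded.getColumnNames();        // ["id", "name"]
expanded.getColumnDataTypes();    // [INT(), STRING()]
expanded.toPhysicalRowDataType(); // a ROW data type with the two physical fields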
Use of org.apache.flink.table.catalog.ResolvedSchema in project flink by apache.
The class DataTypeUtilsTest, method testExpandLegacyCompositeType.
@Test
public void testExpandLegacyCompositeType() {
    DataType dataType = TypeConversions.fromLegacyInfoToDataType(
            new TupleTypeInfo<>(Types.STRING, Types.INT, Types.SQL_TIMESTAMP));
    ResolvedSchema schema = DataTypeUtils.expandCompositeTypeToSchema(dataType);
    assertThat(schema).isEqualTo(ResolvedSchema.of(
            Column.physical("f0", STRING()),
            Column.physical("f1", INT()),
            Column.physical("f2", TIMESTAMP(3).bridgedTo(Timestamp.class))));
}
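A related sketch of the legacy conversion on its own: tuple fields carry no declared names, so the converted type falls back to positional names. The two-field tuple below is an illustrative assumption:

import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.typeutils.TupleTypeInfo;
import org.apache.flink.table.types.DataType;
import org.apache.flink.table.types.utils.TypeConversions;

// The converted data type names its fields f0, f1, ... as in the test above.
final DataType legacy = TypeConversions.fromLegacyInfoToDataType(
        new TupleTypeInfo<>(Types.STRING, Types.INT));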
Use of org.apache.flink.table.catalog.ResolvedSchema in project flink by apache.
The class PushWatermarkIntoTableSourceScanRuleBase, method hasSourceWatermarkDeclaration.
private boolean hasSourceWatermarkDeclaration(TableSourceTable table) {
    final ResolvedSchema schema = table.contextResolvedTable().getResolvedSchema();
    final List<WatermarkSpec> specs = schema.getWatermarkSpecs();
    // we only support one watermark spec for now
    if (specs.size() != 1) {
        return false;
    }
    final ResolvedExpression watermarkExpr = specs.get(0).getWatermarkExpression();
    final FunctionDefinition function = unwrapFunctionDefinition(watermarkExpr);
    return function == BuiltInFunctionDefinitions.SOURCE_WATERMARK;
}
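Once the single-spec check passes, the declaration itself can be inspected. A minimal sketch, assuming a ResolvedSchema with exactly one watermark spec as verified above:

import org.apache.flink.table.catalog.ResolvedSchema;
import org.apache.flink.table.catalog.WatermarkSpec;

// Read the rowtime column and a printable form of the watermark expression.
final WatermarkSpec spec = schema.getWatermarkSpecs().get(0);
final String rowtimeColumn = spec.getRowtimeAttribute();
final String watermarkSummary = spec.getWatermarkExpression().asSummaryString();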
Use of org.apache.flink.table.catalog.ResolvedSchema in project flink by apache.
The class PushProjectIntoTableSourceScanRule, method getPrimaryKeyProjections.
private List<RexNode> getPrimaryKeyProjections(LogicalTableScan scan) {
    final TableSourceTable source = scan.getTable().unwrap(TableSourceTable.class);
    final ResolvedSchema schema = source.contextResolvedTable().getResolvedSchema();
    if (!schema.getPrimaryKey().isPresent()) {
        return Collections.emptyList();
    }
    final FlinkTypeFactory typeFactory = unwrapTypeFactory(scan);
    final UniqueConstraint primaryKey = schema.getPrimaryKey().get();
    return primaryKey.getColumns().stream()
            .map(columnName -> {
                final int idx = scan.getRowType().getFieldNames().indexOf(columnName);
                final Column column = schema.getColumn(idx)
                        .orElseThrow(() -> new TableException(
                                String.format("Column at index %d not found.", idx)));
                return new RexInputRef(
                        idx,
                        typeFactory.createFieldTypeFromLogicalType(
                                column.getDataType().getLogicalType()));
            })
            .collect(Collectors.toList());
}
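The primary-key lookup at the top of the method can be reduced to a small helper. A hedged sketch; primaryKeyColumns is a hypothetical name, not part of the rule:

import java.util.Collections;
import java.util.List;
import org.apache.flink.table.catalog.ResolvedSchema;
import org.apache.flink.table.catalog.UniqueConstraint;

// Returns the declared primary-key column names, or an empty list if no key is set.
static List<String> primaryKeyColumns(ResolvedSchema schema) {
    return schema.getPrimaryKey()
            .map(UniqueConstraint::getColumns)
            .orElse(Collections.emptyList());
}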