Use of org.apache.flink.table.catalog.CatalogTable in project flink by apache.
In the class PushPartitionIntoTableSourceScanRule, the method matches:
@Override
public boolean matches(RelOptRuleCall call) {
    Filter filter = call.rel(0);
    if (filter.getCondition() == null) {
        return false;
    }
    TableSourceTable tableSourceTable = call.rel(1).getTable().unwrap(TableSourceTable.class);
    if (tableSourceTable == null) {
        return false;
    }
    // The rule only applies to sources that can accept pruned partitions.
    DynamicTableSource dynamicTableSource = tableSourceTable.tableSource();
    if (!(dynamicTableSource instanceof SupportsPartitionPushDown)) {
        return false;
    }
    CatalogTable catalogTable = tableSourceTable.contextResolvedTable().getTable();
    if (!catalogTable.isPartitioned() || catalogTable.getPartitionKeys().isEmpty()) {
        return false;
    }
    // Skip scans where partition push-down has already been applied.
    return Arrays.stream(tableSourceTable.abilitySpecs())
            .noneMatch(spec -> spec instanceof PartitionPushDownSpec);
}
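The predicate requires the source to implement SupportsPartitionPushDown before the rule attempts any pruning. For orientation, here is a minimal sketch of such a source; the class name MyPartitionedSource and its field are illustrative, not from the Flink codebase, and the runtime provider is stubbed out:

import org.apache.flink.table.connector.ChangelogMode;
import org.apache.flink.table.connector.source.DynamicTableSource;
import org.apache.flink.table.connector.source.ScanTableSource;
import org.apache.flink.table.connector.source.abilities.SupportsPartitionPushDown;

import java.util.List;
import java.util.Map;
import java.util.Optional;

public class MyPartitionedSource implements ScanTableSource, SupportsPartitionPushDown {

    // Partitions remaining after pruning; null means "read everything".
    private List<Map<String, String>> remainingPartitions;

    @Override
    public Optional<List<Map<String, String>>> listPartitions() {
        // Optional.empty() tells the planner to list partitions via the catalog instead.
        return Optional.empty();
    }

    @Override
    public void applyPartitions(List<Map<String, String>> remainingPartitions) {
        this.remainingPartitions = remainingPartitions;
    }

    @Override
    public ChangelogMode getChangelogMode() {
        return ChangelogMode.insertOnly();
    }

    @Override
    public ScanRuntimeProvider getScanRuntimeProvider(ScanContext runtimeProviderContext) {
        // A real connector would build its runtime source from remainingPartitions.
        throw new UnsupportedOperationException("stubbed out in this sketch");
    }

    @Override
    public DynamicTableSource copy() {
        MyPartitionedSource copy = new MyPartitionedSource();
        copy.remainingPartitions = remainingPartitions;
        return copy;
    }

    @Override
    public String asSummaryString() {
        return "MyPartitionedSource";
    }
}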
Use of org.apache.flink.table.catalog.CatalogTable in project flink by apache.
In the class SqlToOperationConverter, the method convertAlterView:
/** Convert an ALTER VIEW statement. */
private Operation convertAlterView(SqlAlterView alterView) {
    UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(alterView.fullViewName());
    ObjectIdentifier viewIdentifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
    Optional<ContextResolvedTable> optionalCatalogTable = catalogManager.getTable(viewIdentifier);
    if (!optionalCatalogTable.isPresent() || optionalCatalogTable.get().isTemporary()) {
        throw new ValidationException(
                String.format("View %s doesn't exist or is a temporary view.", viewIdentifier));
    }
    CatalogBaseTable baseTable = optionalCatalogTable.get().getTable();
    if (baseTable instanceof CatalogTable) {
        throw new ValidationException("ALTER VIEW for a table is not allowed");
    }
    if (alterView instanceof SqlAlterViewRename) {
        // ALTER VIEW ... RENAME TO ...
        UnresolvedIdentifier newUnresolvedIdentifier =
                UnresolvedIdentifier.of(((SqlAlterViewRename) alterView).fullNewViewName());
        ObjectIdentifier newTableIdentifier =
                catalogManager.qualifyIdentifier(newUnresolvedIdentifier);
        return new AlterViewRenameOperation(viewIdentifier, newTableIdentifier);
    } else if (alterView instanceof SqlAlterViewProperties) {
        // ALTER VIEW ... SET (...): merge the new properties into the old ones.
        SqlAlterViewProperties alterViewProperties = (SqlAlterViewProperties) alterView;
        CatalogView oldView = (CatalogView) baseTable;
        Map<String, String> newProperties = new HashMap<>(oldView.getOptions());
        newProperties.putAll(
                OperationConverterUtils.extractProperties(alterViewProperties.getPropertyList()));
        CatalogView newView =
                new CatalogViewImpl(
                        oldView.getOriginalQuery(),
                        oldView.getExpandedQuery(),
                        oldView.getSchema(),
                        newProperties,
                        oldView.getComment());
        return new AlterViewPropertiesOperation(viewIdentifier, newView);
    } else if (alterView instanceof SqlAlterViewAs) {
        // ALTER VIEW ... AS <query>: keep options and comment, replace the query.
        SqlAlterViewAs alterViewAs = (SqlAlterViewAs) alterView;
        final SqlNode newQuery = alterViewAs.getNewQuery();
        CatalogView oldView = (CatalogView) baseTable;
        CatalogView newView =
                convertViewQuery(
                        newQuery, Collections.emptyList(), oldView.getOptions(), oldView.getComment());
        return new AlterViewAsOperation(viewIdentifier, newView);
    } else {
        throw new ValidationException(
                String.format(
                        "[%s] needs to implement",
                        alterView.toSqlString(CalciteSqlDialect.DEFAULT)));
    }
}
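Each branch corresponds to one surface syntax. As a rough end-to-end illustration (a sketch: the table, view, and property names are made up, and it assumes the default SQL dialect), the three forms can be exercised through a TableEnvironment:

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class AlterViewExamples {
    public static void main(String[] args) {
        TableEnvironment tEnv =
                TableEnvironment.create(EnvironmentSettings.newInstance().inStreamingMode().build());

        // A permanent view to operate on (temporary views are rejected by the converter).
        tEnv.executeSql("CREATE TABLE src (a INT) WITH ('connector' = 'datagen')");
        tEnv.executeSql("CREATE VIEW v1 AS SELECT a FROM src");

        // SqlAlterViewRename -> AlterViewRenameOperation
        tEnv.executeSql("ALTER VIEW v1 RENAME TO v2");

        // SqlAlterViewProperties -> AlterViewPropertiesOperation
        tEnv.executeSql("ALTER VIEW v2 SET ('some.key' = 'some-value')");

        // SqlAlterViewAs -> AlterViewAsOperation
        tEnv.executeSql("ALTER VIEW v2 AS SELECT a + 1 AS a FROM src");
    }
}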
Use of org.apache.flink.table.catalog.CatalogTable in project flink by apache.
In the class SqlCreateTableConverter, the method convertCreateTable:
/** Convert the {@link SqlCreateTable} node. */
Operation convertCreateTable(SqlCreateTable sqlCreateTable) {
    sqlCreateTable.getTableConstraints().forEach(validateTableConstraint);
    CatalogTable catalogTable = createCatalogTable(sqlCreateTable);
    UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(sqlCreateTable.fullTableName());
    ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
    return new CreateTableOperation(
            identifier,
            catalogTable,
            sqlCreateTable.isIfNotExists(),
            sqlCreateTable.isTemporary());
}
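The CatalogTable handed to the CreateTableOperation can also be assembled by hand. A minimal sketch using only public API; the schema, comment, and option values are illustrative:

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.catalog.CatalogTable;
import org.apache.flink.table.catalog.GenericInMemoryCatalog;
import org.apache.flink.table.catalog.ObjectPath;

import java.util.Collections;

public class CreateCatalogTableExample {
    public static void main(String[] args) throws Exception {
        // The same building blocks createCatalogTable(...) derives from the parsed DDL:
        // schema, comment, partition keys, and connector options.
        CatalogTable table =
                CatalogTable.of(
                        Schema.newBuilder()
                                .column("user_id", DataTypes.BIGINT())
                                .column("name", DataTypes.STRING())
                                .build(),
                        "an example table", // comment
                        Collections.emptyList(), // partition keys
                        Collections.singletonMap("connector", "datagen"));

        // Register it directly against a catalog, bypassing the SQL layer.
        GenericInMemoryCatalog catalog = new GenericInMemoryCatalog("cat", "db");
        catalog.createTable(new ObjectPath("db", "t"), table, false);
    }
}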
Use of org.apache.flink.table.catalog.CatalogTable in project flink by apache.
In the class SqlToOperationConverterTest, the method prepareTable:
private void prepareTable(boolean managedTable, boolean hasPartition, boolean hasConstraint)
        throws Exception {
    Catalog catalog = new GenericInMemoryCatalog("default", "default");
    catalogManager.registerCatalog("cat1", catalog);
    catalog.createDatabase("db1", new CatalogDatabaseImpl(new HashMap<>(), null), true);
    Schema.Builder builder =
            Schema.newBuilder()
                    .column("a", DataTypes.STRING().notNull())
                    .column("b", DataTypes.BIGINT().notNull())
                    .column("c", DataTypes.BIGINT());
    Map<String, String> options = new HashMap<>();
    options.put("k", "v");
    // A managed table is one without an explicit connector option.
    if (!managedTable) {
        options.put("connector", "dummy");
    }
    CatalogTable catalogTable =
            CatalogTable.of(
                    hasConstraint ? builder.primaryKeyNamed("ct1", "a", "b").build() : builder.build(),
                    "tb1",
                    hasPartition ? Arrays.asList("b", "c") : Collections.emptyList(),
                    Collections.unmodifiableMap(options));
    catalogManager.setCurrentCatalog("cat1");
    catalogManager.setCurrentDatabase("db1");
    ObjectIdentifier tableIdentifier = ObjectIdentifier.of("cat1", "db1", "tb1");
    catalogManager.createTable(catalogTable, tableIdentifier, true);
}
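A hypothetical follow-up check (not part of the Flink test suite) that reads the registered table back through the Catalog and verifies the metadata the flags control; it assumes prepareTable(..., hasPartition = true, ...) was called first and that AssertJ's assertThat is statically imported:

private void assertPreparedTable(Catalog catalog) throws Exception {
    CatalogBaseTable stored = catalog.getTable(new ObjectPath("db1", "tb1"));
    CatalogTable storedTable = (CatalogTable) stored;
    // prepareTable(..., hasPartition = true, ...) registers partition keys ["b", "c"].
    assertThat(storedTable.isPartitioned()).isTrue();
    assertThat(storedTable.getPartitionKeys()).containsExactly("b", "c");
}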
Use of org.apache.flink.table.catalog.CatalogTable in project flink by apache.
In the class SqlToOperationConverterTest, the method testCreateTableLikeInvalidPartition:
@Test
public void testCreateTableLikeInvalidPartition() {
    CatalogTable catalogTable =
            CatalogTable.of(
                    Schema.newBuilder().column("f0", DataTypes.INT().notNull()).build(),
                    null,
                    Collections.emptyList(),
                    Collections.emptyMap());
    catalogManager.createTable(
            catalogTable, ObjectIdentifier.of("builtin", "default", "sourceTable"), false);
    final String sql =
            "create table derivedTable(\n"
                    + "  a int\n"
                    + ")\n"
                    + "PARTITIONED BY (f3)\n"
                    + "like sourceTable";
    assertThatThrownBy(() -> parseAndConvert(sql))
            .isInstanceOf(ValidationException.class)
            .hasMessageContaining(
                    "Partition column 'f3' not defined in the table schema. Available columns: ['f0', 'a']");
}
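For contrast, a sketch of the positive counterpart (not an actual test in the suite): partitioning by the inherited column f0 would be expected to convert into a CreateTableOperation whose CatalogTable carries that partition key:

@Test
public void testCreateTableLikeValidPartition() {
    CatalogTable catalogTable =
            CatalogTable.of(
                    Schema.newBuilder().column("f0", DataTypes.INT().notNull()).build(),
                    null,
                    Collections.emptyList(),
                    Collections.emptyMap());
    catalogManager.createTable(
            catalogTable, ObjectIdentifier.of("builtin", "default", "sourceTable"), false);
    final String sql =
            "create table derivedTable(\n"
                    + "  a int\n"
                    + ")\n"
                    + "PARTITIONED BY (f0)\n"
                    + "like sourceTable";
    Operation operation = parseAndConvert(sql);
    assertThat(operation).isInstanceOf(CreateTableOperation.class);
    assertThat(((CreateTableOperation) operation).getCatalogTable().getPartitionKeys())
            .containsExactly("f0");
}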