Use of org.apache.flink.table.catalog.CatalogBaseTable in project flink by apache.
The class TableEnvironmentTest, method testCreateTableFromDescriptor.
@Test
public void testCreateTableFromDescriptor() throws Exception {
    final TableEnvironmentMock tEnv = TableEnvironmentMock.getStreamingInstance();
    final String catalog = tEnv.getCurrentCatalog();
    final String database = tEnv.getCurrentDatabase();

    final Schema schema = Schema.newBuilder().column("f0", DataTypes.INT()).build();
    tEnv.createTable(
            "T", TableDescriptor.forConnector("fake").schema(schema).option("a", "Test").build());

    final ObjectPath objectPath = new ObjectPath(database, "T");
    assertThat(tEnv.getCatalog(catalog).orElseThrow(AssertionError::new).tableExists(objectPath))
            .isTrue();

    final CatalogBaseTable catalogTable =
            tEnv.getCatalog(catalog).orElseThrow(AssertionError::new).getTable(objectPath);
    assertThat(catalogTable).isInstanceOf(CatalogTable.class);
    assertThat(catalogTable.getUnresolvedSchema()).isEqualTo(schema);
    assertThat(catalogTable.getOptions()).contains(entry("connector", "fake"), entry("a", "Test"));
}
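The same descriptor-based registration works against a regular TableEnvironment outside the test mocks. A minimal sketch, assuming a Flink 1.15+ classpath where the built-in datagen connector and the static EnvironmentSettings.inStreamingMode() factory are available; the class name, table name, and columns are illustrative:

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.api.TableDescriptor;
import org.apache.flink.table.api.TableEnvironment;

public final class CreateTableFromDescriptorExample {
    public static void main(String[] args) {
        TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inStreamingMode());

        // Register a catalog table backed by the built-in datagen connector.
        tEnv.createTable(
                "Orders",
                TableDescriptor.forConnector("datagen")
                        .schema(
                                Schema.newBuilder()
                                        .column("order_id", DataTypes.BIGINT())
                                        .column("amount", DataTypes.DOUBLE())
                                        .build())
                        .option("rows-per-second", "5")
                        .build());

        // The table is now resolvable by name from SQL and the Table API.
        tEnv.from("Orders").printSchema();
    }
}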
Use of org.apache.flink.table.catalog.CatalogBaseTable in project flink by apache.
The class TableEnvironmentImpl, method registerTableSinkInternal.
@Override
public void registerTableSinkInternal(String name, TableSink<?> tableSink) {
    ObjectIdentifier objectIdentifier =
            catalogManager.qualifyIdentifier(UnresolvedIdentifier.of(name));
    Optional<CatalogBaseTable> table = getTemporaryTable(objectIdentifier);

    if (table.isPresent()) {
        if (table.get() instanceof ConnectorCatalogTable<?, ?>) {
            ConnectorCatalogTable<?, ?> sourceSinkTable = (ConnectorCatalogTable<?, ?>) table.get();
            if (sourceSinkTable.getTableSink().isPresent()) {
                throw new ValidationException(
                        String.format(
                                "Table '%s' already exists. Please choose a different name.", name));
            } else {
                // wrapper contains only source (not sink)
                ConnectorCatalogTable sourceAndSink =
                        ConnectorCatalogTable.sourceAndSink(
                                sourceSinkTable.getTableSource().get(), tableSink, !IS_STREAM_TABLE);
                catalogManager.dropTemporaryTable(objectIdentifier, false);
                catalogManager.createTemporaryTable(sourceAndSink, objectIdentifier, false);
            }
        } else {
            throw new ValidationException(
                    String.format(
                            "Table '%s' already exists. Please choose a different name.", name));
        }
    } else {
        ConnectorCatalogTable sink = ConnectorCatalogTable.sink(tableSink, !IS_STREAM_TABLE);
        catalogManager.createTemporaryTable(sink, objectIdentifier, false);
    }
}
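The branching above is essentially a three-way merge on the temporary-table registry: reject the registration if a sink already exists under the name, upgrade a source-only wrapper to a combined source-and-sink wrapper, or create a fresh sink-only entry. A simplified, self-contained sketch of that pattern, using hypothetical Wrapper and SinkRegistry types rather than Flink's ConnectorCatalogTable and CatalogManager:

import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

final class SinkRegistry<S, K> {
    /** Hypothetical stand-in for ConnectorCatalogTable's optional source/sink slots. */
    static final class Wrapper<S, K> {
        final Optional<S> source;
        final Optional<K> sink;

        Wrapper(Optional<S> source, Optional<K> sink) {
            this.source = source;
            this.sink = sink;
        }
    }

    private final Map<String, Wrapper<S, K>> tables = new HashMap<>();

    void registerSink(String name, K sink) {
        Wrapper<S, K> existing = tables.get(name);
        if (existing == null) {
            // No entry yet: create a sink-only wrapper.
            tables.put(name, new Wrapper<>(Optional.empty(), Optional.of(sink)));
        } else if (existing.sink.isPresent()) {
            // A sink is already registered under this name.
            throw new IllegalArgumentException(
                    "Table '" + name + "' already exists. Please choose a different name.");
        } else {
            // Source-only wrapper: replace it with a source-and-sink wrapper.
            tables.put(name, new Wrapper<>(existing.source, Optional.of(sink)));
        }
    }
}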
Use of org.apache.flink.table.catalog.CatalogBaseTable in project flink by apache.
The class SqlToOperationConverter, method convertAlterTable.
/** Converts an ALTER TABLE statement into a corresponding Operation. */
private Operation convertAlterTable(SqlAlterTable sqlAlterTable) {
    UnresolvedIdentifier unresolvedIdentifier =
            UnresolvedIdentifier.of(sqlAlterTable.fullTableName());
    ObjectIdentifier tableIdentifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
    Optional<ContextResolvedTable> optionalCatalogTable = catalogManager.getTable(tableIdentifier);
    if (!optionalCatalogTable.isPresent() || optionalCatalogTable.get().isTemporary()) {
        throw new ValidationException(
                String.format("Table %s doesn't exist or is a temporary table.", tableIdentifier));
    }
    CatalogBaseTable baseTable = optionalCatalogTable.get().getTable();
    if (baseTable instanceof CatalogView) {
        throw new ValidationException("ALTER TABLE for a view is not allowed");
    }
    if (sqlAlterTable instanceof SqlAlterTableRename) {
        UnresolvedIdentifier newUnresolvedIdentifier =
                UnresolvedIdentifier.of(((SqlAlterTableRename) sqlAlterTable).fullNewTableName());
        ObjectIdentifier newTableIdentifier =
                catalogManager.qualifyIdentifier(newUnresolvedIdentifier);
        return new AlterTableRenameOperation(tableIdentifier, newTableIdentifier);
    } else if (sqlAlterTable instanceof SqlAlterTableOptions) {
        return convertAlterTableOptions(
                tableIdentifier, (CatalogTable) baseTable, (SqlAlterTableOptions) sqlAlterTable);
    } else if (sqlAlterTable instanceof SqlAlterTableReset) {
        return convertAlterTableReset(
                tableIdentifier, (CatalogTable) baseTable, (SqlAlterTableReset) sqlAlterTable);
    } else if (sqlAlterTable instanceof SqlAlterTableAddConstraint) {
        SqlTableConstraint constraint =
                ((SqlAlterTableAddConstraint) sqlAlterTable).getConstraint();
        validateTableConstraint(constraint);
        TableSchema oriSchema =
                TableSchema.fromResolvedSchema(
                        baseTable.getUnresolvedSchema().resolve(catalogManager.getSchemaResolver()));
        // Sanity check for constraint.
        TableSchema.Builder builder = TableSchemaUtils.builderWithGivenSchema(oriSchema);
        if (constraint.getConstraintName().isPresent()) {
            builder.primaryKey(constraint.getConstraintName().get(), constraint.getColumnNames());
        } else {
            builder.primaryKey(constraint.getColumnNames());
        }
        builder.build();
        return new AlterTableAddConstraintOperation(
                tableIdentifier,
                constraint.getConstraintName().orElse(null),
                constraint.getColumnNames());
    } else if (sqlAlterTable instanceof SqlAlterTableDropConstraint) {
        SqlAlterTableDropConstraint dropConstraint = (SqlAlterTableDropConstraint) sqlAlterTable;
        String constraintName = dropConstraint.getConstraintName().getSimple();
        TableSchema oriSchema =
                TableSchema.fromResolvedSchema(
                        baseTable.getUnresolvedSchema().resolve(catalogManager.getSchemaResolver()));
        if (!oriSchema.getPrimaryKey().filter(pk -> pk.getName().equals(constraintName)).isPresent()) {
            throw new ValidationException(
                    String.format("CONSTRAINT [%s] does not exist", constraintName));
        }
        return new AlterTableDropConstraintOperation(tableIdentifier, constraintName);
    } else if (sqlAlterTable instanceof SqlAddReplaceColumns) {
        return OperationConverterUtils.convertAddReplaceColumns(
                tableIdentifier,
                (SqlAddReplaceColumns) sqlAlterTable,
                (CatalogTable) baseTable,
                flinkPlanner.getOrCreateSqlValidator());
    } else if (sqlAlterTable instanceof SqlChangeColumn) {
        return OperationConverterUtils.convertChangeColumn(
                tableIdentifier,
                (SqlChangeColumn) sqlAlterTable,
                (CatalogTable) baseTable,
                flinkPlanner.getOrCreateSqlValidator());
    } else if (sqlAlterTable instanceof SqlAddPartitions) {
        List<CatalogPartitionSpec> specs = new ArrayList<>();
        List<CatalogPartition> partitions = new ArrayList<>();
        SqlAddPartitions addPartitions = (SqlAddPartitions) sqlAlterTable;
        for (int i = 0; i < addPartitions.getPartSpecs().size(); i++) {
            specs.add(new CatalogPartitionSpec(addPartitions.getPartitionKVs(i)));
            Map<String, String> props =
                    OperationConverterUtils.extractProperties(addPartitions.getPartProps().get(i));
            partitions.add(new CatalogPartitionImpl(props, null));
        }
        return new AddPartitionsOperation(
                tableIdentifier, addPartitions.ifNotExists(), specs, partitions);
    } else if (sqlAlterTable instanceof SqlDropPartitions) {
        SqlDropPartitions dropPartitions = (SqlDropPartitions) sqlAlterTable;
        List<CatalogPartitionSpec> specs = new ArrayList<>();
        for (int i = 0; i < dropPartitions.getPartSpecs().size(); i++) {
            specs.add(new CatalogPartitionSpec(dropPartitions.getPartitionKVs(i)));
        }
        return new DropPartitionsOperation(tableIdentifier, dropPartitions.ifExists(), specs);
    } else if (sqlAlterTable instanceof SqlAlterTableCompact) {
        return convertAlterTableCompact(
                tableIdentifier, optionalCatalogTable.get(), (SqlAlterTableCompact) sqlAlterTable);
    } else {
        throw new ValidationException(
                String.format(
                        "[%s] needs to implement",
                        sqlAlterTable.toSqlString(CalciteSqlDialect.DEFAULT)));
    }
}
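From the user side, several of the branches above correspond directly to ALTER TABLE variants accepted through TableEnvironment.executeSql. A minimal sketch, assuming the tEnv and the datagen-backed Orders table from the descriptor example above (statement wording is illustrative; RESET and ADD CONSTRAINT require a Flink version that ships the corresponding SqlAlterTable* parser nodes):

// RENAME branch -> AlterTableRenameOperation
tEnv.executeSql("ALTER TABLE Orders RENAME TO OrdersRenamed");

// SET branch -> convertAlterTableOptions
tEnv.executeSql("ALTER TABLE OrdersRenamed SET ('rows-per-second' = '10')");

// RESET branch -> convertAlterTableReset
tEnv.executeSql("ALTER TABLE OrdersRenamed RESET ('rows-per-second')");

// ADD CONSTRAINT branch -> AlterTableAddConstraintOperation
tEnv.executeSql(
        "ALTER TABLE OrdersRenamed ADD CONSTRAINT pk PRIMARY KEY (order_id) NOT ENFORCED");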
Use of org.apache.flink.table.catalog.CatalogBaseTable in project flink by apache.
The class TestValuesCatalog, method listPartitionsByFilter.
@Override
public List<CatalogPartitionSpec> listPartitionsByFilter(ObjectPath tablePath, List<Expression> filters)
        throws TableNotExistException, TableNotPartitionedException, CatalogException {
    if (!supportListPartitionByFilter) {
        throw new UnsupportedOperationException(
                "TestValuesCatalog doesn't support list partition by filters");
    }

    List<CatalogPartitionSpec> partitions = listPartitions(tablePath);
    if (partitions.isEmpty()) {
        return partitions;
    }

    CatalogBaseTable table = this.getTable(tablePath);
    TableSchema schema = table.getSchema();

    List<ResolvedExpression> resolvedExpressions =
            filters.stream()
                    .map(
                            filter -> {
                                if (filter instanceof ResolvedExpression) {
                                    return (ResolvedExpression) filter;
                                }
                                throw new UnsupportedOperationException(
                                        String.format(
                                                "TestValuesCatalog only works with resolved expressions. Get unresolved expression: %s",
                                                filter));
                            })
                    .collect(Collectors.toList());

    return partitions.stream()
            .filter(
                    partition -> {
                        Function<String, Comparable<?>> getter =
                                getValueGetter(partition.getPartitionSpec(), schema);
                        return FilterUtils.isRetainedAfterApplyingFilterPredicates(
                                resolvedExpressions, getter);
                    })
            .collect(Collectors.toList());
}
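The pruning step reduces to: build a value getter over each partition's key/value spec, then keep only the partitions for which every filter predicate holds. A simplified, self-contained sketch of that shape, using plain java.util.function.Predicate in place of Flink's ResolvedExpression and FilterUtils (the PartitionPruner type and all names here are illustrative):

import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.Collectors;

final class PartitionPruner {
    /** Keeps only the partition specs for which all predicates hold. */
    static List<Map<String, String>> prune(
            List<Map<String, String>> partitionSpecs,
            List<Predicate<Function<String, String>>> predicates) {
        return partitionSpecs.stream()
                .filter(spec -> {
                    // Value getter over this partition's key/value spec,
                    // mirroring getValueGetter(partitionSpec, schema) above.
                    Function<String, String> getter = spec::get;
                    return predicates.stream().allMatch(p -> p.test(getter));
                })
                .collect(Collectors.toList());
    }
}

For example, a predicate such as getter -> "2024".equals(getter.apply("year")) keeps only the partitions whose spec maps year to 2024.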