use of org.apache.flink.table.catalog.ContextResolvedTable in project flink by apache.
the class SqlToOperationConverterTest method checkAlterTableCompact.
private void checkAlterTableCompact(Operation operation, Map<String, String> staticPartitions) {
    // ALTER TABLE ... COMPACT is converted into a SinkModifyOperation that
    // writes back into the same managed table it reads from.
    assertThat(operation).isInstanceOf(SinkModifyOperation.class);
    SinkModifyOperation modifyOperation = (SinkModifyOperation) operation;
    assertThat(modifyOperation.getStaticPartitions())
            .containsExactlyInAnyOrderEntriesOf(staticPartitions);
    assertThat(modifyOperation.isOverwrite()).isFalse();
    assertThat(modifyOperation.getDynamicOptions())
            .containsEntry(
                    TestManagedTableFactory.ENRICHED_KEY, TestManagedTableFactory.ENRICHED_VALUE);
    ContextResolvedTable contextResolvedTable = modifyOperation.getContextResolvedTable();
    assertThat(contextResolvedTable.getIdentifier())
            .isEqualTo(ObjectIdentifier.of("cat1", "db1", "tb1"));
    // The child of the sink is a scan of the same table, carrying both the
    // user-specified and the factory-enriched dynamic options.
    assertThat(modifyOperation.getChild()).isInstanceOf(SourceQueryOperation.class);
    SourceQueryOperation child = (SourceQueryOperation) modifyOperation.getChild();
    assertThat(child.getChildren()).isEmpty();
    assertThat(child.getDynamicOptions()).containsEntry("k", "v");
    assertThat(child.getDynamicOptions())
            .containsEntry(
                    TestManagedTableFactory.ENRICHED_KEY, TestManagedTableFactory.ENRICHED_VALUE);
}
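For context, a hypothetical call site for this helper could look as follows; the parse(...) utility and the exact SQL are assumptions for illustration, not code taken from the test class above.

// Hypothetical usage sketch; parse(...) is an assumed test utility that
// runs the SQL string through SqlToOperationConverter.
Map<String, String> staticPartitions = Collections.singletonMap("part", "1");
Operation operation = parse("ALTER TABLE cat1.db1.tb1 PARTITION (part = 1) COMPACT");
checkAlterTableCompact(operation, staticPartitions);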
use of org.apache.flink.table.catalog.ContextResolvedTable in project flink by apache.
the class CatalogSourceTable method toRel.
@Override
public RelNode toRel(ToRelContext toRelContext) {
    final RelOptCluster cluster = toRelContext.getCluster();
    final List<RelHint> hints = toRelContext.getTableHints();
    final FlinkContext context = ShortcutUtils.unwrapContext(cluster);
    final FlinkRelBuilder relBuilder = FlinkRelBuilder.of(cluster, relOptSchema);

    // finalize catalog table with option hints
    final Map<String, String> hintedOptions = FlinkHints.getHintedOptions(hints);
    final ContextResolvedTable catalogTable = computeContextResolvedTable(context, hintedOptions);

    // create table source
    final DynamicTableSource tableSource =
            createDynamicTableSource(context, catalogTable.getResolvedTable());

    // prepare table source and convert to RelNode
    return DynamicSourceUtils.convertSourceToRel(
            !schemaTable.isStreamingMode(),
            context.getTableConfig().getConfiguration(),
            relBuilder,
            schemaTable.getContextResolvedTable(),
            schemaTable.getStatistic(),
            hints,
            tableSource);
}
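For orientation, the option-hint merge in computeContextResolvedTable can be sketched as below. This is a simplified illustration, not Flink's actual method body; it assumes ContextResolvedTable.copy(Map) and FlinkHints.mergeTableOptions behave as used here, and it omits the validation the real planner performs before accepting hints.

// A minimal sketch of computeContextResolvedTable (assumption: the real
// method additionally checks that dynamic table options are enabled and
// rejects OPTIONS hints otherwise).
private ContextResolvedTable computeContextResolvedTable(
        FlinkContext context, Map<String, String> hintedOptions) {
    ContextResolvedTable contextResolvedTable = schemaTable.getContextResolvedTable();
    if (hintedOptions.isEmpty()) {
        return contextResolvedTable;
    }
    // Lay the OPTIONS hint entries over the table's own options.
    return contextResolvedTable.copy(
            FlinkHints.mergeTableOptions(
                    hintedOptions, contextResolvedTable.getResolvedTable().getOptions()));
}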
use of org.apache.flink.table.catalog.ContextResolvedTable in project flink by apache.
the class SqlToOperationConverter method convertAlterTable.
/**
 * Converts an ALTER TABLE statement.
 */
private Operation convertAlterTable(SqlAlterTable sqlAlterTable) {
    UnresolvedIdentifier unresolvedIdentifier =
            UnresolvedIdentifier.of(sqlAlterTable.fullTableName());
    ObjectIdentifier tableIdentifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
    Optional<ContextResolvedTable> optionalCatalogTable = catalogManager.getTable(tableIdentifier);
    if (!optionalCatalogTable.isPresent() || optionalCatalogTable.get().isTemporary()) {
        throw new ValidationException(
                String.format(
                        "Table %s doesn't exist or is a temporary table.", tableIdentifier));
    }
    CatalogBaseTable baseTable = optionalCatalogTable.get().getTable();
    if (baseTable instanceof CatalogView) {
        throw new ValidationException("ALTER TABLE for a view is not allowed");
    }
    if (sqlAlterTable instanceof SqlAlterTableRename) {
        UnresolvedIdentifier newUnresolvedIdentifier =
                UnresolvedIdentifier.of(
                        ((SqlAlterTableRename) sqlAlterTable).fullNewTableName());
        ObjectIdentifier newTableIdentifier =
                catalogManager.qualifyIdentifier(newUnresolvedIdentifier);
        return new AlterTableRenameOperation(tableIdentifier, newTableIdentifier);
    } else if (sqlAlterTable instanceof SqlAlterTableOptions) {
        return convertAlterTableOptions(
                tableIdentifier, (CatalogTable) baseTable, (SqlAlterTableOptions) sqlAlterTable);
    } else if (sqlAlterTable instanceof SqlAlterTableReset) {
        return convertAlterTableReset(
                tableIdentifier, (CatalogTable) baseTable, (SqlAlterTableReset) sqlAlterTable);
    } else if (sqlAlterTable instanceof SqlAlterTableAddConstraint) {
        SqlTableConstraint constraint =
                ((SqlAlterTableAddConstraint) sqlAlterTable).getConstraint();
        validateTableConstraint(constraint);
        TableSchema oriSchema =
                TableSchema.fromResolvedSchema(
                        baseTable
                                .getUnresolvedSchema()
                                .resolve(catalogManager.getSchemaResolver()));
        // Sanity check for constraint.
        TableSchema.Builder builder = TableSchemaUtils.builderWithGivenSchema(oriSchema);
        if (constraint.getConstraintName().isPresent()) {
            builder.primaryKey(constraint.getConstraintName().get(), constraint.getColumnNames());
        } else {
            builder.primaryKey(constraint.getColumnNames());
        }
        builder.build();
        return new AlterTableAddConstraintOperation(
                tableIdentifier,
                constraint.getConstraintName().orElse(null),
                constraint.getColumnNames());
    } else if (sqlAlterTable instanceof SqlAlterTableDropConstraint) {
        SqlAlterTableDropConstraint dropConstraint = (SqlAlterTableDropConstraint) sqlAlterTable;
        String constraintName = dropConstraint.getConstraintName().getSimple();
        TableSchema oriSchema =
                TableSchema.fromResolvedSchema(
                        baseTable
                                .getUnresolvedSchema()
                                .resolve(catalogManager.getSchemaResolver()));
        if (!oriSchema
                .getPrimaryKey()
                .filter(pk -> pk.getName().equals(constraintName))
                .isPresent()) {
            throw new ValidationException(
                    String.format("CONSTRAINT [%s] does not exist", constraintName));
        }
        return new AlterTableDropConstraintOperation(tableIdentifier, constraintName);
    } else if (sqlAlterTable instanceof SqlAddReplaceColumns) {
        return OperationConverterUtils.convertAddReplaceColumns(
                tableIdentifier,
                (SqlAddReplaceColumns) sqlAlterTable,
                (CatalogTable) baseTable,
                flinkPlanner.getOrCreateSqlValidator());
    } else if (sqlAlterTable instanceof SqlChangeColumn) {
        return OperationConverterUtils.convertChangeColumn(
                tableIdentifier,
                (SqlChangeColumn) sqlAlterTable,
                (CatalogTable) baseTable,
                flinkPlanner.getOrCreateSqlValidator());
    } else if (sqlAlterTable instanceof SqlAddPartitions) {
        List<CatalogPartitionSpec> specs = new ArrayList<>();
        List<CatalogPartition> partitions = new ArrayList<>();
        SqlAddPartitions addPartitions = (SqlAddPartitions) sqlAlterTable;
        for (int i = 0; i < addPartitions.getPartSpecs().size(); i++) {
            specs.add(new CatalogPartitionSpec(addPartitions.getPartitionKVs(i)));
            Map<String, String> props =
                    OperationConverterUtils.extractProperties(
                            addPartitions.getPartProps().get(i));
            partitions.add(new CatalogPartitionImpl(props, null));
        }
        return new AddPartitionsOperation(
                tableIdentifier, addPartitions.ifNotExists(), specs, partitions);
    } else if (sqlAlterTable instanceof SqlDropPartitions) {
        SqlDropPartitions dropPartitions = (SqlDropPartitions) sqlAlterTable;
        List<CatalogPartitionSpec> specs = new ArrayList<>();
        for (int i = 0; i < dropPartitions.getPartSpecs().size(); i++) {
            specs.add(new CatalogPartitionSpec(dropPartitions.getPartitionKVs(i)));
        }
        return new DropPartitionsOperation(tableIdentifier, dropPartitions.ifExists(), specs);
    } else if (sqlAlterTable instanceof SqlAlterTableCompact) {
        return convertAlterTableCompact(
                tableIdentifier,
                optionalCatalogTable.get(),
                (SqlAlterTableCompact) sqlAlterTable);
    } else {
        throw new ValidationException(
                String.format(
                        "[%s] needs to implement",
                        sqlAlterTable.toSqlString(CalciteSqlDialect.DEFAULT)));
    }
}
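As a usage sketch for the RENAME branch, a statement like the one below should come back as a fully qualified AlterTableRenameOperation. The parse(...) helper is again an assumed utility, not part of the converter.

// Hypothetical walk-through of the SqlAlterTableRename branch.
Operation op = parse("ALTER TABLE cat1.db1.tb1 RENAME TO tb2");
AlterTableRenameOperation rename = (AlterTableRenameOperation) op;
// Both identifiers are qualified against the current catalog/database.
assertThat(rename.getTableIdentifier()).isEqualTo(ObjectIdentifier.of("cat1", "db1", "tb1"));
assertThat(rename.getNewTableIdentifier()).isEqualTo(ObjectIdentifier.of("cat1", "db1", "tb2"));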
use of org.apache.flink.table.catalog.ContextResolvedTable in project flink by apache.
the class SqlToOperationConverter method convertSqlInsert.
/**
 * Converts an INSERT INTO statement.
 */
private Operation convertSqlInsert(RichSqlInsert insert) {
    // Get sink table name.
    List<String> targetTablePath = ((SqlIdentifier) insert.getTargetTableID()).names;
    // Get sink table hints.
    HintStrategyTable hintStrategyTable =
            flinkPlanner.config().getSqlToRelConverterConfig().getHintStrategyTable();
    List<RelHint> tableHints = SqlUtil.getRelHint(hintStrategyTable, insert.getTableHints());
    Map<String, String> dynamicOptions = FlinkHints.getHintedOptions(tableHints);
    UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(targetTablePath);
    ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
    ContextResolvedTable contextResolvedTable = catalogManager.getTableOrError(identifier);
    PlannerQueryOperation query =
            (PlannerQueryOperation)
                    convertValidatedSqlNodeOrFail(
                            flinkPlanner, catalogManager, insert.getSource());
    return new SinkModifyOperation(
            contextResolvedTable,
            query,
            insert.getStaticPartitionKVs(),
            insert.isOverwrite(),
            dynamicOptions);
}
use of org.apache.flink.table.catalog.ContextResolvedTable in project flink by apache.
the class SqlCreateTableConverter method lookupLikeSourceTable.
private CatalogTable lookupLikeSourceTable(SqlTableLike sqlTableLike) {
    UnresolvedIdentifier unresolvedIdentifier =
            UnresolvedIdentifier.of(sqlTableLike.getSourceTable().names);
    ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
    ContextResolvedTable lookupResult =
            catalogManager
                    .getTable(identifier)
                    .orElseThrow(
                            () ->
                                    new ValidationException(
                                            String.format(
                                                    "Source table '%s' of the LIKE clause not found in the catalog, at %s",
                                                    identifier,
                                                    sqlTableLike.getSourceTable().getParserPosition())));
    if (!(lookupResult.getTable() instanceof CatalogTable)) {
        throw new ValidationException(
                String.format(
                        "Source table '%s' of the LIKE clause can not be a VIEW, at %s",
                        identifier, sqlTableLike.getSourceTable().getParserPosition()));
    }
    return lookupResult.getTable();
}
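For illustration, this lookup is what backs DDL such as the following (table names assumed); if base_table resolved to a view, the ValidationException above would be thrown instead.

// Hypothetical DDL exercising the LIKE-clause lookup; tableEnv is an
// assumed TableEnvironment instance.
tableEnv.executeSql(
        "CREATE TABLE derived_table (extra_field STRING) "
                + "WITH ('connector' = 'blackhole') "
                + "LIKE cat1.db1.base_table");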