Example 11 with ContextResolvedTable

Use of org.apache.flink.table.catalog.ContextResolvedTable in project flink by apache.

From class SqlToOperationConverterTest, method checkAlterTableCompact.

private void checkAlterTableCompact(Operation operation, Map<String, String> staticPartitions) {
    // ALTER TABLE ... COMPACT on a managed table is planned as a self-insert:
    // the sink side is a SinkModifyOperation on the table itself ...
    assertThat(operation).isInstanceOf(SinkModifyOperation.class);
    SinkModifyOperation modifyOperation = (SinkModifyOperation) operation;
    assertThat(modifyOperation.getStaticPartitions())
            .containsExactlyInAnyOrderEntriesOf(staticPartitions);
    assertThat(modifyOperation.isOverwrite()).isFalse();
    // the dynamic options must contain the entry enriched by the test factory
    assertThat(modifyOperation.getDynamicOptions())
            .containsEntry(TestManagedTableFactory.ENRICHED_KEY, TestManagedTableFactory.ENRICHED_VALUE);
    ContextResolvedTable contextResolvedTable = modifyOperation.getContextResolvedTable();
    assertThat(contextResolvedTable.getIdentifier())
            .isEqualTo(ObjectIdentifier.of("cat1", "db1", "tb1"));
    // ... and the source side is a plain scan of the same table, carrying both
    // the user-specified option ("k" -> "v") and the enriched one
    assertThat(modifyOperation.getChild()).isInstanceOf(SourceQueryOperation.class);
    SourceQueryOperation child = (SourceQueryOperation) modifyOperation.getChild();
    assertThat(child.getChildren()).isEmpty();
    assertThat(child.getDynamicOptions()).containsEntry("k", "v");
    assertThat(child.getDynamicOptions())
            .containsEntry(TestManagedTableFactory.ENRICHED_KEY, TestManagedTableFactory.ENRICHED_VALUE);
}
Also used: SinkModifyOperation(org.apache.flink.table.operations.SinkModifyOperation), SourceQueryOperation(org.apache.flink.table.operations.SourceQueryOperation), ContextResolvedTable(org.apache.flink.table.catalog.ContextResolvedTable)
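
For context, the Operation checked above typically comes from parsing an ALTER TABLE ... COMPACT statement. A minimal sketch of a caller, assuming a parse helper that turns SQL text into an Operation (the helper name and the partition spec are illustrative, not from the source):

// Hypothetical driver for the assertion helper above. `parse` stands in for
// the test class's SQL-to-Operation entry point; the partition spec is made up.
Map<String, String> staticPartitions = new HashMap<>();
staticPartitions.put("dt", "2022-01-01");

// Compaction is planned as a self-insert: cat1.db1.tb1 is both the scan
// source (SourceQueryOperation) and the sink (SinkModifyOperation).
Operation operation =
        parse("ALTER TABLE cat1.db1.tb1 PARTITION (dt = '2022-01-01') COMPACT");
checkAlterTableCompact(operation, staticPartitions);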

Example 12 with ContextResolvedTable

Use of org.apache.flink.table.catalog.ContextResolvedTable in project flink by apache.

From class CatalogSourceTable, method toRel.

@Override
public RelNode toRel(ToRelContext toRelContext) {
    final RelOptCluster cluster = toRelContext.getCluster();
    final List<RelHint> hints = toRelContext.getTableHints();
    final FlinkContext context = ShortcutUtils.unwrapContext(cluster);
    final FlinkRelBuilder relBuilder = FlinkRelBuilder.of(cluster, relOptSchema);
    // finalize catalog table with option hints
    final Map<String, String> hintedOptions = FlinkHints.getHintedOptions(hints);
    final ContextResolvedTable catalogTable = computeContextResolvedTable(context, hintedOptions);
    // create table source
    final DynamicTableSource tableSource = createDynamicTableSource(context, catalogTable.getResolvedTable());
    // prepare table source and convert to RelNode
    return DynamicSourceUtils.convertSourceToRel(
            !schemaTable.isStreamingMode(),
            context.getTableConfig().getConfiguration(),
            relBuilder,
            schemaTable.getContextResolvedTable(),
            schemaTable.getStatistic(),
            hints,
            tableSource);
}
Also used: RelOptCluster(org.apache.calcite.plan.RelOptCluster), FlinkRelBuilder(org.apache.flink.table.planner.calcite.FlinkRelBuilder), ContextResolvedTable(org.apache.flink.table.catalog.ContextResolvedTable), RelHint(org.apache.calcite.rel.hint.RelHint), FlinkContext(org.apache.flink.table.planner.calcite.FlinkContext), DynamicTableSource(org.apache.flink.table.connector.source.DynamicTableSource)
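
The hinted options that toRel consumes originate from an OPTIONS hint on the referenced table. A minimal sketch of the user-facing side, assuming a TableEnvironment named tableEnv and a datagen table (illustrative only; depending on the Flink version, table.dynamic-table-options.enabled may have to be enabled first):

// Illustrative only: the OPTIONS hint below reaches toRel() as a RelHint via
// toRelContext.getTableHints(); FlinkHints.getHintedOptions(hints) flattens it
// into {rows-per-second=5}, which computeContextResolvedTable lays over the
// options stored in the catalog before createDynamicTableSource runs.
tableEnv.executeSql("CREATE TABLE src (id BIGINT) WITH ('connector' = 'datagen')");
tableEnv.sqlQuery("SELECT * FROM src /*+ OPTIONS('rows-per-second' = '5') */");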

Example 13 with ContextResolvedTable

Use of org.apache.flink.table.catalog.ContextResolvedTable in project flink by apache.

From class SqlToOperationConverter, method convertAlterTable.

/**
 * Converts an ALTER TABLE statement.
 */
private Operation convertAlterTable(SqlAlterTable sqlAlterTable) {
    UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(sqlAlterTable.fullTableName());
    ObjectIdentifier tableIdentifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
    Optional<ContextResolvedTable> optionalCatalogTable = catalogManager.getTable(tableIdentifier);
    if (!optionalCatalogTable.isPresent() || optionalCatalogTable.get().isTemporary()) {
        throw new ValidationException(String.format("Table %s doesn't exist or is a temporary table.", tableIdentifier));
    }
    CatalogBaseTable baseTable = optionalCatalogTable.get().getTable();
    if (baseTable instanceof CatalogView) {
        throw new ValidationException("ALTER TABLE for a view is not allowed");
    }
    if (sqlAlterTable instanceof SqlAlterTableRename) {
        UnresolvedIdentifier newUnresolvedIdentifier = UnresolvedIdentifier.of(((SqlAlterTableRename) sqlAlterTable).fullNewTableName());
        ObjectIdentifier newTableIdentifier = catalogManager.qualifyIdentifier(newUnresolvedIdentifier);
        return new AlterTableRenameOperation(tableIdentifier, newTableIdentifier);
    } else if (sqlAlterTable instanceof SqlAlterTableOptions) {
        return convertAlterTableOptions(tableIdentifier, (CatalogTable) baseTable, (SqlAlterTableOptions) sqlAlterTable);
    } else if (sqlAlterTable instanceof SqlAlterTableReset) {
        return convertAlterTableReset(tableIdentifier, (CatalogTable) baseTable, (SqlAlterTableReset) sqlAlterTable);
    } else if (sqlAlterTable instanceof SqlAlterTableAddConstraint) {
        SqlTableConstraint constraint = ((SqlAlterTableAddConstraint) sqlAlterTable).getConstraint();
        validateTableConstraint(constraint);
        TableSchema oriSchema =
                TableSchema.fromResolvedSchema(
                        baseTable.getUnresolvedSchema().resolve(catalogManager.getSchemaResolver()));
        // Sanity check for constraint.
        TableSchema.Builder builder = TableSchemaUtils.builderWithGivenSchema(oriSchema);
        if (constraint.getConstraintName().isPresent()) {
            builder.primaryKey(constraint.getConstraintName().get(), constraint.getColumnNames());
        } else {
            builder.primaryKey(constraint.getColumnNames());
        }
        builder.build();
        return new AlterTableAddConstraintOperation(tableIdentifier, constraint.getConstraintName().orElse(null), constraint.getColumnNames());
    } else if (sqlAlterTable instanceof SqlAlterTableDropConstraint) {
        SqlAlterTableDropConstraint dropConstraint = ((SqlAlterTableDropConstraint) sqlAlterTable);
        String constraintName = dropConstraint.getConstraintName().getSimple();
        TableSchema oriSchema =
                TableSchema.fromResolvedSchema(
                        baseTable.getUnresolvedSchema().resolve(catalogManager.getSchemaResolver()));
        if (!oriSchema.getPrimaryKey().filter(pk -> pk.getName().equals(constraintName)).isPresent()) {
            throw new ValidationException(String.format("CONSTRAINT [%s] does not exist", constraintName));
        }
        return new AlterTableDropConstraintOperation(tableIdentifier, constraintName);
    } else if (sqlAlterTable instanceof SqlAddReplaceColumns) {
        return OperationConverterUtils.convertAddReplaceColumns(
                tableIdentifier,
                (SqlAddReplaceColumns) sqlAlterTable,
                (CatalogTable) baseTable,
                flinkPlanner.getOrCreateSqlValidator());
    } else if (sqlAlterTable instanceof SqlChangeColumn) {
        return OperationConverterUtils.convertChangeColumn(
                tableIdentifier,
                (SqlChangeColumn) sqlAlterTable,
                (CatalogTable) baseTable,
                flinkPlanner.getOrCreateSqlValidator());
    } else if (sqlAlterTable instanceof SqlAddPartitions) {
        List<CatalogPartitionSpec> specs = new ArrayList<>();
        List<CatalogPartition> partitions = new ArrayList<>();
        SqlAddPartitions addPartitions = (SqlAddPartitions) sqlAlterTable;
        for (int i = 0; i < addPartitions.getPartSpecs().size(); i++) {
            specs.add(new CatalogPartitionSpec(addPartitions.getPartitionKVs(i)));
            Map<String, String> props = OperationConverterUtils.extractProperties(addPartitions.getPartProps().get(i));
            partitions.add(new CatalogPartitionImpl(props, null));
        }
        return new AddPartitionsOperation(tableIdentifier, addPartitions.ifNotExists(), specs, partitions);
    } else if (sqlAlterTable instanceof SqlDropPartitions) {
        SqlDropPartitions dropPartitions = (SqlDropPartitions) sqlAlterTable;
        List<CatalogPartitionSpec> specs = new ArrayList<>();
        for (int i = 0; i < dropPartitions.getPartSpecs().size(); i++) {
            specs.add(new CatalogPartitionSpec(dropPartitions.getPartitionKVs(i)));
        }
        return new DropPartitionsOperation(tableIdentifier, dropPartitions.ifExists(), specs);
    } else if (sqlAlterTable instanceof SqlAlterTableCompact) {
        return convertAlterTableCompact(tableIdentifier, optionalCatalogTable.get(), (SqlAlterTableCompact) sqlAlterTable);
    } else {
        throw new ValidationException(
                String.format(
                        "[%s] needs to implement",
                        sqlAlterTable.toSqlString(CalciteSqlDialect.DEFAULT)));
    }
}
Also used:
ValidationException(org.apache.flink.table.api.ValidationException), TableSchema(org.apache.flink.table.api.TableSchema)
SqlAlterTableReset(org.apache.flink.sql.parser.ddl.SqlAlterTableReset), SqlAlterTableRename(org.apache.flink.sql.parser.ddl.SqlAlterTableRename), SqlAlterTableOptions(org.apache.flink.sql.parser.ddl.SqlAlterTableOptions)
SqlAlterTableAddConstraint(org.apache.flink.sql.parser.ddl.SqlAlterTableAddConstraint), SqlAlterTableDropConstraint(org.apache.flink.sql.parser.ddl.SqlAlterTableDropConstraint), SqlAlterTableCompact(org.apache.flink.sql.parser.ddl.SqlAlterTableCompact)
SqlAddReplaceColumns(org.apache.flink.sql.parser.ddl.SqlAddReplaceColumns), SqlChangeColumn(org.apache.flink.sql.parser.ddl.SqlChangeColumn), SqlAddPartitions(org.apache.flink.sql.parser.ddl.SqlAddPartitions), SqlDropPartitions(org.apache.flink.sql.parser.ddl.SqlDropPartitions), SqlTableConstraint(org.apache.flink.sql.parser.ddl.constraint.SqlTableConstraint)
AlterTableRenameOperation(org.apache.flink.table.operations.ddl.AlterTableRenameOperation), AlterTableAddConstraintOperation(org.apache.flink.table.operations.ddl.AlterTableAddConstraintOperation), AlterTableDropConstraintOperation(org.apache.flink.table.operations.ddl.AlterTableDropConstraintOperation), AddPartitionsOperation(org.apache.flink.table.operations.ddl.AddPartitionsOperation), DropPartitionsOperation(org.apache.flink.table.operations.ddl.DropPartitionsOperation)
CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable), CatalogTable(org.apache.flink.table.catalog.CatalogTable), ResolvedCatalogTable(org.apache.flink.table.catalog.ResolvedCatalogTable), CatalogView(org.apache.flink.table.catalog.CatalogView), ContextResolvedTable(org.apache.flink.table.catalog.ContextResolvedTable)
CatalogPartitionSpec(org.apache.flink.table.catalog.CatalogPartitionSpec), CatalogPartitionImpl(org.apache.flink.table.catalog.CatalogPartitionImpl), ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier), UnresolvedIdentifier(org.apache.flink.table.catalog.UnresolvedIdentifier)
ArrayList(java.util.ArrayList), List(java.util.List), Map(java.util.Map), HashMap(java.util.HashMap), LinkedHashMap(java.util.LinkedHashMap)
SqlNodeList(org.apache.calcite.sql.SqlNodeList), RelHint(org.apache.calcite.rel.hint.RelHint)
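
Each branch of the dispatch above handles one ALTER TABLE variant. A few representative statements, again assuming a hypothetical parse helper (table and constraint names are illustrative):

// Statements and the branch of convertAlterTable they exercise;
// `parse` is an assumed SQL-to-Operation helper, not a real API.
parse("ALTER TABLE tb1 RENAME TO tb2");       // -> AlterTableRenameOperation
parse("ALTER TABLE tb1 SET ('k' = 'v')");     // -> convertAlterTableOptions
parse("ALTER TABLE tb1 RESET ('k')");         // -> convertAlterTableReset
parse("ALTER TABLE tb1 ADD CONSTRAINT ct PRIMARY KEY (id) NOT ENFORCED");
                                              // -> AlterTableAddConstraintOperation
parse("ALTER TABLE tb1 DROP CONSTRAINT ct");  // -> AlterTableDropConstraintOperation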

Example 14 with ContextResolvedTable

Use of org.apache.flink.table.catalog.ContextResolvedTable in project flink by apache.

From class SqlToOperationConverter, method convertSqlInsert.

/**
 * Converts an INSERT INTO statement.
 */
private Operation convertSqlInsert(RichSqlInsert insert) {
    // Get sink table name.
    List<String> targetTablePath = ((SqlIdentifier) insert.getTargetTableID()).names;
    // Get sink table hints.
    HintStrategyTable hintStrategyTable = flinkPlanner.config().getSqlToRelConverterConfig().getHintStrategyTable();
    List<RelHint> tableHints = SqlUtil.getRelHint(hintStrategyTable, insert.getTableHints());
    Map<String, String> dynamicOptions = FlinkHints.getHintedOptions(tableHints);
    UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(targetTablePath);
    ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
    ContextResolvedTable contextResolvedTable = catalogManager.getTableOrError(identifier);
    PlannerQueryOperation query =
            (PlannerQueryOperation)
                    convertValidatedSqlNodeOrFail(flinkPlanner, catalogManager, insert.getSource());
    return new SinkModifyOperation(
            contextResolvedTable,
            query,
            insert.getStaticPartitionKVs(),
            insert.isOverwrite(),
            dynamicOptions);
}
Also used: SinkModifyOperation(org.apache.flink.table.operations.SinkModifyOperation), UnresolvedIdentifier(org.apache.flink.table.catalog.UnresolvedIdentifier), ContextResolvedTable(org.apache.flink.table.catalog.ContextResolvedTable), SqlIdentifier(org.apache.calcite.sql.SqlIdentifier), RelHint(org.apache.calcite.rel.hint.RelHint), HintStrategyTable(org.apache.calcite.rel.hint.HintStrategyTable), ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier)
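
For orientation, a sketch of how the pieces of an INSERT statement map onto the SinkModifyOperation built above, assuming a TableEnvironment named tableEnv (table names, columns, and the option key are illustrative):

// Illustrative statements and the SinkModifyOperation fields they populate.
// PARTITION clause -> getStaticPartitions(); OVERWRITE -> isOverwrite() == true:
tableEnv.executeSql(
        "INSERT OVERWRITE tbl PARTITION (dt = '2022-01-01') SELECT id, name FROM src");
// An OPTIONS hint on the target table -> getDynamicOptions():
tableEnv.executeSql(
        "INSERT INTO tbl /*+ OPTIONS('sink.parallelism' = '4') */ SELECT id, name FROM src");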

Example 15 with ContextResolvedTable

Use of org.apache.flink.table.catalog.ContextResolvedTable in project flink by apache.

From class SqlCreateTableConverter, method lookupLikeSourceTable.

private CatalogTable lookupLikeSourceTable(SqlTableLike sqlTableLike) {
    UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(sqlTableLike.getSourceTable().names);
    ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
    ContextResolvedTable lookupResult =
            catalogManager
                    .getTable(identifier)
                    .orElseThrow(
                            () ->
                                    new ValidationException(
                                            String.format(
                                                    "Source table '%s' of the LIKE clause not found in the catalog, at %s",
                                                    identifier,
                                                    sqlTableLike.getSourceTable().getParserPosition())));
    if (!(lookupResult.getTable() instanceof CatalogTable)) {
        throw new ValidationException(
                String.format(
                        "Source table '%s' of the LIKE clause can not be a VIEW, at %s",
                        identifier,
                        sqlTableLike.getSourceTable().getParserPosition()));
    }
    return lookupResult.getTable();
}
Also used: ValidationException(org.apache.flink.table.api.ValidationException), UnresolvedIdentifier(org.apache.flink.table.catalog.UnresolvedIdentifier), ContextResolvedTable(org.apache.flink.table.catalog.ContextResolvedTable), CatalogTable(org.apache.flink.table.catalog.CatalogTable), ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier)
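
For reference, the LIKE clause that this lookup serves looks as follows; lookupLikeSourceTable resolves the source table through the CatalogManager and rejects anything that is not a CatalogTable. A minimal sketch, assuming a TableEnvironment named tableEnv (names and connectors are illustrative):

// Illustrative only: 'src' must resolve to a CatalogTable, not a view.
tableEnv.executeSql(
        "CREATE TABLE src (id BIGINT, name STRING) WITH ('connector' = 'datagen')");
tableEnv.executeSql(
        "CREATE TABLE dst WITH ('connector' = 'blackhole') LIKE src (EXCLUDING OPTIONS)");
// A view as the LIKE source fails the instanceof CatalogTable check above:
tableEnv.executeSql("CREATE VIEW v AS SELECT * FROM src");
// tableEnv.executeSql("CREATE TABLE bad LIKE v");  // -> ValidationException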

Aggregations

ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable): 15
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 8
ValidationException (org.apache.flink.table.api.ValidationException): 7
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 6
ResolvedCatalogTable (org.apache.flink.table.catalog.ResolvedCatalogTable): 6
UnresolvedIdentifier (org.apache.flink.table.catalog.UnresolvedIdentifier): 5
Map (java.util.Map): 4
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 4
HashMap (java.util.HashMap): 3
List (java.util.List): 3
RelHint (org.apache.calcite.rel.hint.RelHint): 3
ObjectPath (org.apache.flink.table.catalog.ObjectPath): 3
IOException (java.io.IOException): 2
ArrayList (java.util.ArrayList): 2
LinkedHashMap (java.util.LinkedHashMap): 2
Optional (java.util.Optional): 2
ResolvedSchema (org.apache.flink.table.catalog.ResolvedSchema): 2
SinkModifyOperation (org.apache.flink.table.operations.SinkModifyOperation): 2
AlterViewAsOperation (org.apache.flink.table.operations.ddl.AlterViewAsOperation): 2
AlterViewPropertiesOperation (org.apache.flink.table.operations.ddl.AlterViewPropertiesOperation): 2