Search in sources :

Example 6 with Catalog

use of org.apache.flink.table.catalog.Catalog in project flink by apache.

The following snippet is from the class TableEnvironmentImpl, method executeInternal.

// Dispatches a resolved Operation to its concrete execution path. DDL operations
// (create/drop/alter of tables, views, databases, partitions, functions, catalogs,
// modules) are applied through the CatalogManager or directly on the target Catalog;
// SHOW/DESCRIBE/EXPLAIN operations build an in-memory TableResult; ModifyOperation,
// StatementSetOperation and QueryOperation delegate to the job-submitting overloads.
// Any operation type not matched below is rejected with a TableException.
@Override
public TableResultInternal executeInternal(Operation operation) {
    // INSERT-like operations and statement sets run as a Flink job via the
    // list-accepting overload of executeInternal.
    if (operation instanceof ModifyOperation) {
        return executeInternal(Collections.singletonList((ModifyOperation) operation));
    } else if (operation instanceof StatementSetOperation) {
        return executeInternal(((StatementSetOperation) operation).getOperations());
    } else if (operation instanceof CreateTableOperation) {
        // Temporary tables live only in the session; permanent ones go to the catalog.
        CreateTableOperation createTableOperation = (CreateTableOperation) operation;
        if (createTableOperation.isTemporary()) {
            catalogManager.createTemporaryTable(createTableOperation.getCatalogTable(), createTableOperation.getTableIdentifier(), createTableOperation.isIgnoreIfExists());
        } else {
            catalogManager.createTable(createTableOperation.getCatalogTable(), createTableOperation.getTableIdentifier(), createTableOperation.isIgnoreIfExists());
        }
        return TableResultImpl.TABLE_RESULT_OK;
    } else if (operation instanceof DropTableOperation) {
        DropTableOperation dropTableOperation = (DropTableOperation) operation;
        if (dropTableOperation.isTemporary()) {
            catalogManager.dropTemporaryTable(dropTableOperation.getTableIdentifier(), dropTableOperation.isIfExists());
        } else {
            catalogManager.dropTable(dropTableOperation.getTableIdentifier(), dropTableOperation.isIfExists());
        }
        return TableResultImpl.TABLE_RESULT_OK;
    } else if (operation instanceof AlterTableOperation) {
        AlterTableOperation alterTableOperation = (AlterTableOperation) operation;
        // Resolves the catalog by name; throws if it is not registered.
        Catalog catalog = getCatalogOrThrowException(alterTableOperation.getTableIdentifier().getCatalogName());
        String exMsg = getDDLOpExecuteErrorMsg(alterTableOperation.asSummaryString());
        try {
            if (alterTableOperation instanceof AlterTableRenameOperation) {
                // NOTE(review): this branch (and several below) casts `operation` rather
                // than `alterTableOperation` — same object, so the result is identical,
                // but the style is inconsistent with the later branches.
                AlterTableRenameOperation alterTableRenameOp = (AlterTableRenameOperation) operation;
                // Rename goes straight to the Catalog; final `false` = do not ignore a missing table.
                catalog.renameTable(alterTableRenameOp.getTableIdentifier().toObjectPath(), alterTableRenameOp.getNewTableIdentifier().getObjectName(), false);
            } else if (alterTableOperation instanceof AlterTableOptionsOperation) {
                AlterTableOptionsOperation alterTablePropertiesOp = (AlterTableOptionsOperation) operation;
                catalogManager.alterTable(alterTablePropertiesOp.getCatalogTable(), alterTablePropertiesOp.getTableIdentifier(), false);
            } else if (alterTableOperation instanceof AlterTableAddConstraintOperation) {
                AlterTableAddConstraintOperation addConstraintOP = (AlterTableAddConstraintOperation) operation;
                // NOTE(review): Optional.get() without a presence check — a missing table
                // surfaces as NoSuchElementException and is rewrapped as TableException below.
                CatalogTable oriTable = catalogManager.getTable(addConstraintOP.getTableIdentifier()).get().getTable();
                // Rebuild the schema with the primary key added (named or unnamed),
                // then write back a new CatalogTable preserving keys/options/comment.
                TableSchema.Builder builder = TableSchemaUtils.builderWithGivenSchema(oriTable.getSchema());
                if (addConstraintOP.getConstraintName().isPresent()) {
                    builder.primaryKey(addConstraintOP.getConstraintName().get(), addConstraintOP.getColumnNames());
                } else {
                    builder.primaryKey(addConstraintOP.getColumnNames());
                }
                CatalogTable newTable = new CatalogTableImpl(builder.build(), oriTable.getPartitionKeys(), oriTable.getOptions(), oriTable.getComment());
                catalogManager.alterTable(newTable, addConstraintOP.getTableIdentifier(), false);
            } else if (alterTableOperation instanceof AlterTableDropConstraintOperation) {
                AlterTableDropConstraintOperation dropConstraintOperation = (AlterTableDropConstraintOperation) operation;
                CatalogTable oriTable = catalogManager.getTable(dropConstraintOperation.getTableIdentifier()).get().getTable();
                CatalogTable newTable = new CatalogTableImpl(TableSchemaUtils.dropConstraint(oriTable.getSchema(), dropConstraintOperation.getConstraintName()), oriTable.getPartitionKeys(), oriTable.getOptions(), oriTable.getComment());
                catalogManager.alterTable(newTable, dropConstraintOperation.getTableIdentifier(), false);
            } else if (alterTableOperation instanceof AlterPartitionPropertiesOperation) {
                AlterPartitionPropertiesOperation alterPartPropsOp = (AlterPartitionPropertiesOperation) operation;
                catalog.alterPartition(alterPartPropsOp.getTableIdentifier().toObjectPath(), alterPartPropsOp.getPartitionSpec(), alterPartPropsOp.getCatalogPartition(), false);
            } else if (alterTableOperation instanceof AlterTableSchemaOperation) {
                AlterTableSchemaOperation alterTableSchemaOperation = (AlterTableSchemaOperation) alterTableOperation;
                catalogManager.alterTable(alterTableSchemaOperation.getCatalogTable(), alterTableSchemaOperation.getTableIdentifier(), false);
            } else if (alterTableOperation instanceof AddPartitionsOperation) {
                AddPartitionsOperation addPartitionsOperation = (AddPartitionsOperation) alterTableOperation;
                // specs and partitions are parallel lists: specs.get(i) pairs with
                // partitions.get(i) — presumably guaranteed by the operation's builder
                // (TODO confirm; an index mismatch here would throw at runtime).
                List<CatalogPartitionSpec> specs = addPartitionsOperation.getPartitionSpecs();
                List<CatalogPartition> partitions = addPartitionsOperation.getCatalogPartitions();
                boolean ifNotExists = addPartitionsOperation.ifNotExists();
                ObjectPath tablePath = addPartitionsOperation.getTableIdentifier().toObjectPath();
                for (int i = 0; i < specs.size(); i++) {
                    catalog.createPartition(tablePath, specs.get(i), partitions.get(i), ifNotExists);
                }
            } else if (alterTableOperation instanceof DropPartitionsOperation) {
                DropPartitionsOperation dropPartitionsOperation = (DropPartitionsOperation) alterTableOperation;
                ObjectPath tablePath = dropPartitionsOperation.getTableIdentifier().toObjectPath();
                boolean ifExists = dropPartitionsOperation.ifExists();
                for (CatalogPartitionSpec spec : dropPartitionsOperation.getPartitionSpecs()) {
                    catalog.dropPartition(tablePath, spec, ifExists);
                }
            }
            // NOTE(review): an AlterTableOperation subtype not matched above falls through
            // here silently and still reports OK.
            return TableResultImpl.TABLE_RESULT_OK;
        } catch (TableAlreadyExistException | TableNotExistException e) {
            // User-addressable failures become ValidationException; everything else TableException.
            throw new ValidationException(exMsg, e);
        } catch (Exception e) {
            throw new TableException(exMsg, e);
        }
    } else if (operation instanceof CreateViewOperation) {
        // Views reuse the table paths in CatalogManager.
        CreateViewOperation createViewOperation = (CreateViewOperation) operation;
        if (createViewOperation.isTemporary()) {
            catalogManager.createTemporaryTable(createViewOperation.getCatalogView(), createViewOperation.getViewIdentifier(), createViewOperation.isIgnoreIfExists());
        } else {
            catalogManager.createTable(createViewOperation.getCatalogView(), createViewOperation.getViewIdentifier(), createViewOperation.isIgnoreIfExists());
        }
        return TableResultImpl.TABLE_RESULT_OK;
    } else if (operation instanceof DropViewOperation) {
        DropViewOperation dropViewOperation = (DropViewOperation) operation;
        if (dropViewOperation.isTemporary()) {
            catalogManager.dropTemporaryView(dropViewOperation.getViewIdentifier(), dropViewOperation.isIfExists());
        } else {
            catalogManager.dropView(dropViewOperation.getViewIdentifier(), dropViewOperation.isIfExists());
        }
        return TableResultImpl.TABLE_RESULT_OK;
    } else if (operation instanceof AlterViewOperation) {
        AlterViewOperation alterViewOperation = (AlterViewOperation) operation;
        Catalog catalog = getCatalogOrThrowException(alterViewOperation.getViewIdentifier().getCatalogName());
        String exMsg = getDDLOpExecuteErrorMsg(alterViewOperation.asSummaryString());
        try {
            if (alterViewOperation instanceof AlterViewRenameOperation) {
                AlterViewRenameOperation alterTableRenameOp = (AlterViewRenameOperation) operation;
                catalog.renameTable(alterTableRenameOp.getViewIdentifier().toObjectPath(), alterTableRenameOp.getNewViewIdentifier().getObjectName(), false);
            } else if (alterViewOperation instanceof AlterViewPropertiesOperation) {
                AlterViewPropertiesOperation alterTablePropertiesOp = (AlterViewPropertiesOperation) operation;
                catalogManager.alterTable(alterTablePropertiesOp.getCatalogView(), alterTablePropertiesOp.getViewIdentifier(), false);
            } else if (alterViewOperation instanceof AlterViewAsOperation) {
                AlterViewAsOperation alterViewAsOperation = (AlterViewAsOperation) alterViewOperation;
                catalogManager.alterTable(alterViewAsOperation.getNewView(), alterViewAsOperation.getViewIdentifier(), false);
            }
            return TableResultImpl.TABLE_RESULT_OK;
        } catch (TableAlreadyExistException | TableNotExistException e) {
            throw new ValidationException(exMsg, e);
        } catch (Exception e) {
            throw new TableException(exMsg, e);
        }
    } else if (operation instanceof CreateDatabaseOperation) {
        CreateDatabaseOperation createDatabaseOperation = (CreateDatabaseOperation) operation;
        Catalog catalog = getCatalogOrThrowException(createDatabaseOperation.getCatalogName());
        String exMsg = getDDLOpExecuteErrorMsg(createDatabaseOperation.asSummaryString());
        try {
            catalog.createDatabase(createDatabaseOperation.getDatabaseName(), createDatabaseOperation.getCatalogDatabase(), createDatabaseOperation.isIgnoreIfExists());
            return TableResultImpl.TABLE_RESULT_OK;
        } catch (DatabaseAlreadyExistException e) {
            throw new ValidationException(exMsg, e);
        } catch (Exception e) {
            throw new TableException(exMsg, e);
        }
    } else if (operation instanceof DropDatabaseOperation) {
        DropDatabaseOperation dropDatabaseOperation = (DropDatabaseOperation) operation;
        Catalog catalog = getCatalogOrThrowException(dropDatabaseOperation.getCatalogName());
        String exMsg = getDDLOpExecuteErrorMsg(dropDatabaseOperation.asSummaryString());
        try {
            catalog.dropDatabase(dropDatabaseOperation.getDatabaseName(), dropDatabaseOperation.isIfExists(), dropDatabaseOperation.isCascade());
            return TableResultImpl.TABLE_RESULT_OK;
        } catch (DatabaseNotExistException | DatabaseNotEmptyException e) {
            throw new ValidationException(exMsg, e);
        } catch (Exception e) {
            throw new TableException(exMsg, e);
        }
    } else if (operation instanceof AlterDatabaseOperation) {
        AlterDatabaseOperation alterDatabaseOperation = (AlterDatabaseOperation) operation;
        Catalog catalog = getCatalogOrThrowException(alterDatabaseOperation.getCatalogName());
        String exMsg = getDDLOpExecuteErrorMsg(alterDatabaseOperation.asSummaryString());
        try {
            // `false` = fail if the database does not exist.
            catalog.alterDatabase(alterDatabaseOperation.getDatabaseName(), alterDatabaseOperation.getCatalogDatabase(), false);
            return TableResultImpl.TABLE_RESULT_OK;
        } catch (DatabaseNotExistException e) {
            throw new ValidationException(exMsg, e);
        } catch (Exception e) {
            throw new TableException(exMsg, e);
        }
    // Function and catalog DDL delegates to dedicated private helpers.
    } else if (operation instanceof CreateCatalogFunctionOperation) {
        return createCatalogFunction((CreateCatalogFunctionOperation) operation);
    } else if (operation instanceof CreateTempSystemFunctionOperation) {
        return createSystemFunction((CreateTempSystemFunctionOperation) operation);
    } else if (operation instanceof DropCatalogFunctionOperation) {
        return dropCatalogFunction((DropCatalogFunctionOperation) operation);
    } else if (operation instanceof DropTempSystemFunctionOperation) {
        return dropSystemFunction((DropTempSystemFunctionOperation) operation);
    } else if (operation instanceof AlterCatalogFunctionOperation) {
        return alterCatalogFunction((AlterCatalogFunctionOperation) operation);
    } else if (operation instanceof CreateCatalogOperation) {
        return createCatalog((CreateCatalogOperation) operation);
    } else if (operation instanceof DropCatalogOperation) {
        DropCatalogOperation dropCatalogOperation = (DropCatalogOperation) operation;
        String exMsg = getDDLOpExecuteErrorMsg(dropCatalogOperation.asSummaryString());
        try {
            catalogManager.unregisterCatalog(dropCatalogOperation.getCatalogName(), dropCatalogOperation.isIfExists());
            return TableResultImpl.TABLE_RESULT_OK;
        } catch (CatalogException e) {
            throw new ValidationException(exMsg, e);
        }
    } else if (operation instanceof LoadModuleOperation) {
        return loadModule((LoadModuleOperation) operation);
    } else if (operation instanceof UnloadModuleOperation) {
        return unloadModule((UnloadModuleOperation) operation);
    } else if (operation instanceof UseModulesOperation) {
        return useModules((UseModulesOperation) operation);
    } else if (operation instanceof UseCatalogOperation) {
        UseCatalogOperation useCatalogOperation = (UseCatalogOperation) operation;
        catalogManager.setCurrentCatalog(useCatalogOperation.getCatalogName());
        return TableResultImpl.TABLE_RESULT_OK;
    } else if (operation instanceof UseDatabaseOperation) {
        // USE db switches both the current catalog and the current database.
        UseDatabaseOperation useDatabaseOperation = (UseDatabaseOperation) operation;
        catalogManager.setCurrentCatalog(useDatabaseOperation.getCatalogName());
        catalogManager.setCurrentDatabase(useDatabaseOperation.getDatabaseName());
        return TableResultImpl.TABLE_RESULT_OK;
    // SHOW/DESCRIBE operations: build a result set in memory, no job submission.
    } else if (operation instanceof ShowCatalogsOperation) {
        return buildShowResult("catalog name", listCatalogs());
    } else if (operation instanceof ShowCreateTableOperation) {
        ShowCreateTableOperation showCreateTableOperation = (ShowCreateTableOperation) operation;
        ContextResolvedTable table = catalogManager.getTable(showCreateTableOperation.getTableIdentifier()).orElseThrow(() -> new ValidationException(String.format("Could not execute SHOW CREATE TABLE. Table with identifier %s does not exist.", showCreateTableOperation.getTableIdentifier().asSerializableString())));
        return TableResultImpl.builder().resultKind(ResultKind.SUCCESS_WITH_CONTENT).schema(ResolvedSchema.of(Column.physical("result", DataTypes.STRING()))).data(Collections.singletonList(Row.of(ShowCreateUtil.buildShowCreateTableRow(table.getResolvedTable(), showCreateTableOperation.getTableIdentifier(), table.isTemporary())))).build();
    } else if (operation instanceof ShowCreateViewOperation) {
        ShowCreateViewOperation showCreateViewOperation = (ShowCreateViewOperation) operation;
        final ContextResolvedTable table = catalogManager.getTable(showCreateViewOperation.getViewIdentifier()).orElseThrow(() -> new ValidationException(String.format("Could not execute SHOW CREATE VIEW. View with identifier %s does not exist.", showCreateViewOperation.getViewIdentifier().asSerializableString())));
        return TableResultImpl.builder().resultKind(ResultKind.SUCCESS_WITH_CONTENT).schema(ResolvedSchema.of(Column.physical("result", DataTypes.STRING()))).data(Collections.singletonList(Row.of(ShowCreateUtil.buildShowCreateViewRow(table.getResolvedTable(), showCreateViewOperation.getViewIdentifier(), table.isTemporary())))).build();
    } else if (operation instanceof ShowCurrentCatalogOperation) {
        return buildShowResult("current catalog name", new String[] { catalogManager.getCurrentCatalog() });
    } else if (operation instanceof ShowDatabasesOperation) {
        return buildShowResult("database name", listDatabases());
    } else if (operation instanceof ShowCurrentDatabaseOperation) {
        return buildShowResult("current database name", new String[] { catalogManager.getCurrentDatabase() });
    } else if (operation instanceof ShowModulesOperation) {
        ShowModulesOperation showModulesOperation = (ShowModulesOperation) operation;
        if (showModulesOperation.requireFull()) {
            return buildShowFullModulesResult(listFullModules());
        } else {
            return buildShowResult("module name", listModules());
        }
    } else if (operation instanceof ShowTablesOperation) {
        return buildShowResult("table name", listTables());
    } else if (operation instanceof ShowFunctionsOperation) {
        ShowFunctionsOperation showFunctionsOperation = (ShowFunctionsOperation) operation;
        String[] functionNames = null;
        ShowFunctionsOperation.FunctionScope functionScope = showFunctionsOperation.getFunctionScope();
        switch(functionScope) {
            case USER:
                functionNames = listUserDefinedFunctions();
                break;
            case ALL:
                functionNames = listFunctions();
                break;
            default:
                throw new UnsupportedOperationException(String.format("SHOW FUNCTIONS with %s scope is not supported.", functionScope));
        }
        return buildShowResult("function name", functionNames);
    } else if (operation instanceof ShowViewsOperation) {
        return buildShowResult("view name", listViews());
    } else if (operation instanceof ShowColumnsOperation) {
        ShowColumnsOperation showColumnsOperation = (ShowColumnsOperation) operation;
        Optional<ContextResolvedTable> result = catalogManager.getTable(showColumnsOperation.getTableIdentifier());
        if (result.isPresent()) {
            return buildShowColumnsResult(result.get().getResolvedSchema(), showColumnsOperation);
        } else {
            throw new ValidationException(String.format("Tables or views with the identifier '%s' doesn't exist.", showColumnsOperation.getTableIdentifier().asSummaryString()));
        }
    } else if (operation instanceof ShowPartitionsOperation) {
        String exMsg = getDDLOpExecuteErrorMsg(operation.asSummaryString());
        try {
            ShowPartitionsOperation showPartitionsOperation = (ShowPartitionsOperation) operation;
            Catalog catalog = getCatalogOrThrowException(showPartitionsOperation.getTableIdentifier().getCatalogName());
            ObjectPath tablePath = showPartitionsOperation.getTableIdentifier().toObjectPath();
            CatalogPartitionSpec partitionSpec = showPartitionsOperation.getPartitionSpec();
            // No spec -> list all partitions; with a spec -> only matching partitions.
            List<CatalogPartitionSpec> partitionSpecs = partitionSpec == null ? catalog.listPartitions(tablePath) : catalog.listPartitions(tablePath, partitionSpec);
            List<String> partitionNames = new ArrayList<>(partitionSpecs.size());
            for (CatalogPartitionSpec spec : partitionSpecs) {
                // Render each partition as "k1=v1/k2=v2/..." (Hive-style path segments).
                List<String> partitionKVs = new ArrayList<>(spec.getPartitionSpec().size());
                for (Map.Entry<String, String> partitionKV : spec.getPartitionSpec().entrySet()) {
                    partitionKVs.add(partitionKV.getKey() + "=" + partitionKV.getValue());
                }
                partitionNames.add(String.join("/", partitionKVs));
            }
            return buildShowResult("partition name", partitionNames.toArray(new String[0]));
        } catch (TableNotExistException e) {
            throw new ValidationException(exMsg, e);
        } catch (Exception e) {
            throw new TableException(exMsg, e);
        }
    } else if (operation instanceof ExplainOperation) {
        ExplainOperation explainOperation = (ExplainOperation) operation;
        ExplainDetail[] explainDetails = explainOperation.getExplainDetails().stream().map(ExplainDetail::valueOf).toArray(ExplainDetail[]::new);
        // A statement set is explained as its member operations; anything else as a single child.
        Operation child = ((ExplainOperation) operation).getChild();
        List<Operation> operations;
        if (child instanceof StatementSetOperation) {
            operations = new ArrayList<>(((StatementSetOperation) child).getOperations());
        } else {
            operations = Collections.singletonList(child);
        }
        String explanation = explainInternal(operations, explainDetails);
        return TableResultImpl.builder().resultKind(ResultKind.SUCCESS_WITH_CONTENT).schema(ResolvedSchema.of(Column.physical("result", DataTypes.STRING()))).data(Collections.singletonList(Row.of(explanation))).build();
    } else if (operation instanceof DescribeTableOperation) {
        DescribeTableOperation describeTableOperation = (DescribeTableOperation) operation;
        Optional<ContextResolvedTable> result = catalogManager.getTable(describeTableOperation.getSqlIdentifier());
        if (result.isPresent()) {
            return buildDescribeResult(result.get().getResolvedSchema());
        } else {
            throw new ValidationException(String.format("Tables or views with the identifier '%s' doesn't exist", describeTableOperation.getSqlIdentifier().asSummaryString()));
        }
    } else if (operation instanceof QueryOperation) {
        return executeQueryOperation((QueryOperation) operation);
    } else if (operation instanceof CreateTableASOperation) {
        // CTAS = create the table first, then run the INSERT derived from the query.
        CreateTableASOperation createTableASOperation = (CreateTableASOperation) operation;
        executeInternal(createTableASOperation.getCreateTableOperation());
        return executeInternal(createTableASOperation.toSinkModifyOperation(catalogManager));
    } else if (operation instanceof ExecutePlanOperation) {
        ExecutePlanOperation executePlanOperation = (ExecutePlanOperation) operation;
        return (TableResultInternal) executePlan(PlanReference.fromFile(executePlanOperation.getFilePath()));
    } else if (operation instanceof CompilePlanOperation) {
        CompilePlanOperation compilePlanOperation = (CompilePlanOperation) operation;
        compilePlanAndWrite(compilePlanOperation.getFilePath(), compilePlanOperation.isIfNotExists(), compilePlanOperation.getOperation());
        return TableResultImpl.TABLE_RESULT_OK;
    } else if (operation instanceof CompileAndExecutePlanOperation) {
        CompileAndExecutePlanOperation compileAndExecutePlanOperation = (CompileAndExecutePlanOperation) operation;
        // `true` = ignore-if-exists when writing the compiled plan file.
        CompiledPlan compiledPlan = compilePlanAndWrite(compileAndExecutePlanOperation.getFilePath(), true, compileAndExecutePlanOperation.getOperation());
        return (TableResultInternal) executePlan(compiledPlan);
    } else if (operation instanceof NopOperation) {
        return TableResultImpl.TABLE_RESULT_OK;
    } else {
        throw new TableException(UNSUPPORTED_QUERY_IN_EXECUTE_SQL_MSG);
    }
}
Also used : NopOperation(org.apache.flink.table.operations.NopOperation) AlterViewOperation(org.apache.flink.table.operations.ddl.AlterViewOperation) AlterViewAsOperation(org.apache.flink.table.operations.ddl.AlterViewAsOperation) ShowCurrentCatalogOperation(org.apache.flink.table.operations.ShowCurrentCatalogOperation) CatalogException(org.apache.flink.table.catalog.exceptions.CatalogException) ArrayList(java.util.ArrayList) AlterViewRenameOperation(org.apache.flink.table.operations.ddl.AlterViewRenameOperation) ShowPartitionsOperation(org.apache.flink.table.operations.ShowPartitionsOperation) StatementSetOperation(org.apache.flink.table.operations.StatementSetOperation) ExecutePlanOperation(org.apache.flink.table.operations.command.ExecutePlanOperation) CompileAndExecutePlanOperation(org.apache.flink.table.operations.CompileAndExecutePlanOperation) CatalogTableImpl(org.apache.flink.table.catalog.CatalogTableImpl) ShowCreateViewOperation(org.apache.flink.table.operations.ShowCreateViewOperation) CreateViewOperation(org.apache.flink.table.operations.ddl.CreateViewOperation) CompileAndExecutePlanOperation(org.apache.flink.table.operations.CompileAndExecutePlanOperation) AlterPartitionPropertiesOperation(org.apache.flink.table.operations.ddl.AlterPartitionPropertiesOperation) ArrayList(java.util.ArrayList) List(java.util.List) UseCatalogOperation(org.apache.flink.table.operations.UseCatalogOperation) AddPartitionsOperation(org.apache.flink.table.operations.ddl.AddPartitionsOperation) CatalogPartitionSpec(org.apache.flink.table.catalog.CatalogPartitionSpec) CreateTempSystemFunctionOperation(org.apache.flink.table.operations.ddl.CreateTempSystemFunctionOperation) AlterViewPropertiesOperation(org.apache.flink.table.operations.ddl.AlterViewPropertiesOperation) CompiledPlan(org.apache.flink.table.api.CompiledPlan) DropPartitionsOperation(org.apache.flink.table.operations.ddl.DropPartitionsOperation) Optional(java.util.Optional) 
DropDatabaseOperation(org.apache.flink.table.operations.ddl.DropDatabaseOperation) ShowViewsOperation(org.apache.flink.table.operations.ShowViewsOperation) Catalog(org.apache.flink.table.catalog.Catalog) GenericInMemoryCatalog(org.apache.flink.table.catalog.GenericInMemoryCatalog) FunctionCatalog(org.apache.flink.table.catalog.FunctionCatalog) ExplainDetail(org.apache.flink.table.api.ExplainDetail) ShowCurrentDatabaseOperation(org.apache.flink.table.operations.ShowCurrentDatabaseOperation) AlterTableOperation(org.apache.flink.table.operations.ddl.AlterTableOperation) DropTableOperation(org.apache.flink.table.operations.ddl.DropTableOperation) CatalogPartition(org.apache.flink.table.catalog.CatalogPartition) ModifyOperation(org.apache.flink.table.operations.ModifyOperation) SinkModifyOperation(org.apache.flink.table.operations.SinkModifyOperation) CollectModifyOperation(org.apache.flink.table.operations.CollectModifyOperation) DatabaseNotExistException(org.apache.flink.table.catalog.exceptions.DatabaseNotExistException) ShowDatabasesOperation(org.apache.flink.table.operations.ShowDatabasesOperation) ContextResolvedTable(org.apache.flink.table.catalog.ContextResolvedTable) Map(java.util.Map) HashMap(java.util.HashMap) DatabaseAlreadyExistException(org.apache.flink.table.catalog.exceptions.DatabaseAlreadyExistException) DropTempSystemFunctionOperation(org.apache.flink.table.operations.ddl.DropTempSystemFunctionOperation) AlterTableOptionsOperation(org.apache.flink.table.operations.ddl.AlterTableOptionsOperation) ObjectPath(org.apache.flink.table.catalog.ObjectPath) AlterDatabaseOperation(org.apache.flink.table.operations.ddl.AlterDatabaseOperation) ValidationException(org.apache.flink.table.api.ValidationException) TableSchema(org.apache.flink.table.api.TableSchema) CreateCatalogOperation(org.apache.flink.table.operations.ddl.CreateCatalogOperation) DropCatalogFunctionOperation(org.apache.flink.table.operations.ddl.DropCatalogFunctionOperation) 
TableNotExistException(org.apache.flink.table.catalog.exceptions.TableNotExistException) CreateTableOperation(org.apache.flink.table.operations.ddl.CreateTableOperation) ShowCreateTableOperation(org.apache.flink.table.operations.ShowCreateTableOperation) AlterTableAddConstraintOperation(org.apache.flink.table.operations.ddl.AlterTableAddConstraintOperation) ModifyOperation(org.apache.flink.table.operations.ModifyOperation) ShowCurrentDatabaseOperation(org.apache.flink.table.operations.ShowCurrentDatabaseOperation) ExecutePlanOperation(org.apache.flink.table.operations.command.ExecutePlanOperation) DropDatabaseOperation(org.apache.flink.table.operations.ddl.DropDatabaseOperation) SinkModifyOperation(org.apache.flink.table.operations.SinkModifyOperation) ShowColumnsOperation(org.apache.flink.table.operations.ShowColumnsOperation) AlterTableOptionsOperation(org.apache.flink.table.operations.ddl.AlterTableOptionsOperation) AlterTableDropConstraintOperation(org.apache.flink.table.operations.ddl.AlterTableDropConstraintOperation) CompilePlanOperation(org.apache.flink.table.operations.ddl.CompilePlanOperation) CreateCatalogOperation(org.apache.flink.table.operations.ddl.CreateCatalogOperation) ShowCreateViewOperation(org.apache.flink.table.operations.ShowCreateViewOperation) UseCatalogOperation(org.apache.flink.table.operations.UseCatalogOperation) UseDatabaseOperation(org.apache.flink.table.operations.UseDatabaseOperation) ShowCatalogsOperation(org.apache.flink.table.operations.ShowCatalogsOperation) CollectModifyOperation(org.apache.flink.table.operations.CollectModifyOperation) CreateViewOperation(org.apache.flink.table.operations.ddl.CreateViewOperation) AlterDatabaseOperation(org.apache.flink.table.operations.ddl.AlterDatabaseOperation) QueryOperation(org.apache.flink.table.operations.QueryOperation) CompileAndExecutePlanOperation(org.apache.flink.table.operations.CompileAndExecutePlanOperation) 
UseModulesOperation(org.apache.flink.table.operations.UseModulesOperation) DropCatalogFunctionOperation(org.apache.flink.table.operations.ddl.DropCatalogFunctionOperation) ShowTablesOperation(org.apache.flink.table.operations.ShowTablesOperation) DescribeTableOperation(org.apache.flink.table.operations.DescribeTableOperation) ShowCurrentCatalogOperation(org.apache.flink.table.operations.ShowCurrentCatalogOperation) AlterTableOperation(org.apache.flink.table.operations.ddl.AlterTableOperation) ShowFunctionsOperation(org.apache.flink.table.operations.ShowFunctionsOperation) CreateDatabaseOperation(org.apache.flink.table.operations.ddl.CreateDatabaseOperation) NopOperation(org.apache.flink.table.operations.NopOperation) AlterPartitionPropertiesOperation(org.apache.flink.table.operations.ddl.AlterPartitionPropertiesOperation) ShowPartitionsOperation(org.apache.flink.table.operations.ShowPartitionsOperation) AlterViewPropertiesOperation(org.apache.flink.table.operations.ddl.AlterViewPropertiesOperation) AlterViewOperation(org.apache.flink.table.operations.ddl.AlterViewOperation) LoadModuleOperation(org.apache.flink.table.operations.LoadModuleOperation) Operation(org.apache.flink.table.operations.Operation) AlterCatalogFunctionOperation(org.apache.flink.table.operations.ddl.AlterCatalogFunctionOperation) DropTempSystemFunctionOperation(org.apache.flink.table.operations.ddl.DropTempSystemFunctionOperation) TableSourceQueryOperation(org.apache.flink.table.operations.TableSourceQueryOperation) ShowViewsOperation(org.apache.flink.table.operations.ShowViewsOperation) ShowDatabasesOperation(org.apache.flink.table.operations.ShowDatabasesOperation) AlterTableSchemaOperation(org.apache.flink.table.operations.ddl.AlterTableSchemaOperation) CreateTableASOperation(org.apache.flink.table.operations.ddl.CreateTableASOperation) ShowModulesOperation(org.apache.flink.table.operations.ShowModulesOperation) SourceQueryOperation(org.apache.flink.table.operations.SourceQueryOperation) 
UnloadModuleOperation(org.apache.flink.table.operations.UnloadModuleOperation) DropTableOperation(org.apache.flink.table.operations.ddl.DropTableOperation) AlterViewAsOperation(org.apache.flink.table.operations.ddl.AlterViewAsOperation) CreateTableOperation(org.apache.flink.table.operations.ddl.CreateTableOperation) DropViewOperation(org.apache.flink.table.operations.ddl.DropViewOperation) AddPartitionsOperation(org.apache.flink.table.operations.ddl.AddPartitionsOperation) DropPartitionsOperation(org.apache.flink.table.operations.ddl.DropPartitionsOperation) AlterTableAddConstraintOperation(org.apache.flink.table.operations.ddl.AlterTableAddConstraintOperation) ExplainOperation(org.apache.flink.table.operations.ExplainOperation) StatementSetOperation(org.apache.flink.table.operations.StatementSetOperation) DropCatalogOperation(org.apache.flink.table.operations.ddl.DropCatalogOperation) AlterTableRenameOperation(org.apache.flink.table.operations.ddl.AlterTableRenameOperation) ShowCreateTableOperation(org.apache.flink.table.operations.ShowCreateTableOperation) AlterViewRenameOperation(org.apache.flink.table.operations.ddl.AlterViewRenameOperation) CreateCatalogFunctionOperation(org.apache.flink.table.operations.ddl.CreateCatalogFunctionOperation) CreateTempSystemFunctionOperation(org.apache.flink.table.operations.ddl.CreateTempSystemFunctionOperation) AlterTableDropConstraintOperation(org.apache.flink.table.operations.ddl.AlterTableDropConstraintOperation) LoadModuleOperation(org.apache.flink.table.operations.LoadModuleOperation) ShowCreateViewOperation(org.apache.flink.table.operations.ShowCreateViewOperation) TableAlreadyExistException(org.apache.flink.table.catalog.exceptions.TableAlreadyExistException) CreateCatalogFunctionOperation(org.apache.flink.table.operations.ddl.CreateCatalogFunctionOperation) AlterCatalogFunctionOperation(org.apache.flink.table.operations.ddl.AlterCatalogFunctionOperation) 
CreateTableASOperation(org.apache.flink.table.operations.ddl.CreateTableASOperation) AlterTableSchemaOperation(org.apache.flink.table.operations.ddl.AlterTableSchemaOperation) ShowColumnsOperation(org.apache.flink.table.operations.ShowColumnsOperation) CompilePlanOperation(org.apache.flink.table.operations.ddl.CompilePlanOperation) QueryOperation(org.apache.flink.table.operations.QueryOperation) TableSourceQueryOperation(org.apache.flink.table.operations.TableSourceQueryOperation) SourceQueryOperation(org.apache.flink.table.operations.SourceQueryOperation) TableException(org.apache.flink.table.api.TableException) ExplainOperation(org.apache.flink.table.operations.ExplainOperation) ShowCatalogsOperation(org.apache.flink.table.operations.ShowCatalogsOperation) UnloadModuleOperation(org.apache.flink.table.operations.UnloadModuleOperation) DropCatalogOperation(org.apache.flink.table.operations.ddl.DropCatalogOperation) ShowCreateTableOperation(org.apache.flink.table.operations.ShowCreateTableOperation) CatalogTable(org.apache.flink.table.catalog.CatalogTable) ConnectorCatalogTable(org.apache.flink.table.catalog.ConnectorCatalogTable) ResolvedCatalogTable(org.apache.flink.table.catalog.ResolvedCatalogTable) DescribeTableOperation(org.apache.flink.table.operations.DescribeTableOperation) UseDatabaseOperation(org.apache.flink.table.operations.UseDatabaseOperation) FunctionAlreadyExistException(org.apache.flink.table.catalog.exceptions.FunctionAlreadyExistException) DatabaseNotExistException(org.apache.flink.table.catalog.exceptions.DatabaseNotExistException) TableAlreadyExistException(org.apache.flink.table.catalog.exceptions.TableAlreadyExistException) TableException(org.apache.flink.table.api.TableException) IOException(java.io.IOException) ExecutionException(java.util.concurrent.ExecutionException) CatalogException(org.apache.flink.table.catalog.exceptions.CatalogException) FunctionNotExistException(org.apache.flink.table.catalog.exceptions.FunctionNotExistException) 
DatabaseNotEmptyException(org.apache.flink.table.catalog.exceptions.DatabaseNotEmptyException) DatabaseAlreadyExistException(org.apache.flink.table.catalog.exceptions.DatabaseAlreadyExistException) SqlParserException(org.apache.flink.table.api.SqlParserException) ValidationException(org.apache.flink.table.api.ValidationException) TableNotExistException(org.apache.flink.table.catalog.exceptions.TableNotExistException) ShowFunctionsOperation(org.apache.flink.table.operations.ShowFunctionsOperation) AlterTableRenameOperation(org.apache.flink.table.operations.ddl.AlterTableRenameOperation) DropViewOperation(org.apache.flink.table.operations.ddl.DropViewOperation) UseModulesOperation(org.apache.flink.table.operations.UseModulesOperation) CreateDatabaseOperation(org.apache.flink.table.operations.ddl.CreateDatabaseOperation) ShowModulesOperation(org.apache.flink.table.operations.ShowModulesOperation) ShowTablesOperation(org.apache.flink.table.operations.ShowTablesOperation)

Example 7 with Catalog

Use of org.apache.flink.table.catalog.Catalog in project flink by apache.

Class TableEnvironmentImpl, method alterCatalogFunction.

/**
 * Executes an ALTER FUNCTION DDL statement against its target catalog.
 *
 * <p>Altering a temporary catalog function is rejected up front; otherwise the function is
 * altered through the catalog resolved from the operation's function identifier.
 *
 * @param alterCatalogFunctionOperation the parsed ALTER FUNCTION operation
 * @return {@link TableResultImpl#TABLE_RESULT_OK} on success
 * @throws ValidationException if the function is temporary or does not exist
 * @throws TableException wrapping any other failure, using the DDL error message
 */
private TableResultInternal alterCatalogFunction(AlterCatalogFunctionOperation alterCatalogFunctionOperation) {
    String errorMessage = getDDLOpExecuteErrorMsg(alterCatalogFunctionOperation.asSummaryString());
    try {
        CatalogFunction catalogFunction = alterCatalogFunctionOperation.getCatalogFunction();
        // Temporary functions cannot be altered; fail fast before touching any catalog.
        if (alterCatalogFunctionOperation.isTemporary()) {
            throw new ValidationException("Alter temporary catalog function is not supported");
        }
        Catalog targetCatalog =
                getCatalogOrThrowException(
                        alterCatalogFunctionOperation.getFunctionIdentifier().getCatalogName());
        targetCatalog.alterFunction(
                alterCatalogFunctionOperation.getFunctionIdentifier().toObjectPath(),
                catalogFunction,
                alterCatalogFunctionOperation.isIfExists());
        return TableResultImpl.TABLE_RESULT_OK;
    } catch (ValidationException e) {
        // Already a user-facing validation error; rethrow unchanged.
        throw e;
    } catch (FunctionNotExistException e) {
        // Surface a missing function as a validation error, preserving the cause.
        throw new ValidationException(e.getMessage(), e);
    } catch (Exception e) {
        throw new TableException(errorMessage, e);
    }
}
Also used : FunctionNotExistException(org.apache.flink.table.catalog.exceptions.FunctionNotExistException) TableException(org.apache.flink.table.api.TableException) ValidationException(org.apache.flink.table.api.ValidationException) CatalogFunction(org.apache.flink.table.catalog.CatalogFunction) Catalog(org.apache.flink.table.catalog.Catalog) GenericInMemoryCatalog(org.apache.flink.table.catalog.GenericInMemoryCatalog) FunctionCatalog(org.apache.flink.table.catalog.FunctionCatalog) FunctionAlreadyExistException(org.apache.flink.table.catalog.exceptions.FunctionAlreadyExistException) DatabaseNotExistException(org.apache.flink.table.catalog.exceptions.DatabaseNotExistException) TableAlreadyExistException(org.apache.flink.table.catalog.exceptions.TableAlreadyExistException) TableException(org.apache.flink.table.api.TableException) IOException(java.io.IOException) ExecutionException(java.util.concurrent.ExecutionException) CatalogException(org.apache.flink.table.catalog.exceptions.CatalogException) FunctionNotExistException(org.apache.flink.table.catalog.exceptions.FunctionNotExistException) DatabaseNotEmptyException(org.apache.flink.table.catalog.exceptions.DatabaseNotEmptyException) DatabaseAlreadyExistException(org.apache.flink.table.catalog.exceptions.DatabaseAlreadyExistException) SqlParserException(org.apache.flink.table.api.SqlParserException) ValidationException(org.apache.flink.table.api.ValidationException) TableNotExistException(org.apache.flink.table.catalog.exceptions.TableNotExistException)

Example 8 with Catalog

Use of org.apache.flink.table.catalog.Catalog in project flink by apache.

Class PushPartitionIntoTableSourceScanRule, method onMatch.

@Override
public void onMatch(RelOptRuleCall call) {
    // Pushes partition predicates from the matched Filter down into the table source scan,
    // pruning partitions that cannot satisfy them so the source reads less data.
    Filter filter = call.rel(0);
    LogicalTableScan scan = call.rel(1);
    TableSourceTable tableSourceTable = scan.getTable().unwrap(TableSourceTable.class);
    RelDataType inputFieldTypes = filter.getInput().getRowType();
    List<String> inputFieldNames = inputFieldTypes.getFieldNames();
    List<String> partitionFieldNames = tableSourceTable.contextResolvedTable().<ResolvedCatalogTable>getResolvedTable().getPartitionKeys();
    // extract partition predicates
    RelBuilder relBuilder = call.builder();
    RexBuilder rexBuilder = relBuilder.getRexBuilder();
    // Split the filter condition into (partition predicates, non-partition predicates).
    Tuple2<Seq<RexNode>, Seq<RexNode>> allPredicates = RexNodeExtractor.extractPartitionPredicateList(filter.getCondition(), FlinkRelOptUtil.getMaxCnfNodeCount(scan), inputFieldNames.toArray(new String[0]), rexBuilder, partitionFieldNames.toArray(new String[0]));
    RexNode partitionPredicate = RexUtil.composeConjunction(rexBuilder, JavaConversions.seqAsJavaList(allPredicates._1));
    // Nothing to push down when no predicate references a partition column.
    if (partitionPredicate.isAlwaysTrue()) {
        return;
    }
    // build pruner
    // Resolve each partition column's logical type from the input row type; a missing
    // column here indicates a validator bug, hence the hard failure.
    LogicalType[] partitionFieldTypes = partitionFieldNames.stream().map(name -> {
        int index = inputFieldNames.indexOf(name);
        if (index < 0) {
            throw new TableException(String.format("Partitioned key '%s' isn't found in input columns. " + "Validator should have checked that.", name));
        }
        return inputFieldTypes.getFieldList().get(index).getType();
    }).map(FlinkTypeFactory::toLogicalType).toArray(LogicalType[]::new);
    RexNode finalPartitionPredicate = adjustPartitionPredicate(inputFieldNames, partitionFieldNames, partitionPredicate);
    FlinkContext context = ShortcutUtils.unwrapContext(scan);
    // Default pruner: evaluate the partition predicate against each candidate partition.
    Function<List<Map<String, String>>, List<Map<String, String>>> defaultPruner = partitions -> PartitionPruner.prunePartitions(context.getTableConfig(), partitionFieldNames.toArray(new String[0]), partitionFieldTypes, partitions, finalPartitionPredicate);
    // prune partitions
    List<Map<String, String>> remainingPartitions = readPartitionsAndPrune(rexBuilder, context, tableSourceTable, defaultPruner, allPredicates._1(), inputFieldNames);
    // apply push down
    // Copy the source before mutating it with the push-down spec.
    DynamicTableSource dynamicTableSource = tableSourceTable.tableSource().copy();
    PartitionPushDownSpec partitionPushDownSpec = new PartitionPushDownSpec(remainingPartitions);
    partitionPushDownSpec.apply(dynamicTableSource, SourceAbilityContext.from(scan));
    // build new statistic
    TableStats newTableStat = null;
    if (tableSourceTable.contextResolvedTable().isPermanent()) {
        ObjectIdentifier identifier = tableSourceTable.contextResolvedTable().getIdentifier();
        ObjectPath tablePath = identifier.toObjectPath();
        Catalog catalog = tableSourceTable.contextResolvedTable().getCatalog().get();
        // Merge per-partition stats; if any partition lacks stats, discard the merged
        // result entirely rather than report a misleading partial statistic.
        for (Map<String, String> partition : remainingPartitions) {
            Optional<TableStats> partitionStats = getPartitionStats(catalog, tablePath, partition);
            if (!partitionStats.isPresent()) {
                // clear all information before
                newTableStat = null;
                break;
            } else {
                newTableStat = newTableStat == null ? partitionStats.get() : newTableStat.merge(partitionStats.get());
            }
        }
    }
    FlinkStatistic newStatistic = FlinkStatistic.builder().statistic(tableSourceTable.getStatistic()).tableStats(newTableStat).build();
    TableSourceTable newTableSourceTable = tableSourceTable.copy(dynamicTableSource, newStatistic, new SourceAbilitySpec[] { partitionPushDownSpec });
    LogicalTableScan newScan = LogicalTableScan.create(scan.getCluster(), newTableSourceTable, scan.getHints());
    // transform to new node
    // Keep a Filter on top only for the predicates that did not reference partition columns.
    RexNode nonPartitionPredicate = RexUtil.composeConjunction(rexBuilder, JavaConversions.seqAsJavaList(allPredicates._2()));
    if (nonPartitionPredicate.isAlwaysTrue()) {
        call.transformTo(newScan);
    } else {
        Filter newFilter = filter.copy(filter.getTraitSet(), newScan, nonPartitionPredicate);
        call.transformTo(newFilter);
    }
}
Also used : CatalogColumnStatistics(org.apache.flink.table.catalog.stats.CatalogColumnStatistics) Arrays(java.util.Arrays) SourceAbilityContext(org.apache.flink.table.planner.plan.abilities.source.SourceAbilityContext) PartitionNotExistException(org.apache.flink.table.catalog.exceptions.PartitionNotExistException) CatalogTable(org.apache.flink.table.catalog.CatalogTable) ShortcutUtils(org.apache.flink.table.planner.utils.ShortcutUtils) SupportsPartitionPushDown(org.apache.flink.table.connector.source.abilities.SupportsPartitionPushDown) FlinkTypeFactory(org.apache.flink.table.planner.calcite.FlinkTypeFactory) RexUtil(org.apache.calcite.rex.RexUtil) RexNode(org.apache.calcite.rex.RexNode) RelBuilder(org.apache.calcite.tools.RelBuilder) ResolvedExpression(org.apache.flink.table.expressions.ResolvedExpression) Map(java.util.Map) TableStats(org.apache.flink.table.plan.stats.TableStats) DynamicTableSource(org.apache.flink.table.connector.source.DynamicTableSource) PartitionPushDownSpec(org.apache.flink.table.planner.plan.abilities.source.PartitionPushDownSpec) TimeZone(java.util.TimeZone) Seq(scala.collection.Seq) FlinkContext(org.apache.flink.table.planner.calcite.FlinkContext) Tuple2(scala.Tuple2) Collectors(java.util.stream.Collectors) SourceAbilitySpec(org.apache.flink.table.planner.plan.abilities.source.SourceAbilitySpec) RexInputRef(org.apache.calcite.rex.RexInputRef) List(java.util.List) TableNotPartitionedException(org.apache.flink.table.catalog.exceptions.TableNotPartitionedException) LogicalType(org.apache.flink.table.types.logical.LogicalType) Optional(java.util.Optional) RexNodeToExpressionConverter(org.apache.flink.table.planner.plan.utils.RexNodeToExpressionConverter) LogicalTableScan(org.apache.calcite.rel.logical.LogicalTableScan) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier) CatalogTableStatisticsConverter(org.apache.flink.table.planner.utils.CatalogTableStatisticsConverter) 
RexNodeExtractor(org.apache.flink.table.planner.plan.utils.RexNodeExtractor) Expression(org.apache.flink.table.expressions.Expression) Filter(org.apache.calcite.rel.core.Filter) ObjectPath(org.apache.flink.table.catalog.ObjectPath) Function(java.util.function.Function) ArrayList(java.util.ArrayList) CatalogTableStatistics(org.apache.flink.table.catalog.stats.CatalogTableStatistics) Catalog(org.apache.flink.table.catalog.Catalog) PartitionPruner(org.apache.flink.table.planner.plan.utils.PartitionPruner) ResolvedCatalogTable(org.apache.flink.table.catalog.ResolvedCatalogTable) RelDataType(org.apache.calcite.rel.type.RelDataType) JavaConversions(scala.collection.JavaConversions) TableNotExistException(org.apache.flink.table.catalog.exceptions.TableNotExistException) RexBuilder(org.apache.calcite.rex.RexBuilder) TableException(org.apache.flink.table.api.TableException) Option(scala.Option) FlinkRelOptUtil(org.apache.flink.table.planner.plan.utils.FlinkRelOptUtil) TableSourceTable(org.apache.flink.table.planner.plan.schema.TableSourceTable) RelOptRuleCall(org.apache.calcite.plan.RelOptRuleCall) CatalogPartitionSpec(org.apache.flink.table.catalog.CatalogPartitionSpec) RelOptRule(org.apache.calcite.plan.RelOptRule) FlinkStatistic(org.apache.flink.table.planner.plan.stats.FlinkStatistic) RexShuttle(org.apache.calcite.rex.RexShuttle) CatalogException(org.apache.flink.table.catalog.exceptions.CatalogException) ObjectPath(org.apache.flink.table.catalog.ObjectPath) LogicalType(org.apache.flink.table.types.logical.LogicalType) RelDataType(org.apache.calcite.rel.type.RelDataType) FlinkStatistic(org.apache.flink.table.planner.plan.stats.FlinkStatistic) RexBuilder(org.apache.calcite.rex.RexBuilder) List(java.util.List) ArrayList(java.util.ArrayList) TableSourceTable(org.apache.flink.table.planner.plan.schema.TableSourceTable) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier) TableException(org.apache.flink.table.api.TableException) 
RelBuilder(org.apache.calcite.tools.RelBuilder) FlinkContext(org.apache.flink.table.planner.calcite.FlinkContext) TableStats(org.apache.flink.table.plan.stats.TableStats) LogicalTableScan(org.apache.calcite.rel.logical.LogicalTableScan) Catalog(org.apache.flink.table.catalog.Catalog) PartitionPushDownSpec(org.apache.flink.table.planner.plan.abilities.source.PartitionPushDownSpec) Filter(org.apache.calcite.rel.core.Filter) Map(java.util.Map) Seq(scala.collection.Seq) DynamicTableSource(org.apache.flink.table.connector.source.DynamicTableSource) RexNode(org.apache.calcite.rex.RexNode)

Example 9 with Catalog

Use of org.apache.flink.table.catalog.Catalog in project flink by apache.

Class PushPartitionIntoTableSourceScanRule, method readPartitionsAndPrune.

/**
 * Returns the partitions of the scanned table that may satisfy the partition predicate.
 *
 * <p>Partitions are taken from the table source itself when it implements
 * {@code SupportsPartitionPushDown#listPartitions()} and returns a value; otherwise they are
 * read from the catalog. The supplied pruner is applied to drop partitions that cannot match.
 *
 * @param rexBuilder builder used when converting predicates for catalog-side filtering
 * @param context planner context providing the table config
 * @param tableSourceTable the table being scanned
 * @param pruner function that filters the candidate partition list
 * @param partitionPredicate predicates referencing only partition columns
 * @param inputFieldNames field names of the scan input row type
 * @return the remaining (possibly pruned) partitions
 * @throws TableException if partitions cannot be obtained from either the source or the catalog
 */
private List<Map<String, String>> readPartitionsAndPrune(RexBuilder rexBuilder, FlinkContext context, TableSourceTable tableSourceTable, Function<List<Map<String, String>>, List<Map<String, String>>> pruner, Seq<RexNode> partitionPredicate, List<String> inputFieldNames) {
    // get partitions from table/catalog and prune
    Optional<Catalog> catalogOptional = tableSourceTable.contextResolvedTable().getCatalog();
    DynamicTableSource dynamicTableSource = tableSourceTable.tableSource();
    Optional<List<Map<String, String>>> optionalPartitions = ((SupportsPartitionPushDown) dynamicTableSource).listPartitions();
    if (optionalPartitions.isPresent()) {
        return pruner.apply(optionalPartitions.get());
    } else {
        // we will read partitions from catalog if table doesn't support listPartitions.
        if (!catalogOptional.isPresent()) {
            throw new TableException(String.format("Table '%s' connector doesn't provide partitions, and it cannot be loaded from the catalog", tableSourceTable.contextResolvedTable().getIdentifier().asSummaryString()));
        }
        try {
            return readPartitionFromCatalogAndPrune(rexBuilder, context, catalogOptional.get(), tableSourceTable.contextResolvedTable().getIdentifier(), inputFieldNames, partitionPredicate, pruner);
        } catch (TableNotExistException tableNotExistException) {
            // Fix: preserve the original exception as the cause instead of dropping it,
            // matching the handling of TableNotPartitionedException below.
            throw new TableException(String.format("Table %s is not found in catalog.", tableSourceTable.contextResolvedTable().getIdentifier().asSummaryString()), tableNotExistException);
        } catch (TableNotPartitionedException tableNotPartitionedException) {
            throw new TableException(String.format("Table %s is not a partitionable source. Validator should have checked it.", tableSourceTable.contextResolvedTable().getIdentifier().asSummaryString()), tableNotPartitionedException);
        }
    }
}
Also used : TableException(org.apache.flink.table.api.TableException) TableNotPartitionedException(org.apache.flink.table.catalog.exceptions.TableNotPartitionedException) TableNotExistException(org.apache.flink.table.catalog.exceptions.TableNotExistException) List(java.util.List) ArrayList(java.util.ArrayList) Catalog(org.apache.flink.table.catalog.Catalog) DynamicTableSource(org.apache.flink.table.connector.source.DynamicTableSource) SupportsPartitionPushDown(org.apache.flink.table.connector.source.abilities.SupportsPartitionPushDown)

Example 10 with Catalog

Use of org.apache.flink.table.catalog.Catalog in project flink by apache.

Class SqlToOperationConverter, method convertAlterDatabase.

/**
 * Converts an ALTER DATABASE statement into an {@code AlterDatabaseOperation}.
 *
 * <p>The target database is resolved against the current catalog when the identifier has a
 * single part. The new database definition starts from the existing properties and overlays
 * the options given in the statement, keeping the original comment.
 *
 * @param sqlAlterDatabase the parsed ALTER DATABASE statement
 * @return the operation carrying the merged database definition
 * @throws ValidationException if the identifier is malformed, or the catalog or database
 *     does not exist
 */
private Operation convertAlterDatabase(SqlAlterDatabase sqlAlterDatabase) {
    String[] fullDatabaseName = sqlAlterDatabase.fullDatabaseName();
    if (fullDatabaseName.length > 2) {
        throw new ValidationException("alter database identifier format error");
    }
    // A single-part identifier names a database inside the current catalog.
    boolean singlePart = fullDatabaseName.length == 1;
    String catalogName = singlePart ? catalogManager.getCurrentCatalog() : fullDatabaseName[0];
    String databaseName = singlePart ? fullDatabaseName[0] : fullDatabaseName[1];
    Catalog resolvedCatalog =
            catalogManager
                    .getCatalog(catalogName)
                    .orElseThrow(
                            () ->
                                    new ValidationException(
                                            String.format("Catalog %s not exists", catalogName)));
    CatalogDatabase existingDatabase;
    try {
        existingDatabase = resolvedCatalog.getDatabase(databaseName);
    } catch (DatabaseNotExistException e) {
        throw new ValidationException(String.format("Database %s not exists", databaseName), e);
    }
    // Start from the current properties and overlay the options from the statement.
    final Map<String, String> mergedProperties = new HashMap<>(existingDatabase.getProperties());
    sqlAlterDatabase
            .getPropertyList()
            .getList()
            .forEach(
                    node -> {
                        SqlTableOption option = (SqlTableOption) node;
                        mergedProperties.put(option.getKeyString(), option.getValueString());
                    });
    CatalogDatabase newDatabase =
            new CatalogDatabaseImpl(mergedProperties, existingDatabase.getComment());
    return new AlterDatabaseOperation(catalogName, databaseName, newDatabase);
}
Also used : CatalogDatabase(org.apache.flink.table.catalog.CatalogDatabase) SqlTableOption(org.apache.flink.sql.parser.ddl.SqlTableOption) AlterDatabaseOperation(org.apache.flink.table.operations.ddl.AlterDatabaseOperation) ValidationException(org.apache.flink.table.api.ValidationException) DatabaseNotExistException(org.apache.flink.table.catalog.exceptions.DatabaseNotExistException) SqlShowCurrentCatalog(org.apache.flink.sql.parser.dql.SqlShowCurrentCatalog) Catalog(org.apache.flink.table.catalog.Catalog) SqlUseCatalog(org.apache.flink.sql.parser.ddl.SqlUseCatalog) SqlDropCatalog(org.apache.flink.sql.parser.ddl.SqlDropCatalog) SqlCreateCatalog(org.apache.flink.sql.parser.ddl.SqlCreateCatalog) CatalogDatabaseImpl(org.apache.flink.table.catalog.CatalogDatabaseImpl)

Aggregations

Catalog (org.apache.flink.table.catalog.Catalog)23 HashMap (java.util.HashMap)10 ValidationException (org.apache.flink.table.api.ValidationException)10 Test (org.junit.Test)9 TableNotExistException (org.apache.flink.table.catalog.exceptions.TableNotExistException)8 GenericInMemoryCatalog (org.apache.flink.table.catalog.GenericInMemoryCatalog)7 ObjectPath (org.apache.flink.table.catalog.ObjectPath)7 TableException (org.apache.flink.table.api.TableException)6 FunctionCatalog (org.apache.flink.table.catalog.FunctionCatalog)6 CatalogException (org.apache.flink.table.catalog.exceptions.CatalogException)6 DatabaseNotExistException (org.apache.flink.table.catalog.exceptions.DatabaseNotExistException)6 HiveCatalog (org.apache.flink.table.catalog.hive.HiveCatalog)6 CatalogTable (org.apache.flink.table.catalog.CatalogTable)5 IOException (java.io.IOException)4 ArrayList (java.util.ArrayList)4 List (java.util.List)4 ExecutionException (java.util.concurrent.ExecutionException)4 SqlParserException (org.apache.flink.table.api.SqlParserException)4 DatabaseAlreadyExistException (org.apache.flink.table.catalog.exceptions.DatabaseAlreadyExistException)4 DatabaseNotEmptyException (org.apache.flink.table.catalog.exceptions.DatabaseNotEmptyException)4