
Example 6 with CatalogView

Use of org.apache.flink.table.catalog.CatalogView in project flink by apache.

From class FlinkCalciteCatalogReader, method toPreparingTable.

/**
 * Translate this {@link CatalogSchemaTable} into a Flink source table.
 */
private static FlinkPreparingTableBase toPreparingTable(RelOptSchema relOptSchema, List<String> names, RelDataType rowType, CatalogSchemaTable schemaTable) {
    final ResolvedCatalogBaseTable<?> resolvedBaseTable = schemaTable.getContextResolvedTable().getResolvedTable();
    final CatalogBaseTable originTable = resolvedBaseTable.getOrigin();
    if (originTable instanceof QueryOperationCatalogView) {
        return convertQueryOperationView(relOptSchema, names, rowType, (QueryOperationCatalogView) originTable);
    } else if (originTable instanceof ConnectorCatalogTable) {
        ConnectorCatalogTable<?, ?> connectorTable = (ConnectorCatalogTable<?, ?>) originTable;
        if (connectorTable.getTableSource().isPresent()) {
            return convertLegacyTableSource(relOptSchema, rowType, schemaTable.getContextResolvedTable().getIdentifier(), connectorTable, schemaTable.getStatistic(), schemaTable.isStreamingMode());
        } else {
            throw new ValidationException("Cannot convert a connector table " + "without source.");
        }
    } else if (originTable instanceof CatalogView) {
        return convertCatalogView(relOptSchema, names, rowType, schemaTable.getStatistic(), (CatalogView) originTable);
    } else if (originTable instanceof CatalogTable) {
        return convertCatalogTable(relOptSchema, names, rowType, schemaTable);
    } else {
        throw new ValidationException("Unsupported table type: " + originTable);
    }
}
Also used : CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) ResolvedCatalogBaseTable(org.apache.flink.table.catalog.ResolvedCatalogBaseTable) ValidationException(org.apache.flink.table.api.ValidationException) ConnectorCatalogTable(org.apache.flink.table.catalog.ConnectorCatalogTable) QueryOperationCatalogView(org.apache.flink.table.catalog.QueryOperationCatalogView) CatalogTable(org.apache.flink.table.catalog.CatalogTable) CatalogView(org.apache.flink.table.catalog.CatalogView)
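
The instanceof chain above mirrors the ways a view or table can land in the catalog: views registered through the Table API are wrapped in a QueryOperationCatalogView, views created via SQL DDL surface as plain CatalogView instances, and tables registered through the legacy TableSource path arrive as ConnectorCatalogTable. A minimal sketch of how the two view flavors arise, assuming a TableEnvironment named tableEnv and a registered table src (both hypothetical):

// Registered from the Table API: the catalog entry is a QueryOperationCatalogView.
Table apiResult = tableEnv.sqlQuery("SELECT x, ts FROM src");
tableEnv.createTemporaryView("api_view", apiResult);

// Created through SQL DDL: the catalog entry's origin is a regular CatalogView.
tableEnv.executeSql("CREATE VIEW sql_view AS SELECT x, ts FROM src");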

Example 7 with CatalogView

Use of org.apache.flink.table.catalog.CatalogView in project flink by apache.

From class SqlToOperationConverter, method convertAlterView.

/**
 * Convert an ALTER VIEW statement.
 */
private Operation convertAlterView(SqlAlterView alterView) {
    UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(alterView.fullViewName());
    ObjectIdentifier viewIdentifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
    Optional<ContextResolvedTable> optionalCatalogTable = catalogManager.getTable(viewIdentifier);
    if (!optionalCatalogTable.isPresent() || optionalCatalogTable.get().isTemporary()) {
        throw new ValidationException(String.format("View %s doesn't exist or is a temporary view.", viewIdentifier.toString()));
    }
    CatalogBaseTable baseTable = optionalCatalogTable.get().getTable();
    if (baseTable instanceof CatalogTable) {
        throw new ValidationException("ALTER VIEW for a table is not allowed");
    }
    if (alterView instanceof SqlAlterViewRename) {
        UnresolvedIdentifier newUnresolvedIdentifier = UnresolvedIdentifier.of(((SqlAlterViewRename) alterView).fullNewViewName());
        ObjectIdentifier newTableIdentifier = catalogManager.qualifyIdentifier(newUnresolvedIdentifier);
        return new AlterViewRenameOperation(viewIdentifier, newTableIdentifier);
    } else if (alterView instanceof SqlAlterViewProperties) {
        SqlAlterViewProperties alterViewProperties = (SqlAlterViewProperties) alterView;
        CatalogView oldView = (CatalogView) baseTable;
        Map<String, String> newProperties = new HashMap<>(oldView.getOptions());
        newProperties.putAll(OperationConverterUtils.extractProperties(alterViewProperties.getPropertyList()));
        CatalogView newView = new CatalogViewImpl(oldView.getOriginalQuery(), oldView.getExpandedQuery(), oldView.getSchema(), newProperties, oldView.getComment());
        return new AlterViewPropertiesOperation(viewIdentifier, newView);
    } else if (alterView instanceof SqlAlterViewAs) {
        SqlAlterViewAs alterViewAs = (SqlAlterViewAs) alterView;
        final SqlNode newQuery = alterViewAs.getNewQuery();
        CatalogView oldView = (CatalogView) baseTable;
        CatalogView newView = convertViewQuery(newQuery, Collections.emptyList(), oldView.getOptions(), oldView.getComment());
        return new AlterViewAsOperation(viewIdentifier, newView);
    } else {
        throw new ValidationException(String.format("[%s] needs to implement", alterView.toSqlString(CalciteSqlDialect.DEFAULT)));
    }
}
Also used : AlterViewPropertiesOperation(org.apache.flink.table.operations.ddl.AlterViewPropertiesOperation) AlterViewAsOperation(org.apache.flink.table.operations.ddl.AlterViewAsOperation) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) ValidationException(org.apache.flink.table.api.ValidationException) CatalogViewImpl(org.apache.flink.table.catalog.CatalogViewImpl) SqlAlterViewRename(org.apache.flink.sql.parser.ddl.SqlAlterViewRename) UnresolvedIdentifier(org.apache.flink.table.catalog.UnresolvedIdentifier) CatalogTable(org.apache.flink.table.catalog.CatalogTable) ResolvedCatalogTable(org.apache.flink.table.catalog.ResolvedCatalogTable) AlterViewRenameOperation(org.apache.flink.table.operations.ddl.AlterViewRenameOperation) SqlAlterViewAs(org.apache.flink.sql.parser.ddl.SqlAlterViewAs) SqlAlterViewProperties(org.apache.flink.sql.parser.ddl.SqlAlterViewProperties) ContextResolvedTable(org.apache.flink.table.catalog.ContextResolvedTable) CatalogView(org.apache.flink.table.catalog.CatalogView) Map(java.util.Map) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier) SqlNode(org.apache.calcite.sql.SqlNode)
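
Each branch corresponds to one of the ALTER VIEW forms accepted by the Flink SQL parser. A hedged sketch of statements that would reach each branch, assuming a TableEnvironment named tableEnv and existing views (names and option keys are hypothetical; the SET form is the one parsed into SqlAlterViewProperties):

// Parsed as SqlAlterViewRename, converted to an AlterViewRenameOperation.
tableEnv.executeSql("ALTER VIEW v1 RENAME TO v2");

// Parsed as SqlAlterViewProperties, converted to an AlterViewPropertiesOperation.
tableEnv.executeSql("ALTER VIEW v2 SET ('custom.key' = 'custom-value')");

// Parsed as SqlAlterViewAs, converted to an AlterViewAsOperation.
tableEnv.executeSql("ALTER VIEW v2 AS SELECT x FROM src");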

Example 8 with CatalogView

Use of org.apache.flink.table.catalog.CatalogView in project flink by apache.

From class SqlToOperationConverter, method convertCreateView.

/**
 * Convert a CREATE VIEW statement.
 */
private Operation convertCreateView(SqlCreateView sqlCreateView) {
    final SqlNode query = sqlCreateView.getQuery();
    final SqlNodeList fieldList = sqlCreateView.getFieldList();
    UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(sqlCreateView.fullViewName());
    ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
    String comment = sqlCreateView.getComment().map(c -> c.getNlsString().getValue()).orElse(null);
    CatalogView catalogView = convertViewQuery(query, fieldList.getList(), OperationConverterUtils.extractProperties(sqlCreateView.getProperties().orElse(null)), comment);
    return new CreateViewOperation(identifier, catalogView, sqlCreateView.isIfNotExists(), sqlCreateView.isTemporary());
}
Also used : SqlAlterTableReset(org.apache.flink.sql.parser.ddl.SqlAlterTableReset) ModifyOperation(org.apache.flink.table.operations.ModifyOperation) SqlShowCurrentCatalog(org.apache.flink.sql.parser.dql.SqlShowCurrentCatalog) UnresolvedIdentifier(org.apache.flink.table.catalog.UnresolvedIdentifier) SqlTableOption(org.apache.flink.sql.parser.ddl.SqlTableOption) FlinkPlannerImpl(org.apache.flink.table.planner.calcite.FlinkPlannerImpl) CatalogTable(org.apache.flink.table.catalog.CatalogTable) ResolvedSchema(org.apache.flink.table.catalog.ResolvedSchema) ShowCurrentDatabaseOperation(org.apache.flink.table.operations.ShowCurrentDatabaseOperation) SqlDropPartitions(org.apache.flink.sql.parser.ddl.SqlDropPartitions) SqlShowViews(org.apache.flink.sql.parser.dql.SqlShowViews) SqlAlterFunction(org.apache.flink.sql.parser.ddl.SqlAlterFunction) SqlEndStatementSet(org.apache.flink.sql.parser.dml.SqlEndStatementSet) SqlRichDescribeTable(org.apache.flink.sql.parser.dql.SqlRichDescribeTable) SqlShowPartitions(org.apache.flink.sql.parser.dql.SqlShowPartitions) Map(java.util.Map) SqlRemoveJar(org.apache.flink.sql.parser.ddl.SqlRemoveJar) FlinkHints(org.apache.flink.table.planner.hint.FlinkHints) CatalogPartitionImpl(org.apache.flink.table.catalog.CatalogPartitionImpl) SqlCreateTable(org.apache.flink.sql.parser.ddl.SqlCreateTable) SqlShowCreateTable(org.apache.flink.sql.parser.dql.SqlShowCreateTable) ExecutePlanOperation(org.apache.flink.table.operations.command.ExecutePlanOperation) DropDatabaseOperation(org.apache.flink.table.operations.ddl.DropDatabaseOperation) SqlUseModules(org.apache.flink.sql.parser.ddl.SqlUseModules) SqlUseDatabase(org.apache.flink.sql.parser.ddl.SqlUseDatabase) SinkModifyOperation(org.apache.flink.table.operations.SinkModifyOperation) SqlChangeColumn(org.apache.flink.sql.parser.ddl.SqlChangeColumn) ShowColumnsOperation(org.apache.flink.table.operations.ShowColumnsOperation) SqlKind(org.apache.calcite.sql.SqlKind) SqlAlterTable(org.apache.flink.sql.parser.ddl.SqlAlterTable) AlterTableOptionsOperation(org.apache.flink.table.operations.ddl.AlterTableOptionsOperation) AlterTableDropConstraintOperation(org.apache.flink.table.operations.ddl.AlterTableDropConstraintOperation) Set(java.util.Set) TableSchema(org.apache.flink.table.api.TableSchema) SqlAddReplaceColumns(org.apache.flink.sql.parser.ddl.SqlAddReplaceColumns) CompilePlanOperation(org.apache.flink.table.operations.ddl.CompilePlanOperation) CreateCatalogOperation(org.apache.flink.table.operations.ddl.CreateCatalogOperation) SqlDropDatabase(org.apache.flink.sql.parser.ddl.SqlDropDatabase) ShowCreateViewOperation(org.apache.flink.table.operations.ShowCreateViewOperation) OperationConverterUtils(org.apache.flink.table.planner.utils.OperationConverterUtils) UseCatalogOperation(org.apache.flink.table.operations.UseCatalogOperation) UseDatabaseOperation(org.apache.flink.table.operations.UseDatabaseOperation) CatalogFunction(org.apache.flink.table.catalog.CatalogFunction) FactoryUtil(org.apache.flink.table.factories.FactoryUtil) ShowCatalogsOperation(org.apache.flink.table.operations.ShowCatalogsOperation) CatalogDatabaseImpl(org.apache.flink.table.catalog.CatalogDatabaseImpl) SqlAlterTableAddConstraint(org.apache.flink.sql.parser.ddl.SqlAlterTableAddConstraint) RichSqlInsert(org.apache.flink.sql.parser.dml.RichSqlInsert) CreateViewOperation(org.apache.flink.table.operations.ddl.CreateViewOperation) ShowJarsOperation(org.apache.flink.table.operations.command.ShowJarsOperation) 
AlterDatabaseOperation(org.apache.flink.table.operations.ddl.AlterDatabaseOperation) CatalogDatabase(org.apache.flink.table.catalog.CatalogDatabase) SqlAlterViewRename(org.apache.flink.sql.parser.ddl.SqlAlterViewRename) QueryOperation(org.apache.flink.table.operations.QueryOperation) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier) SqlAlterDatabase(org.apache.flink.sql.parser.ddl.SqlAlterDatabase) CompileAndExecutePlanOperation(org.apache.flink.table.operations.CompileAndExecutePlanOperation) CatalogFunctionImpl(org.apache.flink.table.catalog.CatalogFunctionImpl) SqlUnloadModule(org.apache.flink.sql.parser.dql.SqlUnloadModule) DatabaseNotExistException(org.apache.flink.table.catalog.exceptions.DatabaseNotExistException) EndStatementSetOperation(org.apache.flink.table.operations.EndStatementSetOperation) ArrayList(java.util.ArrayList) LinkedHashMap(java.util.LinkedHashMap) SqlShowCreateView(org.apache.flink.sql.parser.dql.SqlShowCreateView) SqlAlterView(org.apache.flink.sql.parser.ddl.SqlAlterView) UseModulesOperation(org.apache.flink.table.operations.UseModulesOperation) CatalogView(org.apache.flink.table.catalog.CatalogView) Catalog(org.apache.flink.table.catalog.Catalog) SqlIdentifier(org.apache.calcite.sql.SqlIdentifier) DropCatalogFunctionOperation(org.apache.flink.table.operations.ddl.DropCatalogFunctionOperation) Expander(org.apache.flink.table.planner.utils.Expander) CalciteSqlDialect(org.apache.calcite.sql.dialect.CalciteSqlDialect) ShowTablesOperation(org.apache.flink.table.operations.ShowTablesOperation) SqlDropTable(org.apache.flink.sql.parser.ddl.SqlDropTable) DescribeTableOperation(org.apache.flink.table.operations.DescribeTableOperation) SqlLoadModule(org.apache.flink.sql.parser.dql.SqlLoadModule) ShowCurrentCatalogOperation(org.apache.flink.table.operations.ShowCurrentCatalogOperation) FunctionScope(org.apache.flink.table.operations.ShowFunctionsOperation.FunctionScope) SqlCreateFunction(org.apache.flink.sql.parser.ddl.SqlCreateFunction) SqlAddJar(org.apache.flink.sql.parser.ddl.SqlAddJar) SqlCreateDatabase(org.apache.flink.sql.parser.ddl.SqlCreateDatabase) TableException(org.apache.flink.table.api.TableException) SqlTableConstraint(org.apache.flink.sql.parser.ddl.constraint.SqlTableConstraint) SqlAddPartitions(org.apache.flink.sql.parser.ddl.SqlAddPartitions) ShowFunctionsOperation(org.apache.flink.table.operations.ShowFunctionsOperation) SqlCompileAndExecutePlan(org.apache.flink.sql.parser.dml.SqlCompileAndExecutePlan) CreateDatabaseOperation(org.apache.flink.table.operations.ddl.CreateDatabaseOperation) CatalogPartitionSpec(org.apache.flink.table.catalog.CatalogPartitionSpec) SqlShowColumns(org.apache.flink.sql.parser.dql.SqlShowColumns) SqlCreateView(org.apache.flink.sql.parser.ddl.SqlCreateView) SqlStatementSet(org.apache.flink.sql.parser.dml.SqlStatementSet) SqlShowCurrentDatabase(org.apache.flink.sql.parser.dql.SqlShowCurrentDatabase) AlterPartitionPropertiesOperation(org.apache.flink.table.operations.ddl.AlterPartitionPropertiesOperation) SqlParser(org.apache.calcite.sql.parser.SqlParser) SqlShowFunctions(org.apache.flink.sql.parser.dql.SqlShowFunctions) SqlAlterTableOptions(org.apache.flink.sql.parser.ddl.SqlAlterTableOptions) TableSchemaUtils(org.apache.flink.table.utils.TableSchemaUtils) ShowPartitionsOperation(org.apache.flink.table.operations.ShowPartitionsOperation) Schema(org.apache.flink.table.api.Schema) SqlUseCatalog(org.apache.flink.sql.parser.ddl.SqlUseCatalog) SqlExecutePlan(org.apache.flink.sql.parser.dml.SqlExecutePlan) 
SqlShowDatabases(org.apache.flink.sql.parser.dql.SqlShowDatabases) SqlDropView(org.apache.flink.sql.parser.ddl.SqlDropView) AlterViewPropertiesOperation(org.apache.flink.table.operations.ddl.AlterViewPropertiesOperation) SqlNode(org.apache.calcite.sql.SqlNode) SqlUtil(org.apache.calcite.sql.SqlUtil) SetOperation(org.apache.flink.table.operations.command.SetOperation) RelHint(org.apache.calcite.rel.hint.RelHint) SqlAlterViewAs(org.apache.flink.sql.parser.ddl.SqlAlterViewAs) SqlAlterTableRename(org.apache.flink.sql.parser.ddl.SqlAlterTableRename) ManagedTableListener(org.apache.flink.table.catalog.ManagedTableListener) SqlDropCatalog(org.apache.flink.sql.parser.ddl.SqlDropCatalog) SqlDropFunction(org.apache.flink.sql.parser.ddl.SqlDropFunction) SqlShowCatalogs(org.apache.flink.sql.parser.dql.SqlShowCatalogs) LoadModuleOperation(org.apache.flink.table.operations.LoadModuleOperation) Operation(org.apache.flink.table.operations.Operation) AlterCatalogFunctionOperation(org.apache.flink.table.operations.ddl.AlterCatalogFunctionOperation) SqlAlterTableDropConstraint(org.apache.flink.sql.parser.ddl.SqlAlterTableDropConstraint) SqlCompilePlan(org.apache.flink.sql.parser.ddl.SqlCompilePlan) DropTempSystemFunctionOperation(org.apache.flink.table.operations.ddl.DropTempSystemFunctionOperation) ShowViewsOperation(org.apache.flink.table.operations.ShowViewsOperation) ShowDatabasesOperation(org.apache.flink.table.operations.ShowDatabasesOperation) SqlAlterTableCompact(org.apache.flink.sql.parser.ddl.SqlAlterTableCompact) FunctionLanguage(org.apache.flink.table.catalog.FunctionLanguage) StringUtils(org.apache.flink.util.StringUtils) Collectors(java.util.stream.Collectors) List(java.util.List) SqlShowModules(org.apache.flink.sql.parser.dql.SqlShowModules) ShowModulesOperation(org.apache.flink.table.operations.ShowModulesOperation) SqlRichExplain(org.apache.flink.sql.parser.dql.SqlRichExplain) SourceQueryOperation(org.apache.flink.table.operations.SourceQueryOperation) UnloadModuleOperation(org.apache.flink.table.operations.UnloadModuleOperation) ValidationException(org.apache.flink.table.api.ValidationException) DropTableOperation(org.apache.flink.table.operations.ddl.DropTableOperation) Optional(java.util.Optional) CatalogViewImpl(org.apache.flink.table.catalog.CatalogViewImpl) AlterViewAsOperation(org.apache.flink.table.operations.ddl.AlterViewAsOperation) RemoveJarOperation(org.apache.flink.table.operations.command.RemoveJarOperation) DropViewOperation(org.apache.flink.table.operations.ddl.DropViewOperation) SqlSet(org.apache.flink.sql.parser.ddl.SqlSet) CatalogManager(org.apache.flink.table.catalog.CatalogManager) SqlAlterViewProperties(org.apache.flink.sql.parser.ddl.SqlAlterViewProperties) BeginStatementSetOperation(org.apache.flink.table.operations.BeginStatementSetOperation) AddPartitionsOperation(org.apache.flink.table.operations.ddl.AddPartitionsOperation) RelRoot(org.apache.calcite.rel.RelRoot) HashMap(java.util.HashMap) AddJarOperation(org.apache.flink.table.operations.command.AddJarOperation) DropPartitionsOperation(org.apache.flink.table.operations.ddl.DropPartitionsOperation) AlterTableAddConstraintOperation(org.apache.flink.table.operations.ddl.AlterTableAddConstraintOperation) HashSet(java.util.HashSet) ExplainOperation(org.apache.flink.table.operations.ExplainOperation) ResetOperation(org.apache.flink.table.operations.command.ResetOperation) HintStrategyTable(org.apache.calcite.rel.hint.HintStrategyTable) SqlShowTables(org.apache.flink.sql.parser.dql.SqlShowTables) 
CatalogPartition(org.apache.flink.table.catalog.CatalogPartition) StatementSetOperation(org.apache.flink.table.operations.StatementSetOperation) SqlBeginStatementSet(org.apache.flink.sql.parser.dml.SqlBeginStatementSet) ResolvedCatalogTable(org.apache.flink.table.catalog.ResolvedCatalogTable) DropCatalogOperation(org.apache.flink.table.operations.ddl.DropCatalogOperation) ContextResolvedTable(org.apache.flink.table.catalog.ContextResolvedTable) SqlReset(org.apache.flink.sql.parser.ddl.SqlReset) SqlShowJars(org.apache.flink.sql.parser.dql.SqlShowJars) AlterTableRenameOperation(org.apache.flink.table.operations.ddl.AlterTableRenameOperation) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) SqlDialect(org.apache.calcite.sql.SqlDialect) SqlCreateCatalog(org.apache.flink.sql.parser.ddl.SqlCreateCatalog) SqlExecute(org.apache.flink.sql.parser.dml.SqlExecute) ShowCreateTableOperation(org.apache.flink.table.operations.ShowCreateTableOperation) AlterViewRenameOperation(org.apache.flink.table.operations.ddl.AlterViewRenameOperation) CreateCatalogFunctionOperation(org.apache.flink.table.operations.ddl.CreateCatalogFunctionOperation) CreateTempSystemFunctionOperation(org.apache.flink.table.operations.ddl.CreateTempSystemFunctionOperation) SqlNodeList(org.apache.calcite.sql.SqlNodeList) Collections(java.util.Collections) ShowCreateViewOperation(org.apache.flink.table.operations.ShowCreateViewOperation) CreateViewOperation(org.apache.flink.table.operations.ddl.CreateViewOperation) SqlNodeList(org.apache.calcite.sql.SqlNodeList) UnresolvedIdentifier(org.apache.flink.table.catalog.UnresolvedIdentifier) CatalogView(org.apache.flink.table.catalog.CatalogView) SqlNode(org.apache.calcite.sql.SqlNode) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier)
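
The converter pulls the optional column list, the optional comment, the view options, and the defining query out of the parsed statement, and carries the IF NOT EXISTS and TEMPORARY flags on the resulting CreateViewOperation. A hedged sketch of a statement exercising most of these pieces (table and view names are hypothetical):

// Converted into a CreateViewOperation carrying a CatalogView with the given
// column names, comment, and query.
tableEnv.executeSql(
        "CREATE TEMPORARY VIEW IF NOT EXISTS v1 (a, b) COMMENT 'demo view' "
                + "AS SELECT x, ts FROM src");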

Example 9 with CatalogView

Use of org.apache.flink.table.catalog.CatalogView in project flink by apache.

From class HiveCatalogITCase, method testViewSchema.

@Test
public void testViewSchema() throws Exception {
    TableEnvironment tableEnv = HiveTestUtils.createTableEnvInBatchMode(SqlDialect.DEFAULT);
    tableEnv.registerCatalog(hiveCatalog.getName(), hiveCatalog);
    tableEnv.useCatalog(hiveCatalog.getName());
    tableEnv.executeSql("create database db1");
    try {
        tableEnv.useDatabase("db1");
        tableEnv.executeSql("create table src(x int,ts timestamp(3)) with ('connector'='datagen','number-of-rows'='10')");
        tableEnv.executeSql("create view v1 as select x,ts from src order by x limit 3");
        CatalogView catalogView = (CatalogView) hiveCatalog.getTable(new ObjectPath("db1", "v1"));
        Schema viewSchema = catalogView.getUnresolvedSchema();
        assertThat(viewSchema).isEqualTo(Schema.newBuilder().fromFields(new String[] { "x", "ts" }, new AbstractDataType[] { DataTypes.INT(), DataTypes.TIMESTAMP(3) }).build());
        List<Row> results = CollectionUtil.iteratorToList(tableEnv.executeSql("select x from v1").collect());
        assertThat(results).hasSize(3);
        tableEnv.executeSql("create view v2 (v2_x,v2_ts) comment 'v2 comment' as select x,cast(ts as timestamp_ltz(3)) from v1");
        catalogView = (CatalogView) hiveCatalog.getTable(new ObjectPath("db1", "v2"));
        assertThat(catalogView.getUnresolvedSchema()).isEqualTo(Schema.newBuilder().fromFields(new String[] { "v2_x", "v2_ts" }, new AbstractDataType[] { DataTypes.INT(), DataTypes.TIMESTAMP_LTZ(3) }).build());
        assertThat(catalogView.getComment()).isEqualTo("v2 comment");
        results = CollectionUtil.iteratorToList(tableEnv.executeSql("select * from v2").collect());
        assertThat(results).hasSize(3);
    } finally {
        tableEnv.executeSql("drop database db1 cascade");
    }
}
Also used : ObjectPath(org.apache.flink.table.catalog.ObjectPath) Schema(org.apache.flink.table.api.Schema) TableSchema(org.apache.flink.table.api.TableSchema) TableEnvironment(org.apache.flink.table.api.TableEnvironment) Row(org.apache.flink.types.Row) CatalogView(org.apache.flink.table.catalog.CatalogView) Test(org.junit.Test)
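
The unresolved Schema asserted on in the test can also be supplied directly when view metadata is built programmatically rather than through DDL. A minimal sketch, assuming the CatalogView.of factory available in recent Flink versions (view name, comment, and queries are hypothetical):

// Build the view metadata programmatically and read it back through the same
// accessors the test uses.
Schema schema = Schema.newBuilder()
        .column("x", DataTypes.INT())
        .column("ts", DataTypes.TIMESTAMP(3))
        .build();
CatalogView view = CatalogView.of(
        schema,
        // comment
        "programmatic view",
        // original query
        "SELECT x, ts FROM src",
        // expanded query
        "SELECT `x`, `ts` FROM `db1`.`src`",
        // options
        Collections.emptyMap());
assertThat(view.getUnresolvedSchema()).isEqualTo(schema);
assertThat(view.getComment()).isEqualTo("programmatic view");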

Example 10 with CatalogView

Use of org.apache.flink.table.catalog.CatalogView in project flink by apache.

From class HiveParserDDLSemanticAnalyzer, method convertAlterViewProps.

private Operation convertAlterViewProps(CatalogBaseTable oldBaseTable, String tableName, Map<String, String> newProps) {
    ObjectIdentifier viewIdentifier = parseObjectIdentifier(tableName);
    CatalogView oldView = (CatalogView) oldBaseTable;
    Map<String, String> props = new HashMap<>(oldView.getOptions());
    props.putAll(newProps);
    CatalogView newView = new CatalogViewImpl(oldView.getOriginalQuery(), oldView.getExpandedQuery(), oldView.getSchema(), props, oldView.getComment());
    return new AlterViewPropertiesOperation(viewIdentifier, newView);
}
Also used : AlterViewPropertiesOperation(org.apache.flink.table.operations.ddl.AlterViewPropertiesOperation) CatalogViewImpl(org.apache.flink.table.catalog.CatalogViewImpl) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) CatalogView(org.apache.flink.table.catalog.CatalogView) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier)
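
In the Hive dialect, this conversion backs the SET TBLPROPERTIES form of ALTER VIEW: the new properties are merged over the old view's options and wrapped in an AlterViewPropertiesOperation. A hedged sketch of a statement that would take this path, assuming the Hive dialect is active (view name and property key are hypothetical):

// With tableEnv.getConfig().setSqlDialect(SqlDialect.HIVE) in effect, the statement
// is handled by HiveParserDDLSemanticAnalyzer and ends up in convertAlterViewProps.
tableEnv.executeSql("ALTER VIEW v1 SET TBLPROPERTIES ('purpose' = 'reporting')");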

Aggregations

CatalogView (org.apache.flink.table.catalog.CatalogView) 12
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable) 10
CatalogTable (org.apache.flink.table.catalog.CatalogTable) 8
ValidationException (org.apache.flink.table.api.ValidationException) 7
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier) 7
HashMap (java.util.HashMap) 6
LinkedHashMap (java.util.LinkedHashMap) 5
TableSchema (org.apache.flink.table.api.TableSchema) 5
CatalogViewImpl (org.apache.flink.table.catalog.CatalogViewImpl) 4
ArrayList (java.util.ArrayList) 3
Map (java.util.Map) 3
Schema (org.apache.flink.table.api.Schema) 3
ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable) 3
ResolvedCatalogTable (org.apache.flink.table.catalog.ResolvedCatalogTable) 3
UnresolvedIdentifier (org.apache.flink.table.catalog.UnresolvedIdentifier) 3
AlterViewAsOperation (org.apache.flink.table.operations.ddl.AlterViewAsOperation) 3
AlterViewPropertiesOperation (org.apache.flink.table.operations.ddl.AlterViewPropertiesOperation) 3
List (java.util.List) 2
RelHint (org.apache.calcite.rel.hint.RelHint) 2
SqlNode (org.apache.calcite.sql.SqlNode) 2