Example 1 with CatalogViewImpl

Use of org.apache.flink.table.catalog.CatalogViewImpl in project flink by apache.

From the class HiveCatalog, the method instantiateCatalogTable:

@VisibleForTesting
CatalogBaseTable instantiateCatalogTable(Table hiveTable) {
    boolean isView = TableType.valueOf(hiveTable.getTableType()) == TableType.VIRTUAL_VIEW;
    // Table properties
    Map<String, String> properties = new HashMap<>(hiveTable.getParameters());
    boolean isHiveTable = isHiveTable(properties);
    TableSchema tableSchema;
    // Partition keys
    List<String> partitionKeys = new ArrayList<>();
    if (isHiveTable) {
        // Table schema
        tableSchema = HiveTableUtil.createTableSchema(hiveConf, hiveTable, client, hiveShim);
        if (!hiveTable.getPartitionKeys().isEmpty()) {
            partitionKeys = getFieldNames(hiveTable.getPartitionKeys());
        }
    } else {
        properties = retrieveFlinkProperties(properties);
        if (ManagedTableFactory.DEFAULT_IDENTIFIER.equalsIgnoreCase(properties.get(CONNECTOR.key()))) {
            // for Flink's managed table, we remove the connector option
            properties.remove(CONNECTOR.key());
        }
        DescriptorProperties tableSchemaProps = new DescriptorProperties(true);
        tableSchemaProps.putProperties(properties);
        // Try to get the table schema with both the new and the old (1.10) key, in order
        // to support tables created in older versions.
        tableSchema =
                tableSchemaProps
                        .getOptionalTableSchema(Schema.SCHEMA)
                        .orElseGet(
                                () ->
                                        tableSchemaProps
                                                .getOptionalTableSchema("generic.table.schema")
                                                .orElseGet(() -> TableSchema.builder().build()));
        partitionKeys = tableSchemaProps.getPartitionKeys();
        // remove the schema from properties
        properties = CatalogTableImpl.removeRedundant(properties, tableSchema, partitionKeys);
    }
    String comment = properties.remove(HiveCatalogConfig.COMMENT);
    if (isView) {
        return new CatalogViewImpl(hiveTable.getViewOriginalText(), hiveTable.getViewExpandedText(), tableSchema, properties, comment);
    } else {
        return new CatalogTableImpl(tableSchema, partitionKeys, properties, comment);
    }
}
Also used: CatalogViewImpl (org.apache.flink.table.catalog.CatalogViewImpl), TableSchema (org.apache.flink.table.api.TableSchema), HashMap (java.util.HashMap), DescriptorProperties (org.apache.flink.table.descriptors.DescriptorProperties), CatalogTableImpl (org.apache.flink.table.catalog.CatalogTableImpl), ArrayList (java.util.ArrayList), VisibleForTesting (org.apache.flink.annotation.VisibleForTesting)
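
For context, here is a minimal sketch of the view branch above: it builds a CatalogViewImpl directly from the kind of values a Hive VIRTUAL_VIEW supplies. The schema, query texts, properties, and comment are hypothetical, chosen only for illustration, not taken from the Flink sources.

import java.util.HashMap;
import java.util.Map;

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.catalog.CatalogBaseTable;
import org.apache.flink.table.catalog.CatalogViewImpl;

public class CatalogViewImplSketch {
    public static void main(String[] args) {
        // Stand-in for the schema HiveTableUtil.createTableSchema would derive.
        TableSchema tableSchema = TableSchema.builder()
                .field("id", DataTypes.INT())
                .field("name", DataTypes.STRING())
                .build();

        // Stand-in for hiveTable.getParameters() after the comment entry
        // has been removed, mirroring the method above.
        Map<String, String> properties = new HashMap<>();
        properties.put("transient_lastDdlTime", "0"); // hypothetical Hive parameter

        // The two query texts correspond to hiveTable.getViewOriginalText()
        // and hiveTable.getViewExpandedText().
        CatalogBaseTable view = new CatalogViewImpl(
                "SELECT id, name FROM src",
                "SELECT `src`.`id`, `src`.`name` FROM `mydb`.`src`",
                tableSchema,
                properties,
                "a demo view");

        System.out.println(view.getComment());
    }
}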

Example 2 with CatalogViewImpl

Use of org.apache.flink.table.catalog.CatalogViewImpl in project flink by apache.

From the class SqlToOperationConverter, the method convertAlterView:

/**
 * Converts an ALTER VIEW statement.
 */
private Operation convertAlterView(SqlAlterView alterView) {
    UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(alterView.fullViewName());
    ObjectIdentifier viewIdentifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
    Optional<ContextResolvedTable> optionalCatalogTable = catalogManager.getTable(viewIdentifier);
    if (!optionalCatalogTable.isPresent() || optionalCatalogTable.get().isTemporary()) {
        throw new ValidationException(String.format("View %s doesn't exist or is a temporary view.", viewIdentifier.toString()));
    }
    CatalogBaseTable baseTable = optionalCatalogTable.get().getTable();
    if (baseTable instanceof CatalogTable) {
        throw new ValidationException("ALTER VIEW for a table is not allowed");
    }
    if (alterView instanceof SqlAlterViewRename) {
        UnresolvedIdentifier newUnresolvedIdentifier = UnresolvedIdentifier.of(((SqlAlterViewRename) alterView).fullNewViewName());
        ObjectIdentifier newTableIdentifier = catalogManager.qualifyIdentifier(newUnresolvedIdentifier);
        return new AlterViewRenameOperation(viewIdentifier, newTableIdentifier);
    } else if (alterView instanceof SqlAlterViewProperties) {
        SqlAlterViewProperties alterViewProperties = (SqlAlterViewProperties) alterView;
        CatalogView oldView = (CatalogView) baseTable;
        Map<String, String> newProperties = new HashMap<>(oldView.getOptions());
        newProperties.putAll(OperationConverterUtils.extractProperties(alterViewProperties.getPropertyList()));
        CatalogView newView = new CatalogViewImpl(oldView.getOriginalQuery(), oldView.getExpandedQuery(), oldView.getSchema(), newProperties, oldView.getComment());
        return new AlterViewPropertiesOperation(viewIdentifier, newView);
    } else if (alterView instanceof SqlAlterViewAs) {
        SqlAlterViewAs alterViewAs = (SqlAlterViewAs) alterView;
        final SqlNode newQuery = alterViewAs.getNewQuery();
        CatalogView oldView = (CatalogView) baseTable;
        CatalogView newView = convertViewQuery(newQuery, Collections.emptyList(), oldView.getOptions(), oldView.getComment());
        return new AlterViewAsOperation(viewIdentifier, newView);
    } else {
        throw new ValidationException(String.format("[%s] needs to implement", alterView.toSqlString(CalciteSqlDialect.DEFAULT)));
    }
}
Also used: AlterViewPropertiesOperation (org.apache.flink.table.operations.ddl.AlterViewPropertiesOperation), AlterViewAsOperation (org.apache.flink.table.operations.ddl.AlterViewAsOperation), CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable), ValidationException (org.apache.flink.table.api.ValidationException), CatalogViewImpl (org.apache.flink.table.catalog.CatalogViewImpl), SqlAlterViewRename (org.apache.flink.sql.parser.ddl.SqlAlterViewRename), UnresolvedIdentifier (org.apache.flink.table.catalog.UnresolvedIdentifier), CatalogTable (org.apache.flink.table.catalog.CatalogTable), ResolvedCatalogTable (org.apache.flink.table.catalog.ResolvedCatalogTable), AlterViewRenameOperation (org.apache.flink.table.operations.ddl.AlterViewRenameOperation), SqlAlterViewAs (org.apache.flink.sql.parser.ddl.SqlAlterViewAs), SqlAlterViewProperties (org.apache.flink.sql.parser.ddl.SqlAlterViewProperties), ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable), CatalogView (org.apache.flink.table.catalog.CatalogView), Map (java.util.Map), LinkedHashMap (java.util.LinkedHashMap), HashMap (java.util.HashMap), ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier), SqlNode (org.apache.calcite.sql.SqlNode)
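
As a rough usage sketch (not from the Flink sources), the statements below are the shape of SQL that routes into convertAlterView. It assumes a TableEnvironment backed by the default in-memory catalog; the view names and the property key are hypothetical.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class AlterViewSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv =
                TableEnvironment.create(EnvironmentSettings.inStreamingMode());

        // A permanent view in the default in-memory catalog.
        tEnv.executeSql("CREATE VIEW demo_view AS SELECT 1 AS id");

        // Handled by the SqlAlterViewRename branch above.
        tEnv.executeSql("ALTER VIEW demo_view RENAME TO demo_view_2");

        // Handled by the SqlAlterViewProperties branch above; later values
        // overwrite older ones when the option maps are merged.
        tEnv.executeSql("ALTER VIEW demo_view_2 SET ('note' = 'demo')");

        // Handled by the SqlAlterViewAs branch above.
        tEnv.executeSql("ALTER VIEW demo_view_2 AS SELECT 2 AS id");
    }
}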

Example 3 with CatalogViewImpl

Use of org.apache.flink.table.catalog.CatalogViewImpl in project flink by apache.

From the class HiveParserDDLSemanticAnalyzer, the method convertAlterViewProps:

private Operation convertAlterViewProps(CatalogBaseTable oldBaseTable, String tableName, Map<String, String> newProps) {
    ObjectIdentifier viewIdentifier = parseObjectIdentifier(tableName);
    CatalogView oldView = (CatalogView) oldBaseTable;
    Map<String, String> props = new HashMap<>(oldView.getOptions());
    props.putAll(newProps);
    CatalogView newView = new CatalogViewImpl(oldView.getOriginalQuery(), oldView.getExpandedQuery(), oldView.getSchema(), props, oldView.getComment());
    return new AlterViewPropertiesOperation(viewIdentifier, newView);
}
Also used: AlterViewPropertiesOperation (org.apache.flink.table.operations.ddl.AlterViewPropertiesOperation), CatalogViewImpl (org.apache.flink.table.catalog.CatalogViewImpl), LinkedHashMap (java.util.LinkedHashMap), HashMap (java.util.HashMap), CatalogView (org.apache.flink.table.catalog.CatalogView), ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier)
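
The property merge above is a copy-on-write pattern: the old view is never mutated; a new CatalogViewImpl is created with the overlaid options. A standalone sketch of that pattern follows; the helper name withMergedOptions is hypothetical and not part of Flink.

import java.util.HashMap;
import java.util.Map;

import org.apache.flink.table.catalog.CatalogView;
import org.apache.flink.table.catalog.CatalogViewImpl;

public final class ViewOptionsSketch {

    // Hypothetical helper: copy a view, overlaying new options onto the old ones.
    static CatalogView withMergedOptions(CatalogView oldView, Map<String, String> newProps) {
        Map<String, String> props = new HashMap<>(oldView.getOptions());
        // putAll lets the new entries win, matching ALTER VIEW ... SET semantics.
        props.putAll(newProps);
        return new CatalogViewImpl(
                oldView.getOriginalQuery(),
                oldView.getExpandedQuery(),
                oldView.getSchema(),
                props,
                oldView.getComment());
    }
}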

Example 4 with CatalogViewImpl

Use of org.apache.flink.table.catalog.CatalogViewImpl in project flink by apache.

From the class HiveParserDDLSemanticAnalyzer, the method convertCreateView:

private Operation convertCreateView(HiveParserASTNode ast) throws SemanticException {
    String[] qualTabName = HiveParserBaseSemanticAnalyzer.getQualifiedTableName((HiveParserASTNode) ast.getChild(0));
    String dbDotTable = HiveParserBaseSemanticAnalyzer.getDotName(qualTabName);
    List<FieldSchema> cols = null;
    boolean ifNotExists = false;
    boolean isAlterViewAs = false;
    String comment = null;
    HiveParserASTNode selectStmt = null;
    Map<String, String> tblProps = null;
    boolean isMaterialized = ast.getToken().getType() == HiveASTParser.TOK_CREATE_MATERIALIZED_VIEW;
    if (isMaterialized) {
        handleUnsupportedOperation("MATERIALIZED VIEW is not supported");
    }
    HiveParserStorageFormat storageFormat = new HiveParserStorageFormat(conf);
    LOG.info("Creating view " + dbDotTable + " position=" + ast.getCharPositionInLine());
    int numCh = ast.getChildCount();
    for (int num = 1; num < numCh; num++) {
        HiveParserASTNode child = (HiveParserASTNode) ast.getChild(num);
        if (storageFormat.fillStorageFormat(child)) {
            handleUnsupportedOperation("FILE FORMAT for view is not supported");
        }
        switch(child.getToken().getType()) {
            case HiveASTParser.TOK_IFNOTEXISTS:
                ifNotExists = true;
                break;
            case HiveASTParser.TOK_REWRITE_ENABLED:
                handleUnsupportedOperation("MATERIALIZED VIEW REWRITE is not supported");
                break;
            case HiveASTParser.TOK_ORREPLACE:
                handleUnsupportedOperation("CREATE OR REPLACE VIEW is not supported");
                break;
            case HiveASTParser.TOK_QUERY:
                selectStmt = child;
                break;
            case HiveASTParser.TOK_TABCOLNAME:
                cols = HiveParserBaseSemanticAnalyzer.getColumns(child);
                break;
            case HiveASTParser.TOK_TABLECOMMENT:
                comment = HiveParserBaseSemanticAnalyzer.unescapeSQLString(child.getChild(0).getText());
                break;
            case HiveASTParser.TOK_TABLEPROPERTIES:
                tblProps = getProps((HiveParserASTNode) child.getChild(0));
                break;
            case HiveASTParser.TOK_TABLEROWFORMAT:
                handleUnsupportedOperation("ROW FORMAT for view is not supported");
                break;
            case HiveASTParser.TOK_TABLESERIALIZER:
                handleUnsupportedOperation("SERDE for view is not supported");
                break;
            case HiveASTParser.TOK_TABLELOCATION:
                handleUnsupportedOperation("LOCATION for view is not supported");
                break;
            case HiveASTParser.TOK_VIEWPARTCOLS:
                handleUnsupportedOperation("PARTITION COLUMN for view is not supported");
                break;
            default:
                throw new ValidationException("Unknown AST node for CREATE/ALTER VIEW: " + child);
        }
    }
    if (ast.getToken().getType() == HiveASTParser.TOK_ALTERVIEW && ast.getChild(1).getType() == HiveASTParser.TOK_QUERY) {
        isAlterViewAs = true;
    }
    queryState.setCommandType(HiveOperation.CREATEVIEW);
    HiveParserCreateViewInfo createViewInfo = new HiveParserCreateViewInfo(dbDotTable, cols, selectStmt);
    hiveParser.analyzeCreateView(createViewInfo, context, queryState, hiveShim);
    ObjectIdentifier viewIdentifier = parseObjectIdentifier(createViewInfo.getCompoundName());
    TableSchema schema = HiveTableUtil.createTableSchema(createViewInfo.getSchema(), Collections.emptyList(), Collections.emptySet(), null);
    Map<String, String> props = new HashMap<>();
    if (isAlterViewAs) {
        CatalogBaseTable baseTable = getCatalogBaseTable(viewIdentifier);
        props.putAll(baseTable.getOptions());
        comment = baseTable.getComment();
    } else {
        if (tblProps != null) {
            props.putAll(tblProps);
        }
    }
    CatalogView catalogView = new CatalogViewImpl(createViewInfo.getOriginalText(), createViewInfo.getExpandedText(), schema, props, comment);
    if (isAlterViewAs) {
        return new AlterViewAsOperation(viewIdentifier, catalogView);
    } else {
        return new CreateViewOperation(viewIdentifier, catalogView, ifNotExists, false);
    }
}
Also used: AlterViewAsOperation (org.apache.flink.table.operations.ddl.AlterViewAsOperation), CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable), HiveParserASTNode (org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode), ValidationException (org.apache.flink.table.api.ValidationException), CatalogViewImpl (org.apache.flink.table.catalog.CatalogViewImpl), TableSchema (org.apache.flink.table.api.TableSchema), HiveParserStorageFormat (org.apache.flink.table.planner.delegation.hive.copy.HiveParserStorageFormat), LinkedHashMap (java.util.LinkedHashMap), HashMap (java.util.HashMap), FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema), NotNullConstraint (org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint), UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint), CreateViewOperation (org.apache.flink.table.operations.ddl.CreateViewOperation), CatalogView (org.apache.flink.table.catalog.CatalogView), ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier)
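
For orientation, the statement below is the shape of Hive-dialect SQL that lands in convertCreateView. This sketch assumes flink-connector-hive on the classpath and a HiveCatalog already registered under the hypothetical name "myhive"; the view, table, and property names are also made up.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.SqlDialect;
import org.apache.flink.table.api.TableEnvironment;

public class HiveCreateViewSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv =
                TableEnvironment.create(EnvironmentSettings.inBatchMode());
        // Assumes the HiveCatalog "myhive" was registered beforehand.
        tEnv.useCatalog("myhive");
        tEnv.getConfig().setSqlDialect(SqlDialect.HIVE);

        // Exercises the TOK_IFNOTEXISTS, TOK_TABLECOMMENT, TOK_TABLEPROPERTIES,
        // and TOK_QUERY branches handled above.
        tEnv.executeSql(
                "CREATE VIEW IF NOT EXISTS demo_view "
                        + "COMMENT 'a demo view' "
                        + "TBLPROPERTIES ('k1' = 'v1') "
                        + "AS SELECT id, name FROM src");
    }
}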

Aggregations

HashMap (java.util.HashMap): 4
CatalogViewImpl (org.apache.flink.table.catalog.CatalogViewImpl): 4
LinkedHashMap (java.util.LinkedHashMap): 3
CatalogView (org.apache.flink.table.catalog.CatalogView): 3
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 3
TableSchema (org.apache.flink.table.api.TableSchema): 2
ValidationException (org.apache.flink.table.api.ValidationException): 2
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 2
AlterViewAsOperation (org.apache.flink.table.operations.ddl.AlterViewAsOperation): 2
AlterViewPropertiesOperation (org.apache.flink.table.operations.ddl.AlterViewPropertiesOperation): 2
ArrayList (java.util.ArrayList): 1
Map (java.util.Map): 1
SqlNode (org.apache.calcite.sql.SqlNode): 1
VisibleForTesting (org.apache.flink.annotation.VisibleForTesting): 1
SqlAlterViewAs (org.apache.flink.sql.parser.ddl.SqlAlterViewAs): 1
SqlAlterViewProperties (org.apache.flink.sql.parser.ddl.SqlAlterViewProperties): 1
SqlAlterViewRename (org.apache.flink.sql.parser.ddl.SqlAlterViewRename): 1
UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint): 1
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 1
CatalogTableImpl (org.apache.flink.table.catalog.CatalogTableImpl): 1