Example 36 with ObjectIdentifier

Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink-mirror by flink-ci.

The class HiveParserDDLSemanticAnalyzer, method convertAlterTableProps.

private Operation convertAlterTableProps(CatalogBaseTable oldBaseTable, String tableName, Map<String, String> partSpec, Map<String, String> newProps) {
    ObjectIdentifier tableIdentifier = parseObjectIdentifier(tableName);
    CatalogTable oldTable = (CatalogTable) oldBaseTable;
    CatalogPartitionSpec catalogPartitionSpec = partSpec != null ? new CatalogPartitionSpec(partSpec) : null;
    CatalogPartition catalogPartition = partSpec != null ? getPartition(tableIdentifier, catalogPartitionSpec) : null;
    Map<String, String> props = new HashMap<>();
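    // with a partition spec the merged properties target that partition;
    // otherwise they target the table itself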
    if (catalogPartition != null) {
        props.putAll(catalogPartition.getProperties());
        props.putAll(newProps);
        return new AlterPartitionPropertiesOperation(tableIdentifier, catalogPartitionSpec, new CatalogPartitionImpl(props, catalogPartition.getComment()));
    } else {
        props.putAll(oldTable.getOptions());
        props.putAll(newProps);
        return new AlterTableOptionsOperation(tableIdentifier, oldTable.copy(props));
    }
}
Also used: AlterTableOptionsOperation (org.apache.flink.table.operations.ddl.AlterTableOptionsOperation), LinkedHashMap (java.util.LinkedHashMap), HashMap (java.util.HashMap), CatalogPartition (org.apache.flink.table.catalog.CatalogPartition), AlterPartitionPropertiesOperation (org.apache.flink.table.operations.ddl.AlterPartitionPropertiesOperation), CatalogTable (org.apache.flink.table.catalog.CatalogTable), CatalogPartitionSpec (org.apache.flink.table.catalog.CatalogPartitionSpec), ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier), CatalogPartitionImpl (org.apache.flink.table.catalog.CatalogPartitionImpl)
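
For orientation, here is a minimal, self-contained sketch of the identifier type at the center of these examples. The catalog, database, and table names are invented for illustration; only ObjectIdentifier itself is the real Flink API.

import org.apache.flink.table.catalog.ObjectIdentifier;

public class ObjectIdentifierSketch {
    public static void main(String[] args) {
        // Fully qualified: catalog, database, object name -- this is what
        // parseObjectIdentifier resolves a possibly-partial name into.
        ObjectIdentifier id = ObjectIdentifier.of("myhive", "default", "users");

        // Prints the escaped, serializable form: `myhive`.`default`.`users`
        System.out.println(id.asSerializableString());

        // The three parts are individually accessible.
        System.out.println(id.getCatalogName());
        System.out.println(id.getDatabaseName());
        System.out.println(id.getObjectName());
    }
}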

Example 37 with ObjectIdentifier

Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink-mirror by flink-ci.

The class HiveParserDDLSemanticAnalyzer, method convertAlterTableDropParts.

private Operation convertAlterTableDropParts(String[] qualified, HiveParserASTNode ast) {
    boolean ifExists = ast.getFirstChildWithType(HiveASTParser.TOK_IFEXISTS) != null;
    // If the drop has to fail on non-existent partitions, we cannot batch expressions.
    // That is because we actually have to check each separate expression for existence.
    // We could do a small optimization for the case where expr has all columns and all
    // operators are equality, if we assume those would always match one partition (which
    // may not be true with legacy, non-normalized column values). This is probably a
    // popular case but that's kinda hacky. Let's not do it for now.
    Table tab = getTable(new ObjectPath(qualified[0], qualified[1]));
    // Hive represents drop-partition specs with a generic function desc, but what we need is
    // just the spec maps
    List<Map<String, String>> partSpecs = new ArrayList<>();
    for (int i = 0; i < ast.getChildCount(); i++) {
        HiveParserASTNode child = (HiveParserASTNode) ast.getChild(i);
        if (child.getType() == HiveASTParser.TOK_PARTSPEC) {
            partSpecs.add(getPartSpec(child));
        }
    }
    validateAlterTableType(tab);
    ObjectIdentifier tableIdentifier = catalogManager.qualifyIdentifier(UnresolvedIdentifier.of(qualified[0], qualified[1]));
    List<CatalogPartitionSpec> specs = partSpecs.stream().map(CatalogPartitionSpec::new).collect(Collectors.toList());
    return new DropPartitionsOperation(tableIdentifier, ifExists, specs);
}
Also used: DropPartitionsOperation (org.apache.flink.table.operations.ddl.DropPartitionsOperation), ObjectPath (org.apache.flink.table.catalog.ObjectPath), CatalogTable (org.apache.flink.table.catalog.CatalogTable), SqlCreateHiveTable (org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable), Table (org.apache.hadoop.hive.ql.metadata.Table), ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable), CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable), HiveParserASTNode (org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode), ArrayList (java.util.ArrayList), Map (java.util.Map), LinkedHashMap (java.util.LinkedHashMap), HashMap (java.util.HashMap), NotNullConstraint (org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint), UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint), CatalogPartitionSpec (org.apache.flink.table.catalog.CatalogPartitionSpec), ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier)
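
To make the shape of the result concrete, here is a hedged, self-contained sketch that assembles the same DropPartitionsOperation from plain spec maps, outside the analyzer. All identifiers and partition values are invented for illustration.

import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.flink.table.catalog.CatalogPartitionSpec;
import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.operations.ddl.DropPartitionsOperation;

public class DropPartitionsSketch {
    public static void main(String[] args) {
        ObjectIdentifier table = ObjectIdentifier.of("myhive", "default", "sales");

        // One spec map per PARTITION (...) clause in the DROP statement.
        Map<String, String> spec = new HashMap<>();
        spec.put("dt", "2021-01-01");
        spec.put("country", "US");
        List<CatalogPartitionSpec> specs =
                Collections.singletonList(new CatalogPartitionSpec(spec));

        // true means IF EXISTS was given, so missing partitions are ignored.
        DropPartitionsOperation op = new DropPartitionsOperation(table, true, specs);
        System.out.println(op.asSummaryString());
    }
}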

Example 38 with ObjectIdentifier

Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink-mirror by flink-ci.

The class HiveParserDDLSemanticAnalyzer, method convertAlterTableAddParts.

/**
 * Add one or more partitions to a table. Useful when the data has been copied to the right
 * location by some other process.
 */
private Operation convertAlterTableAddParts(String[] qualified, CommonTree ast) {
    // ^(TOK_ALTERTABLE_ADDPARTS identifier ifNotExists?
    // alterStatementSuffixAddPartitionsElement+)
    boolean ifNotExists = ast.getChild(0).getType() == HiveASTParser.TOK_IFNOTEXISTS;
    Table tab = getTable(new ObjectPath(qualified[0], qualified[1]));
    boolean isView = tab.isView();
    validateAlterTableType(tab);
    int numCh = ast.getChildCount();
    int start = ifNotExists ? 1 : 0;
    String currentLocation = null;
    Map<String, String> currentPartSpec = null;
    // Parser has done some verification, so the order of tokens doesn't need to be verified
    // here.
    List<CatalogPartitionSpec> specs = new ArrayList<>();
    List<CatalogPartition> partitions = new ArrayList<>();
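    // specs and partitions are kept as parallel lists: one CatalogPartitionSpec and one
    // CatalogPartitionImpl per PARTITION clause in the statement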
    for (int num = start; num < numCh; num++) {
        HiveParserASTNode child = (HiveParserASTNode) ast.getChild(num);
        switch(child.getToken().getType()) {
            case HiveASTParser.TOK_PARTSPEC:
                if (currentPartSpec != null) {
                    specs.add(new CatalogPartitionSpec(currentPartSpec));
                    Map<String, String> props = new HashMap<>();
                    if (currentLocation != null) {
                        props.put(TABLE_LOCATION_URI, currentLocation);
                    }
                    partitions.add(new CatalogPartitionImpl(props, null));
                    currentLocation = null;
                }
                currentPartSpec = getPartSpec(child);
                // validate reserved values
                validatePartitionValues(currentPartSpec);
                break;
            case HiveASTParser.TOK_PARTITIONLOCATION:
                // if location specified, set in partition
                if (isView) {
                    throw new ValidationException("LOCATION clause illegal for view partition");
                }
                currentLocation = HiveParserBaseSemanticAnalyzer.unescapeSQLString(child.getChild(0).getText());
                break;
            default:
                throw new ValidationException("Unknown child: " + child);
        }
    }
    // add the last one
    if (currentPartSpec != null) {
        specs.add(new CatalogPartitionSpec(currentPartSpec));
        Map<String, String> props = new HashMap<>();
        if (currentLocation != null) {
            props.put(TABLE_LOCATION_URI, currentLocation);
        }
        partitions.add(new CatalogPartitionImpl(props, null));
    }
    ObjectIdentifier tableIdentifier = tab.getDbName() == null ? parseObjectIdentifier(tab.getTableName()) : catalogManager.qualifyIdentifier(UnresolvedIdentifier.of(tab.getDbName(), tab.getTableName()));
    return new AddPartitionsOperation(tableIdentifier, ifNotExists, specs, partitions);
}
Also used: ObjectPath (org.apache.flink.table.catalog.ObjectPath), CatalogTable (org.apache.flink.table.catalog.CatalogTable), SqlCreateHiveTable (org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable), Table (org.apache.hadoop.hive.ql.metadata.Table), ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable), CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable), HiveParserASTNode (org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode), ValidationException (org.apache.flink.table.api.ValidationException), LinkedHashMap (java.util.LinkedHashMap), HashMap (java.util.HashMap), ArrayList (java.util.ArrayList), NotNullConstraint (org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint), UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint), CatalogPartition (org.apache.flink.table.catalog.CatalogPartition), AddPartitionsOperation (org.apache.flink.table.operations.ddl.AddPartitionsOperation), CatalogPartitionSpec (org.apache.flink.table.catalog.CatalogPartitionSpec), CatalogPartitionImpl (org.apache.flink.table.catalog.CatalogPartitionImpl), ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier)
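
As a companion to the loop above, a minimal sketch of one ADD PARTITION element paired with an explicit LOCATION. Names and paths are invented, and the literal property key is only a stand-in for the TABLE_LOCATION_URI constant the method references.

import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.flink.table.catalog.CatalogPartition;
import org.apache.flink.table.catalog.CatalogPartitionImpl;
import org.apache.flink.table.catalog.CatalogPartitionSpec;
import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.operations.ddl.AddPartitionsOperation;

public class AddPartitionsSketch {
    public static void main(String[] args) {
        ObjectIdentifier table = ObjectIdentifier.of("myhive", "default", "sales");

        // specs and partitions stay parallel, exactly as in the method above.
        Map<String, String> specMap = new HashMap<>();
        specMap.put("dt", "2021-01-01");
        List<CatalogPartitionSpec> specs =
                Collections.singletonList(new CatalogPartitionSpec(specMap));

        Map<String, String> props = new HashMap<>();
        // Stand-in key; the real code uses the TABLE_LOCATION_URI constant.
        props.put("location-uri", "hdfs:///warehouse/sales/dt=2021-01-01");
        List<CatalogPartition> partitions =
                Collections.singletonList(new CatalogPartitionImpl(props, null));

        // true means IF NOT EXISTS was given.
        AddPartitionsOperation op =
                new AddPartitionsOperation(table, true, specs, partitions);
        System.out.println(op.asSummaryString());
    }
}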

Example 39 with ObjectIdentifier

Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink-mirror by flink-ci.

The class HiveParserDDLSemanticAnalyzer, method convertCreateView.

private Operation convertCreateView(HiveParserASTNode ast) throws SemanticException {
    String[] qualTabName = HiveParserBaseSemanticAnalyzer.getQualifiedTableName((HiveParserASTNode) ast.getChild(0));
    String dbDotTable = HiveParserBaseSemanticAnalyzer.getDotName(qualTabName);
    List<FieldSchema> cols = null;
    boolean ifNotExists = false;
    boolean isAlterViewAs = false;
    String comment = null;
    HiveParserASTNode selectStmt = null;
    Map<String, String> tblProps = null;
    boolean isMaterialized = ast.getToken().getType() == HiveASTParser.TOK_CREATE_MATERIALIZED_VIEW;
    if (isMaterialized) {
        handleUnsupportedOperation("MATERIALIZED VIEW is not supported");
    }
    HiveParserStorageFormat storageFormat = new HiveParserStorageFormat(conf);
    LOG.info("Creating view " + dbDotTable + " position=" + ast.getCharPositionInLine());
    int numCh = ast.getChildCount();
    for (int num = 1; num < numCh; num++) {
        HiveParserASTNode child = (HiveParserASTNode) ast.getChild(num);
        if (storageFormat.fillStorageFormat(child)) {
            handleUnsupportedOperation("FILE FORMAT for view is not supported");
        }
        switch(child.getToken().getType()) {
            case HiveASTParser.TOK_IFNOTEXISTS:
                ifNotExists = true;
                break;
            case HiveASTParser.TOK_REWRITE_ENABLED:
                handleUnsupportedOperation("MATERIALIZED VIEW REWRITE is not supported");
                break;
            case HiveASTParser.TOK_ORREPLACE:
                handleUnsupportedOperation("CREATE OR REPLACE VIEW is not supported");
                break;
            case HiveASTParser.TOK_QUERY:
                selectStmt = child;
                break;
            case HiveASTParser.TOK_TABCOLNAME:
                cols = HiveParserBaseSemanticAnalyzer.getColumns(child);
                break;
            case HiveASTParser.TOK_TABLECOMMENT:
                comment = HiveParserBaseSemanticAnalyzer.unescapeSQLString(child.getChild(0).getText());
                break;
            case HiveASTParser.TOK_TABLEPROPERTIES:
                tblProps = getProps((HiveParserASTNode) child.getChild(0));
                break;
            case HiveASTParser.TOK_TABLEROWFORMAT:
                handleUnsupportedOperation("ROW FORMAT for view is not supported");
                break;
            case HiveASTParser.TOK_TABLESERIALIZER:
                handleUnsupportedOperation("SERDE for view is not supported");
                break;
            case HiveASTParser.TOK_TABLELOCATION:
                handleUnsupportedOperation("LOCATION for view is not supported");
                break;
            case HiveASTParser.TOK_VIEWPARTCOLS:
                handleUnsupportedOperation("PARTITION COLUMN for view is not supported");
                break;
            default:
                throw new ValidationException("Unknown AST node for CREATE/ALTER VIEW: " + child);
        }
    }
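    // ALTER VIEW ... AS <query> is funneled through this same method; detect it so the
    // existing view's options and comment are preserved below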
    if (ast.getToken().getType() == HiveASTParser.TOK_ALTERVIEW && ast.getChild(1).getType() == HiveASTParser.TOK_QUERY) {
        isAlterViewAs = true;
    }
    queryState.setCommandType(HiveOperation.CREATEVIEW);
    HiveParserCreateViewInfo createViewInfo = new HiveParserCreateViewInfo(dbDotTable, cols, selectStmt);
    hiveParser.analyzeCreateView(createViewInfo, context, queryState, hiveShim);
    ObjectIdentifier viewIdentifier = parseObjectIdentifier(createViewInfo.getCompoundName());
    TableSchema schema = HiveTableUtil.createTableSchema(createViewInfo.getSchema(), Collections.emptyList(), Collections.emptySet(), null);
    Map<String, String> props = new HashMap<>();
    if (isAlterViewAs) {
        CatalogBaseTable baseTable = getCatalogBaseTable(viewIdentifier);
        props.putAll(baseTable.getOptions());
        comment = baseTable.getComment();
    } else {
        if (tblProps != null) {
            props.putAll(tblProps);
        }
    }
    CatalogView catalogView = new CatalogViewImpl(createViewInfo.getOriginalText(), createViewInfo.getExpandedText(), schema, props, comment);
    if (isAlterViewAs) {
        return new AlterViewAsOperation(viewIdentifier, catalogView);
    } else {
        return new CreateViewOperation(viewIdentifier, catalogView, ifNotExists, false);
    }
}
Also used: AlterViewAsOperation (org.apache.flink.table.operations.ddl.AlterViewAsOperation), CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable), HiveParserASTNode (org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode), ValidationException (org.apache.flink.table.api.ValidationException), CatalogViewImpl (org.apache.flink.table.catalog.CatalogViewImpl), TableSchema (org.apache.flink.table.api.TableSchema), HiveParserStorageFormat (org.apache.flink.table.planner.delegation.hive.copy.HiveParserStorageFormat), LinkedHashMap (java.util.LinkedHashMap), HashMap (java.util.HashMap), FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema), NotNullConstraint (org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint), UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint), CreateViewOperation (org.apache.flink.table.operations.ddl.CreateViewOperation), CatalogView (org.apache.flink.table.catalog.CatalogView), ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier)
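
To show what the CREATE VIEW branch produces, a minimal sketch using the same catalog classes as the method above. The view name, schema, and query text are invented for illustration.

import java.util.Collections;

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.catalog.CatalogView;
import org.apache.flink.table.catalog.CatalogViewImpl;
import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.operations.ddl.CreateViewOperation;

public class CreateViewSketch {
    public static void main(String[] args) {
        ObjectIdentifier viewId =
                ObjectIdentifier.of("myhive", "default", "active_users");

        TableSchema schema = TableSchema.builder()
                .field("id", DataTypes.BIGINT())
                .field("name", DataTypes.STRING())
                .build();

        // Original query text plus the expanded (fully qualified) form.
        CatalogView view = new CatalogViewImpl(
                "SELECT id, name FROM users WHERE active = true",
                "SELECT `id`, `name` FROM `default`.`users` WHERE `active` = true",
                schema,
                Collections.emptyMap(),
                "users that are currently active");

        // ifNotExists = true; isTemporary = false, as in the method above.
        CreateViewOperation op = new CreateViewOperation(viewId, view, true, false);
        System.out.println(op.asSummaryString());
    }
}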

Example 40 with ObjectIdentifier

Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink-mirror by flink-ci.

The class HiveParserDDLSemanticAnalyzer, method convertDescribeTable.

/**
 * A query like "describe formatted default.maptable partition (b=100) id;" generates a tree as
 * follows:
 *
 * <pre>
 * TOK_TABTYPE
 *   TOK_TABNAME --> root for table name; 2 child nodes mean a DB was specified
 *     default
 *     maptable
 *   TOK_PARTSPEC --> root node for the partition spec, else the column name
 *     TOK_PARTVAL
 *       b
 *       100
 *   id --> root node for the column name
 * formatted
 * </pre>
 */
private Operation convertDescribeTable(HiveParserASTNode ast) {
    HiveParserASTNode tableTypeExpr = (HiveParserASTNode) ast.getChild(0);
    String dbName = null;
    String tableName;
    String colPath;
    Map<String, String> partSpec;
    HiveParserASTNode tableNode;
    // the table name is either TABLENAME or DBNAME.TABLENAME if a DB is given
    if (tableTypeExpr.getChild(0).getType() == HiveASTParser.TOK_TABNAME) {
        tableNode = (HiveParserASTNode) tableTypeExpr.getChild(0);
        if (tableNode.getChildCount() == 1) {
            tableName = tableNode.getChild(0).getText();
        } else {
            dbName = tableNode.getChild(0).getText();
            tableName = dbName + "." + tableNode.getChild(1).getText();
        }
    } else {
        throw new ValidationException(tableTypeExpr.getChild(0).getText() + " is not an expected token type");
    }
    // process the second child node, if it exists, to get the partition spec(s)
    partSpec = QualifiedNameUtil.getPartitionSpec(tableTypeExpr);
    // process the third child node, if it exists, to get the column path
    colPath = QualifiedNameUtil.getColPath(tableTypeExpr, dbName, tableName, partSpec);
    if (partSpec != null) {
        handleUnsupportedOperation("DESCRIBE PARTITION is not supported");
    }
    if (!colPath.equals(tableName)) {
        handleUnsupportedOperation("DESCRIBE COLUMNS is not supported");
    }
    boolean isExt = false;
    boolean isFormatted = false;
    if (ast.getChildCount() == 2) {
        int descOptions = ast.getChild(1).getType();
        isExt = descOptions == HiveASTParser.KW_EXTENDED;
        isFormatted = descOptions == HiveASTParser.KW_FORMATTED;
        if (descOptions == HiveASTParser.KW_PRETTY) {
            handleUnsupportedOperation("DESCRIBE PRETTY is not supported.");
        }
    }
    ObjectIdentifier tableIdentifier = parseObjectIdentifier(tableName);
    return new DescribeTableOperation(tableIdentifier, isExt || isFormatted);
}
Also used: HiveParserASTNode (org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode), ValidationException (org.apache.flink.table.api.ValidationException), DescribeTableOperation (org.apache.flink.table.operations.DescribeTableOperation), NotNullConstraint (org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint), UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint), ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier)
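
Finally, a minimal sketch of the operation this method returns; note that EXTENDED and FORMATTED collapse into a single boolean. Names are invented for illustration.

import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.operations.DescribeTableOperation;

public class DescribeTableSketch {
    public static void main(String[] args) {
        ObjectIdentifier table =
                ObjectIdentifier.of("myhive", "default", "maptable");

        boolean isExt = false;      // DESCRIBE EXTENDED
        boolean isFormatted = true; // DESCRIBE FORMATTED

        DescribeTableOperation op =
                new DescribeTableOperation(table, isExt || isFormatted);
        System.out.println(op.asSummaryString());
    }
}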

Aggregations

ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 185 usages
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 66 usages
UnresolvedIdentifier (org.apache.flink.table.catalog.UnresolvedIdentifier): 60 usages
ValidationException (org.apache.flink.table.api.ValidationException): 59 usages
HashMap (java.util.HashMap): 57 usages
LinkedHashMap (java.util.LinkedHashMap): 48 usages
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 42 usages
ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable): 41 usages
ResolvedCatalogTable (org.apache.flink.table.catalog.ResolvedCatalogTable): 33 usages
ArrayList (java.util.ArrayList): 30 usages
Map (java.util.Map): 27 usages
UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint): 27 usages
CatalogPartitionSpec (org.apache.flink.table.catalog.CatalogPartitionSpec): 24 usages
NotNullConstraint (org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint): 24 usages
TableException (org.apache.flink.table.api.TableException): 23 usages
TableSchema (org.apache.flink.table.api.TableSchema): 23 usages
CatalogView (org.apache.flink.table.catalog.CatalogView): 21 usages
QueryOperation (org.apache.flink.table.operations.QueryOperation): 18 usages
HiveParserASTNode (org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode): 18 usages
List (java.util.List): 16 usages