Search in sources:

Example 31 with HiveParserASTNode

Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode in project flink by apache.

From class HiveParserDDLSemanticAnalyzer, the method convertAlterTableChangeCol:

private Operation convertAlterTableChangeCol(CatalogBaseTable alteredTable, String[] qualified, HiveParserASTNode ast) throws SemanticException {
    String newComment = null;
    boolean first = false;
    String flagCol = null;
    boolean isCascade = false;
    // col_old_name col_new_name column_type [COMMENT col_comment] [FIRST|AFTER column_name]
    // [CASCADE|RESTRICT]
    String oldColName = ast.getChild(0).getText();
    String newColName = ast.getChild(1).getText();
    String newType = HiveParserBaseSemanticAnalyzer.getTypeStringFromAST((HiveParserASTNode) ast.getChild(2));
    int childCount = ast.getChildCount();
    for (int i = 3; i < childCount; i++) {
        HiveParserASTNode child = (HiveParserASTNode) ast.getChild(i);
        switch(child.getToken().getType()) {
            case HiveASTParser.StringLiteral:
                newComment = HiveParserBaseSemanticAnalyzer.unescapeSQLString(child.getText());
                break;
            case HiveASTParser.TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION:
                flagCol = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(child.getChild(0).getText());
                break;
            case HiveASTParser.KW_FIRST:
                first = true;
                break;
            case HiveASTParser.TOK_CASCADE:
                isCascade = true;
                break;
            case HiveASTParser.TOK_RESTRICT:
                break;
            default:
                throw new ValidationException("Unsupported token: " + child.getToken() + " for alter table");
        }
    }
    // Validate the rename: skewed columns cannot be renamed.
    Table tab = getTable(new ObjectPath(qualified[0], qualified[1]));
    SkewedInfo skewInfo = tab.getTTable().getSd().getSkewedInfo();
    if ((null != skewInfo) && (null != skewInfo.getSkewedColNames()) && skewInfo.getSkewedColNames().contains(oldColName)) {
        throw new ValidationException(oldColName + ErrorMsg.ALTER_TABLE_NOT_ALLOWED_RENAME_SKEWED_COLUMN.getMsg());
    }
    String tblName = HiveParserBaseSemanticAnalyzer.getDotName(qualified);
    ObjectIdentifier tableIdentifier = parseObjectIdentifier(tblName);
    CatalogTable oldTable = (CatalogTable) alteredTable;
    String oldName = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(oldColName);
    String newName = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(newColName);
    if (oldTable.getPartitionKeys().contains(oldName)) {
        // disallow changing partition columns
        throw new ValidationException("CHANGE COLUMN cannot be applied to partition columns");
    }
    TableSchema oldSchema = oldTable.getSchema();
    TableColumn newTableColumn = TableColumn.physical(newName, HiveTypeUtil.toFlinkType(TypeInfoUtils.getTypeInfoFromTypeString(newType)));
    TableSchema newSchema = OperationConverterUtils.changeColumn(oldSchema, oldName, newTableColumn, first, flagCol);
    Map<String, String> props = new HashMap<>(oldTable.getOptions());
    props.put(ALTER_TABLE_OP, ALTER_COLUMNS.name());
    if (isCascade) {
        props.put(ALTER_COL_CASCADE, "true");
    }
    return new AlterTableSchemaOperation(tableIdentifier, new CatalogTableImpl(newSchema, oldTable.getPartitionKeys(), props, oldTable.getComment()));
}
Also used : ObjectPath(org.apache.flink.table.catalog.ObjectPath) HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) ValidationException(org.apache.flink.table.api.ValidationException) CatalogTable(org.apache.flink.table.catalog.CatalogTable) SqlCreateHiveTable(org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable) Table(org.apache.hadoop.hive.ql.metadata.Table) ContextResolvedTable(org.apache.flink.table.catalog.ContextResolvedTable) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) TableSchema(org.apache.flink.table.api.TableSchema) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) CatalogTable(org.apache.flink.table.catalog.CatalogTable) TableColumn(org.apache.flink.table.api.TableColumn) NotNullConstraint(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint) SkewedInfo(org.apache.hadoop.hive.metastore.api.SkewedInfo) CatalogTableImpl(org.apache.flink.table.catalog.CatalogTableImpl) AlterTableSchemaOperation(org.apache.flink.table.operations.ddl.AlterTableSchemaOperation) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier)
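
For orientation, here is a minimal, hedged sketch of how such a statement reaches this converter through the public Table API. It assumes a HiveCatalog named "myhive" is already registered and the Hive connector is on the classpath; the catalog, table, and column names are illustrative only:

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.SqlDialect;
import org.apache.flink.table.api.TableEnvironment;

public class AlterColumnSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());
        // Assumption: a HiveCatalog named "myhive" was registered beforehand.
        tEnv.useCatalog("myhive");
        // Switching to the Hive dialect routes DDL through HiveParserDDLSemanticAnalyzer.
        tEnv.getConfig().setSqlDialect(SqlDialect.HIVE);
        // Children 0..2 of the AST carry the old name, new name, and type; the
        // trailing children (COMMENT, AFTER, CASCADE) drive the switch shown above.
        tEnv.executeSql(
                "ALTER TABLE users CHANGE COLUMN age age_years INT "
                        + "COMMENT 'age in years' AFTER name CASCADE");
    }
}

Note that the converter rejects renames of partition and skewed columns, so a statement like this succeeds only for regular data columns.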

Example 32 with HiveParserASTNode

Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode in project flink by apache.

From class HiveParserDDLSemanticAnalyzer, the method convertDescribeTable:

/**
 * A query like "describe formatted default.maptable partition (b=100) id;" will generate a
 * tree as follows:
 *
 * <pre>
 * TOK_TABTYPE
 *   TOK_TABNAME   --> root for the table name; 2 child nodes mean a DB was specified
 *     default
 *     maptable
 *   TOK_PARTSPEC  --> root node for the partition spec, else columnName
 *     TOK_PARTVAL
 *       b
 *       100
 *   id            --> root node for columnName
 * formatted
 * </pre>
 */
private Operation convertDescribeTable(HiveParserASTNode ast) {
    HiveParserASTNode tableTypeExpr = (HiveParserASTNode) ast.getChild(0);
    String dbName = null;
    String tableName;
    String colPath;
    Map<String, String> partSpec;
    HiveParserASTNode tableNode;
    // the table name is either TABLENAME, or DBNAME.TABLENAME if a DB is given
    if (tableTypeExpr.getChild(0).getType() == HiveASTParser.TOK_TABNAME) {
        tableNode = (HiveParserASTNode) tableTypeExpr.getChild(0);
        if (tableNode.getChildCount() == 1) {
            tableName = tableNode.getChild(0).getText();
        } else {
            dbName = tableNode.getChild(0).getText();
            tableName = dbName + "." + tableNode.getChild(1).getText();
        }
    } else {
        throw new ValidationException(tableTypeExpr.getChild(0).getText() + " is not an expected token type");
    }
    // process the second child node, if it exists, to get the partition spec(s)
    partSpec = QualifiedNameUtil.getPartitionSpec(tableTypeExpr);
    // process the third child node, if it exists, to get the column path
    colPath = QualifiedNameUtil.getColPath(tableTypeExpr, dbName, tableName, partSpec);
    if (partSpec != null) {
        handleUnsupportedOperation("DESCRIBE PARTITION is not supported");
    }
    if (!colPath.equals(tableName)) {
        handleUnsupportedOperation("DESCRIBE COLUMNS is not supported");
    }
    boolean isExt = false;
    boolean isFormatted = false;
    if (ast.getChildCount() == 2) {
        int descOptions = ast.getChild(1).getType();
        isExt = descOptions == HiveASTParser.KW_EXTENDED;
        isFormatted = descOptions == HiveASTParser.KW_FORMATTED;
        if (descOptions == HiveASTParser.KW_PRETTY) {
            handleUnsupportedOperation("DESCRIBE PRETTY is not supported.");
        }
    }
    ObjectIdentifier tableIdentifier = parseObjectIdentifier(tableName);
    return new DescribeTableOperation(tableIdentifier, isExt || isFormatted);
}
Also used : HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) ValidationException(org.apache.flink.table.api.ValidationException) DescribeTableOperation(org.apache.flink.table.operations.DescribeTableOperation) NotNullConstraint(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier)
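
The Javadoc's tree is easier to verify with a throwaway dump helper. HiveParserASTNode builds on the ANTLR 3 CommonTree, so a generic walker over org.antlr.runtime.tree.Tree suffices; the helper below is hypothetical, not part of Flink:

import org.antlr.runtime.tree.Tree;

/** Hypothetical debugging aid: prints an ANTLR 3 tree with two-space indentation. */
final class AstDump {
    static void dump(Tree node, int indent) {
        // Token text for this node, e.g. TOK_TABTYPE, TOK_TABNAME, or a literal.
        System.out.println(" ".repeat(indent) + node.getText());
        for (int i = 0; i < node.getChildCount(); i++) {
            dump(node.getChild(i), indent + 2);
        }
    }
}

Calling dump(ast, 0) on the node passed to convertDescribeTable should reproduce the TOK_TABTYPE/TOK_TABNAME layout sketched in the Javadoc, which makes the getChild(0)/getChild(1) indexing above easier to follow.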

Example 33 with HiveParserASTNode

Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode in project flink by apache.

From class HiveParserDDLSemanticAnalyzer, the method convertCreateDatabase:

private Operation convertCreateDatabase(HiveParserASTNode ast) {
    String dbName = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText());
    boolean ifNotExists = false;
    String dbComment = null;
    String dbLocation = null;
    Map<String, String> dbProps = null;
    for (int i = 1; i < ast.getChildCount(); i++) {
        HiveParserASTNode childNode = (HiveParserASTNode) ast.getChild(i);
        switch(childNode.getToken().getType()) {
            case HiveASTParser.TOK_IFNOTEXISTS:
                ifNotExists = true;
                break;
            case HiveASTParser.TOK_DATABASECOMMENT:
                dbComment = HiveParserBaseSemanticAnalyzer.unescapeSQLString(childNode.getChild(0).getText());
                break;
            case HiveASTParser.TOK_DATABASEPROPERTIES:
                dbProps = getProps((HiveParserASTNode) childNode.getChild(0));
                break;
            case HiveASTParser.TOK_DATABASELOCATION:
                dbLocation = HiveParserBaseSemanticAnalyzer.unescapeSQLString(childNode.getChild(0).getText());
                break;
            default:
                throw new ValidationException("Unknown AST node for CREATE DATABASE: " + childNode);
        }
    }
    Map<String, String> props = new HashMap<>();
    if (dbProps != null) {
        props.putAll(dbProps);
    }
    if (dbLocation != null) {
        props.put(DATABASE_LOCATION_URI, dbLocation);
    }
    CatalogDatabase catalogDatabase = new CatalogDatabaseImpl(props, dbComment);
    return new CreateDatabaseOperation(catalogManager.getCurrentCatalog(), dbName, catalogDatabase, ifNotExists);
}
Also used : CatalogDatabase(org.apache.flink.table.catalog.CatalogDatabase) HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) ValidationException(org.apache.flink.table.api.ValidationException) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) CreateDatabaseOperation(org.apache.flink.table.operations.ddl.CreateDatabaseOperation) NotNullConstraint(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint) CatalogDatabaseImpl(org.apache.flink.table.catalog.CatalogDatabaseImpl)
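
The loop's four cases map one-to-one onto the optional clauses of Hive's CREATE DATABASE syntax. Here is a hedged sketch of a statement exercising every branch, assuming the same Hive-dialect TableEnvironment as in the Example 31 sketch (all names and paths are illustrative):

import org.apache.flink.table.api.TableEnvironment;

final class CreateDatabaseSketch {
    static void createReportingDb(TableEnvironment tEnv) {
        tEnv.executeSql(
                "CREATE DATABASE IF NOT EXISTS reporting "        // TOK_IFNOTEXISTS
                        + "COMMENT 'nightly aggregates' "         // TOK_DATABASECOMMENT
                        + "LOCATION '/warehouse/reporting' "      // TOK_DATABASELOCATION
                        + "WITH DBPROPERTIES ('owner' = 'etl')"); // TOK_DATABASEPROPERTIES
    }
}

Note that the converter folds the LOCATION clause into the database properties under DATABASE_LOCATION_URI rather than keeping it as a separate field.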

Example 34 with HiveParserASTNode

Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode in project flink by apache.

From class HiveParserDDLSemanticAnalyzer, the method convertAlterDatabaseProperties:

private Operation convertAlterDatabaseProperties(HiveParserASTNode ast) {
    String dbName = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText());
    Map<String, String> dbProps = null;
    for (int i = 1; i < ast.getChildCount(); i++) {
        HiveParserASTNode childNode = (HiveParserASTNode) ast.getChild(i);
        if (childNode.getToken().getType() == HiveASTParser.TOK_DATABASEPROPERTIES) {
            dbProps = getProps((HiveParserASTNode) childNode.getChild(0));
        } else {
            throw new ValidationException("Unknown AST node for ALTER DATABASE PROPERTIES: " + childNode);
        }
    }
    CatalogDatabase originDB = getDatabase(dbName);
    Map<String, String> props = new HashMap<>(originDB.getProperties());
    props.put(ALTER_DATABASE_OP, SqlAlterHiveDatabase.AlterHiveDatabaseOp.CHANGE_PROPS.name());
    props.putAll(dbProps);
    CatalogDatabase newDB = new CatalogDatabaseImpl(props, originDB.getComment());
    return new AlterDatabaseOperation(catalogManager.getCurrentCatalog(), dbName, newDB);
}
Also used : CatalogDatabase(org.apache.flink.table.catalog.CatalogDatabase) AlterDatabaseOperation(org.apache.flink.table.operations.ddl.AlterDatabaseOperation) HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) ValidationException(org.apache.flink.table.api.ValidationException) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) NotNullConstraint(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint) CatalogDatabaseImpl(org.apache.flink.table.catalog.CatalogDatabaseImpl)
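
For comparison, the statement shape that reaches this converter is Hive's SET DBPROPERTIES form. A minimal sketch under the same assumptions as above (database name and property values are illustrative):

import org.apache.flink.table.api.TableEnvironment;

final class AlterDatabaseSketch {
    static void tagOwner(TableEnvironment tEnv) {
        // The new pairs are merged into the existing properties, alongside the
        // ALTER_DATABASE_OP=CHANGE_PROPS marker the converter adds.
        tEnv.executeSql("ALTER DATABASE reporting SET DBPROPERTIES ('owner' = 'bi-team')");
    }
}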

Example 35 with HiveParserASTNode

Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode in project flink by apache.

From class HiveParserDDLSemanticAnalyzer, the method convertAlterTableAddParts:

/**
 * Add one or more partitions to a table. Useful when the data has been copied to the right
 * location by some other process.
 */
private Operation convertAlterTableAddParts(String[] qualified, CommonTree ast) {
    // ^(TOK_ALTERTABLE_ADDPARTS identifier ifNotExists?
    // alterStatementSuffixAddPartitionsElement+)
    boolean ifNotExists = ast.getChild(0).getType() == HiveASTParser.TOK_IFNOTEXISTS;
    Table tab = getTable(new ObjectPath(qualified[0], qualified[1]));
    boolean isView = tab.isView();
    validateAlterTableType(tab);
    int numCh = ast.getChildCount();
    int start = ifNotExists ? 1 : 0;
    String currentLocation = null;
    Map<String, String> currentPartSpec = null;
    // Parser has done some verification, so the order of tokens doesn't need to be verified
    // here.
    List<CatalogPartitionSpec> specs = new ArrayList<>();
    List<CatalogPartition> partitions = new ArrayList<>();
    for (int num = start; num < numCh; num++) {
        HiveParserASTNode child = (HiveParserASTNode) ast.getChild(num);
        switch(child.getToken().getType()) {
            case HiveASTParser.TOK_PARTSPEC:
                if (currentPartSpec != null) {
                    specs.add(new CatalogPartitionSpec(currentPartSpec));
                    Map<String, String> props = new HashMap<>();
                    if (currentLocation != null) {
                        props.put(TABLE_LOCATION_URI, currentLocation);
                    }
                    partitions.add(new CatalogPartitionImpl(props, null));
                    currentLocation = null;
                }
                currentPartSpec = getPartSpec(child);
                // validate reserved values
                validatePartitionValues(currentPartSpec);
                break;
            case HiveASTParser.TOK_PARTITIONLOCATION:
                // if location specified, set in partition
                if (isView) {
                    throw new ValidationException("LOCATION clause illegal for view partition");
                }
                currentLocation = HiveParserBaseSemanticAnalyzer.unescapeSQLString(child.getChild(0).getText());
                break;
            default:
                throw new ValidationException("Unknown child: " + child);
        }
    }
    // add the last one
    if (currentPartSpec != null) {
        specs.add(new CatalogPartitionSpec(currentPartSpec));
        Map<String, String> props = new HashMap<>();
        if (currentLocation != null) {
            props.put(TABLE_LOCATION_URI, currentLocation);
        }
        partitions.add(new CatalogPartitionImpl(props, null));
    }
    ObjectIdentifier tableIdentifier = tab.getDbName() == null ? parseObjectIdentifier(tab.getTableName()) : catalogManager.qualifyIdentifier(UnresolvedIdentifier.of(tab.getDbName(), tab.getTableName()));
    return new AddPartitionsOperation(tableIdentifier, ifNotExists, specs, partitions);
}
Also used : ObjectPath(org.apache.flink.table.catalog.ObjectPath) CatalogTable(org.apache.flink.table.catalog.CatalogTable) SqlCreateHiveTable(org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable) Table(org.apache.hadoop.hive.ql.metadata.Table) ContextResolvedTable(org.apache.flink.table.catalog.ContextResolvedTable) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) ValidationException(org.apache.flink.table.api.ValidationException) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) NotNullConstraint(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint) CatalogPartition(org.apache.flink.table.catalog.CatalogPartition) AddPartitionsOperation(org.apache.flink.table.operations.ddl.AddPartitionsOperation) CatalogPartitionSpec(org.apache.flink.table.catalog.CatalogPartitionSpec) CatalogPartitionImpl(org.apache.flink.table.catalog.CatalogPartitionImpl) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier)
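
The buffering of currentPartSpec and currentLocation exists because a LOCATION clause, when present, follows its PARTITION clause as a sibling token. A hedged sketch of a statement with two partitions, the second carrying an explicit location (table, partition values, and path are illustrative):

import org.apache.flink.table.api.TableEnvironment;

final class AddPartitionsSketch {
    static void addDailyPartitions(TableEnvironment tEnv) {
        // Produces two TOK_PARTSPEC children; the second is followed by a
        // TOK_PARTITIONLOCATION sibling, so the converter flushes the first
        // spec (with no location) before parsing the second.
        tEnv.executeSql(
                "ALTER TABLE logs ADD IF NOT EXISTS "
                        + "PARTITION (dt = '2024-01-01') "
                        + "PARTITION (dt = '2024-01-02') LOCATION '/data/logs/2024-01-02'");
    }
}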

Aggregations

HiveParserASTNode (org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode): 38
LinkedHashMap (java.util.LinkedHashMap): 18
HashMap (java.util.HashMap): 15
ArrayList (java.util.ArrayList): 14
RelNode (org.apache.calcite.rel.RelNode): 14
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 14
ValidationException (org.apache.flink.table.api.ValidationException): 10
UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint): 10
NotNullConstraint (org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint): 10
HiveParserRowResolver (org.apache.flink.table.planner.delegation.hive.copy.HiveParserRowResolver): 10
RexNode (org.apache.calcite.rex.RexNode): 9
ColumnInfo (org.apache.hadoop.hive.ql.exec.ColumnInfo): 9
ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc): 9
Map (java.util.Map): 8
HiveParserTypeCheckCtx (org.apache.flink.table.planner.delegation.hive.copy.HiveParserTypeCheckCtx): 6
Table (org.apache.hadoop.hive.ql.metadata.Table): 6
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 5
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 5
HiveParserQBParseInfo (org.apache.flink.table.planner.delegation.hive.copy.HiveParserQBParseInfo): 5
RelDataType (org.apache.calcite.rel.type.RelDataType): 4