
Example 86 with ValidationException

use of org.apache.flink.table.api.ValidationException in project flink by apache.

In the class HiveParserDDLSemanticAnalyzer, the method convertAlterTableChangeCol:

private Operation convertAlterTableChangeCol(CatalogBaseTable alteredTable, String[] qualified, HiveParserASTNode ast) throws SemanticException {
    String newComment = null;
    boolean first = false;
    String flagCol = null;
    boolean isCascade = false;
    // col_old_name col_new_name column_type [COMMENT col_comment] [FIRST|AFTER column_name]
    // [CASCADE|RESTRICT]
    String oldColName = ast.getChild(0).getText();
    String newColName = ast.getChild(1).getText();
    String newType = HiveParserBaseSemanticAnalyzer.getTypeStringFromAST((HiveParserASTNode) ast.getChild(2));
    int childCount = ast.getChildCount();
    for (int i = 3; i < childCount; i++) {
        HiveParserASTNode child = (HiveParserASTNode) ast.getChild(i);
        switch(child.getToken().getType()) {
            case HiveASTParser.StringLiteral:
                newComment = HiveParserBaseSemanticAnalyzer.unescapeSQLString(child.getText());
                break;
            case HiveASTParser.TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION:
                flagCol = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(child.getChild(0).getText());
                break;
            case HiveASTParser.KW_FIRST:
                first = true;
                break;
            case HiveASTParser.TOK_CASCADE:
                isCascade = true;
                break;
            case HiveASTParser.TOK_RESTRICT:
                break;
            default:
                throw new ValidationException("Unsupported token: " + child.getToken() + " for alter table");
        }
    }
    // Validate the rename: renaming a skewed column is not allowed.
    Table tab = getTable(new ObjectPath(qualified[0], qualified[1]));
    SkewedInfo skewInfo = tab.getTTable().getSd().getSkewedInfo();
    if ((null != skewInfo) && (null != skewInfo.getSkewedColNames()) && skewInfo.getSkewedColNames().contains(oldColName)) {
        throw new ValidationException(oldColName + ErrorMsg.ALTER_TABLE_NOT_ALLOWED_RENAME_SKEWED_COLUMN.getMsg());
    }
    String tblName = HiveParserBaseSemanticAnalyzer.getDotName(qualified);
    ObjectIdentifier tableIdentifier = parseObjectIdentifier(tblName);
    CatalogTable oldTable = (CatalogTable) alteredTable;
    String oldName = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(oldColName);
    String newName = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(newColName);
    if (oldTable.getPartitionKeys().contains(oldName)) {
        // disallow changing partition columns
        throw new ValidationException("CHANGE COLUMN cannot be applied to partition columns");
    }
    TableSchema oldSchema = oldTable.getSchema();
    TableColumn newTableColumn = TableColumn.physical(newName, HiveTypeUtil.toFlinkType(TypeInfoUtils.getTypeInfoFromTypeString(newType)));
    TableSchema newSchema = OperationConverterUtils.changeColumn(oldSchema, oldName, newTableColumn, first, flagCol);
    Map<String, String> props = new HashMap<>(oldTable.getOptions());
    props.put(ALTER_TABLE_OP, ALTER_COLUMNS.name());
    if (isCascade) {
        props.put(ALTER_COL_CASCADE, "true");
    }
    return new AlterTableSchemaOperation(tableIdentifier, new CatalogTableImpl(newSchema, oldTable.getPartitionKeys(), props, oldTable.getComment()));
}
Also used : ObjectPath(org.apache.flink.table.catalog.ObjectPath) HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) ValidationException(org.apache.flink.table.api.ValidationException) CatalogTable(org.apache.flink.table.catalog.CatalogTable) SqlCreateHiveTable(org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable) Table(org.apache.hadoop.hive.ql.metadata.Table) ContextResolvedTable(org.apache.flink.table.catalog.ContextResolvedTable) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) TableSchema(org.apache.flink.table.api.TableSchema) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) CatalogTable(org.apache.flink.table.catalog.CatalogTable) TableColumn(org.apache.flink.table.api.TableColumn) NotNullConstraint(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint) SkewedInfo(org.apache.hadoop.hive.metastore.api.SkewedInfo) CatalogTableImpl(org.apache.flink.table.catalog.CatalogTableImpl) AlterTableSchemaOperation(org.apache.flink.table.operations.ddl.AlterTableSchemaOperation) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier)
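
For context, here is a minimal usage sketch (not part of the Flink sources; the table and column names are illustrative) of the kind of statement that reaches convertAlterTableChangeCol. It assumes a HiveCatalog is registered and set as the current catalog, and that the Hive dialect is enabled:

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.SqlDialect;
import org.apache.flink.table.api.TableEnvironment;

TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());
// Route DDL through the Hive parser, and thus through HiveParserDDLSemanticAnalyzer.
tEnv.getConfig().setSqlDialect(SqlDialect.HIVE);

// Renames column `age` to `user_age`, retypes it to INT, moves it to the first
// position, and with CASCADE also sets ALTER_COL_CASCADE in the new table options.
tEnv.executeSql(
        "ALTER TABLE users CHANGE COLUMN age user_age INT COMMENT 'age in years' FIRST CASCADE");

// Targeting a partition column or a skewed column instead would fail with the
// ValidationException seen above.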

Example 87 with ValidationException

use of org.apache.flink.table.api.ValidationException in project flink by apache.

In the class HiveParserDDLSemanticAnalyzer, the method convertAlterTableProps:

private Operation convertAlterTableProps(CatalogBaseTable alteredTable, String tableName, HashMap<String, String> partSpec, HiveParserASTNode ast, boolean expectView, boolean isUnset) {
    HashMap<String, String> mapProp = getProps((HiveParserASTNode) (ast.getChild(0)).getChild(0));
    // validate the statistics-related properties
    for (Map.Entry<String, String> entry : mapProp.entrySet()) {
        // make sure nothing is changed if any entry is invalid
        if (entry.getKey().equals(StatsSetupConst.ROW_COUNT) || entry.getKey().equals(StatsSetupConst.RAW_DATA_SIZE)) {
            try {
                Long.parseLong(entry.getValue());
            } catch (Exception e) {
                throw new ValidationException("AlterTable " + entry.getKey() + " failed with value " + entry.getValue());
            }
        } else {
            if (HiveOperation.ALTERTABLE_UPDATETABLESTATS.getOperationName().equals(queryState.getCommandType()) || HiveOperation.ALTERTABLE_UPDATEPARTSTATS.getOperationName().equals(queryState.getCommandType())) {
                throw new ValidationException("AlterTable UpdateStats " + entry.getKey() + " failed because the only valid keys are " + StatsSetupConst.ROW_COUNT + " and " + StatsSetupConst.RAW_DATA_SIZE);
            }
        }
    }
    if (isUnset) {
        handleUnsupportedOperation("Unset properties not supported");
    }
    if (expectView) {
        return convertAlterViewProps(alteredTable, tableName, mapProp);
    } else {
        Map<String, String> newProps = new HashMap<>();
        newProps.put(ALTER_TABLE_OP, CHANGE_TBL_PROPS.name());
        newProps.putAll(mapProp);
        return convertAlterTableProps(alteredTable, tableName, partSpec, newProps);
    }
}
Also used : ValidationException(org.apache.flink.table.api.ValidationException) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) Map(java.util.Map) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) DatabaseNotExistException(org.apache.flink.table.catalog.exceptions.DatabaseNotExistException) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) ValidationException(org.apache.flink.table.api.ValidationException) TableNotExistException(org.apache.flink.table.catalog.exceptions.TableNotExistException)
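
A short follow-up sketch (same hypothetical tEnv as in Example 86) of the validation above: the statistics keys numRows (StatsSetupConst.ROW_COUNT) and rawDataSize (StatsSetupConst.RAW_DATA_SIZE) must parse as longs, and UNSET is rejected outright:

// Accepted: the value parses via Long.parseLong.
tEnv.executeSql("ALTER TABLE users SET TBLPROPERTIES ('numRows'='1000')");

// Throws ValidationException: 'numRows' must be numeric.
// tEnv.executeSql("ALTER TABLE users SET TBLPROPERTIES ('numRows'='many')");

// Rejected via handleUnsupportedOperation: UNSET is not supported.
// tEnv.executeSql("ALTER TABLE users UNSET TBLPROPERTIES ('numRows')");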

Example 88 with ValidationException

use of org.apache.flink.table.api.ValidationException in project flink by apache.

In the class HiveParserDDLSemanticAnalyzer, the method convertDescribeTable:

/**
 * A query like "describe formatted default.maptable partition (b=100) id;" will generate a tree
 * as follows:
 *
 * <pre>
 * TOK_TABTYPE
 *   TOK_TABNAME --> root for the table name; 2 child nodes mean a DB was specified
 *     default
 *     maptable
 *   TOK_PARTSPEC --> root node for the partition spec, else a column name
 *     TOK_PARTVAL
 *       b
 *       100
 *   id --> root node for the column name
 * formatted
 * </pre>
 */
private Operation convertDescribeTable(HiveParserASTNode ast) {
    HiveParserASTNode tableTypeExpr = (HiveParserASTNode) ast.getChild(0);
    String dbName = null;
    String tableName;
    String colPath;
    Map<String, String> partSpec;
    HiveParserASTNode tableNode;
    // the table name is either TABLENAME or DBNAME.TABLENAME if a DB is specified
    if (tableTypeExpr.getChild(0).getType() == HiveASTParser.TOK_TABNAME) {
        tableNode = (HiveParserASTNode) tableTypeExpr.getChild(0);
        if (tableNode.getChildCount() == 1) {
            tableName = tableNode.getChild(0).getText();
        } else {
            dbName = tableNode.getChild(0).getText();
            tableName = dbName + "." + tableNode.getChild(1).getText();
        }
    } else {
        throw new ValidationException(tableTypeExpr.getChild(0).getText() + " is not an expected token type");
    }
    // process the second child node, if it exists, to get the partition spec(s)
    partSpec = QualifiedNameUtil.getPartitionSpec(tableTypeExpr);
    // process the third child node, if it exists, to get the column path
    colPath = QualifiedNameUtil.getColPath(tableTypeExpr, dbName, tableName, partSpec);
    if (partSpec != null) {
        handleUnsupportedOperation("DESCRIBE PARTITION is not supported");
    }
    if (!colPath.equals(tableName)) {
        handleUnsupportedOperation("DESCRIBE COLUMNS is not supported");
    }
    boolean isExt = false;
    boolean isFormatted = false;
    if (ast.getChildCount() == 2) {
        int descOptions = ast.getChild(1).getType();
        isExt = descOptions == HiveASTParser.KW_EXTENDED;
        isFormatted = descOptions == HiveASTParser.KW_FORMATTED;
        if (descOptions == HiveASTParser.KW_PRETTY) {
            handleUnsupportedOperation("DESCRIBE PRETTY is not supported.");
        }
    }
    ObjectIdentifier tableIdentifier = parseObjectIdentifier(tableName);
    return new DescribeTableOperation(tableIdentifier, isExt || isFormatted);
}
Also used : HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) ValidationException(org.apache.flink.table.api.ValidationException) DescribeTableOperation(org.apache.flink.table.operations.DescribeTableOperation) NotNullConstraint(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier)
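
A sketch (same hypothetical tEnv) of what this converter accepts and rejects:

// Both map to a DescribeTableOperation with extended output enabled.
tEnv.executeSql("DESCRIBE EXTENDED db1.users");
tEnv.executeSql("DESCRIBE FORMATTED db1.users");

// Rejected: partition specs, column paths, and PRETTY all end up in
// handleUnsupportedOperation(...), and an unexpected token type throws a
// ValidationException.
// tEnv.executeSql("DESCRIBE db1.users PARTITION (dt='2021-01-01')");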

Example 89 with ValidationException

use of org.apache.flink.table.api.ValidationException in project flink by apache.

In the class HiveParserDDLSemanticAnalyzer, the method convertCreateDatabase:

private Operation convertCreateDatabase(HiveParserASTNode ast) {
    String dbName = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText());
    boolean ifNotExists = false;
    String dbComment = null;
    String dbLocation = null;
    Map<String, String> dbProps = null;
    for (int i = 1; i < ast.getChildCount(); i++) {
        HiveParserASTNode childNode = (HiveParserASTNode) ast.getChild(i);
        switch(childNode.getToken().getType()) {
            case HiveASTParser.TOK_IFNOTEXISTS:
                ifNotExists = true;
                break;
            case HiveASTParser.TOK_DATABASECOMMENT:
                dbComment = HiveParserBaseSemanticAnalyzer.unescapeSQLString(childNode.getChild(0).getText());
                break;
            case HiveASTParser.TOK_DATABASEPROPERTIES:
                dbProps = getProps((HiveParserASTNode) childNode.getChild(0));
                break;
            case HiveASTParser.TOK_DATABASELOCATION:
                dbLocation = HiveParserBaseSemanticAnalyzer.unescapeSQLString(childNode.getChild(0).getText());
                break;
            default:
                throw new ValidationException("Unknown AST node for CREATE DATABASE: " + childNode);
        }
    }
    Map<String, String> props = new HashMap<>();
    if (dbProps != null) {
        props.putAll(dbProps);
    }
    if (dbLocation != null) {
        props.put(DATABASE_LOCATION_URI, dbLocation);
    }
    CatalogDatabase catalogDatabase = new CatalogDatabaseImpl(props, dbComment);
    return new CreateDatabaseOperation(catalogManager.getCurrentCatalog(), dbName, catalogDatabase, ifNotExists);
}
Also used : CatalogDatabase(org.apache.flink.table.catalog.CatalogDatabase) HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) ValidationException(org.apache.flink.table.api.ValidationException) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) CreateDatabaseOperation(org.apache.flink.table.operations.ddl.CreateDatabaseOperation) NotNullConstraint(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint) CatalogDatabaseImpl(org.apache.flink.table.catalog.CatalogDatabaseImpl)
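
A sketch (same hypothetical tEnv) of a statement this converter handles end to end. Note that the LOCATION clause does not remain a separate field; it is folded into the database properties under the DATABASE_LOCATION_URI key:

tEnv.executeSql(
        "CREATE DATABASE IF NOT EXISTS analytics "
                + "COMMENT 'analytics db' "
                + "LOCATION '/warehouse/analytics' "
                + "WITH DBPROPERTIES ('owner'='data-team')");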

Example 90 with ValidationException

use of org.apache.flink.table.api.ValidationException in project flink by apache.

In the class HiveParserDDLSemanticAnalyzer, the method convertAlterDatabaseProperties:

private Operation convertAlterDatabaseProperties(HiveParserASTNode ast) {
    String dbName = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText());
    Map<String, String> dbProps = null;
    for (int i = 1; i < ast.getChildCount(); i++) {
        HiveParserASTNode childNode = (HiveParserASTNode) ast.getChild(i);
        if (childNode.getToken().getType() == HiveASTParser.TOK_DATABASEPROPERTIES) {
            dbProps = getProps((HiveParserASTNode) childNode.getChild(0));
        } else {
            throw new ValidationException("Unknown AST node for ALTER DATABASE PROPERTIES: " + childNode);
        }
    }
    CatalogDatabase originDB = getDatabase(dbName);
    Map<String, String> props = new HashMap<>(originDB.getProperties());
    props.put(ALTER_DATABASE_OP, SqlAlterHiveDatabase.AlterHiveDatabaseOp.CHANGE_PROPS.name());
    props.putAll(dbProps);
    CatalogDatabase newDB = new CatalogDatabaseImpl(props, originDB.getComment());
    return new AlterDatabaseOperation(catalogManager.getCurrentCatalog(), dbName, newDB);
}
Also used : CatalogDatabase(org.apache.flink.table.catalog.CatalogDatabase) AlterDatabaseOperation(org.apache.flink.table.operations.ddl.AlterDatabaseOperation) HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) ValidationException(org.apache.flink.table.api.ValidationException) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) NotNullConstraint(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint) CatalogDatabaseImpl(org.apache.flink.table.catalog.CatalogDatabaseImpl)
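
And the matching property change (same hypothetical tEnv); the converter merges the new entries over the existing ones and records the operation kind under ALTER_DATABASE_OP before emitting an AlterDatabaseOperation:

tEnv.executeSql("ALTER DATABASE analytics SET DBPROPERTIES ('owner'='platform-team')");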

Aggregations

ValidationException (org.apache.flink.table.api.ValidationException): 143 usages
DataType (org.apache.flink.table.types.DataType): 25 usages
Test (org.junit.Test): 23 usages
HashMap (java.util.HashMap): 21 usages
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 19 usages
LogicalType (org.apache.flink.table.types.logical.LogicalType): 18 usages
TableException (org.apache.flink.table.api.TableException): 17 usages
List (java.util.List): 14 usages
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 14 usages
QueryOperation (org.apache.flink.table.operations.QueryOperation): 14 usages
LinkedHashMap (java.util.LinkedHashMap): 13 usages
DescriptorProperties (org.apache.flink.table.descriptors.DescriptorProperties): 13 usages
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 12 usages
Expression (org.apache.flink.table.expressions.Expression): 12 usages
TableSchema (org.apache.flink.table.api.TableSchema): 11 usages
Catalog (org.apache.flink.table.catalog.Catalog): 11 usages
ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable): 11 usages
ArrayList (java.util.ArrayList): 10 usages
Map (java.util.Map): 10 usages
Internal (org.apache.flink.annotation.Internal): 10 usages