
Example 6 with ObjectIdentifier

Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink by apache.

From the class HiveParserDDLSemanticAnalyzer, method convertCreateTable:

private Operation convertCreateTable(HiveParserASTNode ast) throws SemanticException {
    String[] qualifiedTabName = HiveParserBaseSemanticAnalyzer.getQualifiedTableName((HiveParserASTNode) ast.getChild(0));
    String dbDotTab = HiveParserBaseSemanticAnalyzer.getDotName(qualifiedTabName);
    String likeTableName;
    List<FieldSchema> cols = new ArrayList<>();
    List<FieldSchema> partCols = new ArrayList<>();
    List<PrimaryKey> primaryKeys = new ArrayList<>();
    List<NotNullConstraint> notNulls = new ArrayList<>();
    String comment = null;
    String location = null;
    Map<String, String> tblProps = null;
    boolean ifNotExists = false;
    boolean isExt = false;
    boolean isTemporary = false;
    HiveParserASTNode selectStmt = null;
    // regular CREATE TABLE
    final int createTable = 0;
    // CREATE TABLE LIKE ... (CTLT)
    final int ctlt = 1;
    // CREATE TABLE AS SELECT ... (CTAS)
    final int ctas = 2;
    int commandType = createTable;
    HiveParserBaseSemanticAnalyzer.HiveParserRowFormatParams rowFormatParams = new HiveParserBaseSemanticAnalyzer.HiveParserRowFormatParams();
    HiveParserStorageFormat storageFormat = new HiveParserStorageFormat(conf);
    LOG.info("Creating table " + dbDotTab + " position=" + ast.getCharPositionInLine());
    int numCh = ast.getChildCount();
    // Note: CTAS does not support partitioning (for now).
    for (int num = 1; num < numCh; num++) {
        HiveParserASTNode child = (HiveParserASTNode) ast.getChild(num);
        if (storageFormat.fillStorageFormat(child)) {
            continue;
        }
        switch(child.getToken().getType()) {
            case HiveASTParser.TOK_IFNOTEXISTS:
                ifNotExists = true;
                break;
            case HiveASTParser.KW_EXTERNAL:
                isExt = true;
                break;
            case HiveASTParser.KW_TEMPORARY:
                isTemporary = true;
                break;
            case HiveASTParser.TOK_LIKETABLE:
                if (child.getChildCount() > 0) {
                    likeTableName = HiveParserBaseSemanticAnalyzer.getUnescapedName((HiveParserASTNode) child.getChild(0));
                    if (likeTableName != null) {
                        if (commandType == ctas) {
                            throw new ValidationException(ErrorMsg.CTAS_CTLT_COEXISTENCE.getMsg());
                        }
                        if (cols.size() != 0) {
                            throw new ValidationException(ErrorMsg.CTLT_COLLST_COEXISTENCE.getMsg());
                        }
                    }
                    commandType = ctlt;
                    handleUnsupportedOperation("CREATE TABLE LIKE is not supported");
                }
                break;
            // CTAS
            case HiveASTParser.TOK_QUERY:
                if (commandType == ctlt) {
                    throw new ValidationException(ErrorMsg.CTAS_CTLT_COEXISTENCE.getMsg());
                }
                if (cols.size() != 0) {
                    throw new ValidationException(ErrorMsg.CTAS_COLLST_COEXISTENCE.getMsg());
                }
                if (partCols.size() != 0) {
                    throw new ValidationException(ErrorMsg.CTAS_PARCOL_COEXISTENCE.getMsg());
                }
                if (isExt) {
                    throw new ValidationException(ErrorMsg.CTAS_EXTTBL_COEXISTENCE.getMsg());
                }
                commandType = ctas;
                selectStmt = child;
                break;
            case HiveASTParser.TOK_TABCOLLIST:
                cols = HiveParserBaseSemanticAnalyzer.getColumns(child, true, primaryKeys, notNulls);
                break;
            case HiveASTParser.TOK_TABLECOMMENT:
                comment = HiveParserBaseSemanticAnalyzer.unescapeSQLString(child.getChild(0).getText());
                break;
            case HiveASTParser.TOK_TABLEPARTCOLS:
                partCols = HiveParserBaseSemanticAnalyzer.getColumns((HiveParserASTNode) child.getChild(0), false);
                break;
            case HiveASTParser.TOK_TABLEROWFORMAT:
                rowFormatParams.analyzeRowFormat(child);
                break;
            case HiveASTParser.TOK_TABLELOCATION:
                location = HiveParserBaseSemanticAnalyzer.unescapeSQLString(child.getChild(0).getText());
                break;
            case HiveASTParser.TOK_TABLEPROPERTIES:
                tblProps = getProps((HiveParserASTNode) child.getChild(0));
                break;
            case HiveASTParser.TOK_TABLESERIALIZER:
                child = (HiveParserASTNode) child.getChild(0);
                storageFormat.setSerde(HiveParserBaseSemanticAnalyzer.unescapeSQLString(child.getChild(0).getText()));
                if (child.getChildCount() == 2) {
                    HiveParserBaseSemanticAnalyzer.readProps((HiveParserASTNode) (child.getChild(1).getChild(0)), storageFormat.getSerdeProps());
                }
                break;
            case HiveASTParser.TOK_ALTERTABLE_BUCKETS:
                handleUnsupportedOperation("Bucketed table is not supported");
                break;
            case HiveASTParser.TOK_TABLESKEWED:
                handleUnsupportedOperation("Skewed table is not supported");
                break;
            default:
                throw new ValidationException("Unknown AST node for CREATE TABLE: " + child);
        }
    }
    if (storageFormat.getStorageHandler() != null) {
        handleUnsupportedOperation("Storage handler table is not supported");
    }
    if (commandType == createTable || commandType == ctlt) {
        queryState.setCommandType(HiveOperation.CREATETABLE);
    } else {
        queryState.setCommandType(HiveOperation.CREATETABLE_AS_SELECT);
    }
    storageFormat.fillDefaultStorageFormat(isExt, false);
    if (isTemporary) {
        if (partCols.size() > 0) {
            handleUnsupportedOperation("Partition columns are not supported on temporary tables");
        }
        handleUnsupportedOperation("Temporary hive table is not supported");
    }
    // Handle different types of CREATE TABLE command
    switch(commandType) {
        // REGULAR CREATE TABLE DDL
        case createTable:
            tblProps = addDefaultProperties(tblProps);
            return convertCreateTable(dbDotTab, isExt, ifNotExists, isTemporary, cols, partCols, comment, location, tblProps, rowFormatParams, storageFormat, primaryKeys, notNulls);
        // create table like <tbl_name>
        case ctlt:
            tblProps = addDefaultProperties(tblProps);
            throw new SemanticException("CREATE TABLE LIKE is not supported yet");
        // create table as select
        case ctas:
            tblProps = addDefaultProperties(tblProps);
            // analyze the query
            HiveParserCalcitePlanner calcitePlanner = hiveParser.createCalcitePlanner(context, queryState, hiveShim);
            calcitePlanner.setCtasCols(cols);
            RelNode queryRelNode = calcitePlanner.genLogicalPlan(selectStmt);
            // create a table to represent the dest table
            String[] dbTblName = dbDotTab.split("\\.");
            Table destTable = new Table(Table.getEmptyTable(dbTblName[0], dbTblName[1]));
            destTable.getSd().setCols(cols);
            Tuple4<ObjectIdentifier, QueryOperation, Map<String, String>, Boolean> insertOperationInfo = dmlHelper.createInsertOperationInfo(queryRelNode, destTable, Collections.emptyMap(), Collections.emptyList(), false);
            CreateTableOperation createTableOperation = convertCreateTable(dbDotTab, isExt, ifNotExists, isTemporary, cols, partCols, comment, location, tblProps, rowFormatParams, storageFormat, primaryKeys, notNulls);
            return new CreateTableASOperation(createTableOperation, insertOperationInfo.f2, insertOperationInfo.f1, insertOperationInfo.f3);
        default:
            throw new ValidationException("Unrecognized command.");
    }
}
Also used : ValidationException(org.apache.flink.table.api.ValidationException) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) HiveParserRowFormatParams(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.HiveParserRowFormatParams) ArrayList(java.util.ArrayList) PrimaryKey(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.PrimaryKey) CreateTableOperation(org.apache.flink.table.operations.ddl.CreateTableOperation) CreateTableASOperation(org.apache.flink.table.operations.ddl.CreateTableASOperation) NotNullConstraint(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) HiveParserCalcitePlanner(org.apache.flink.table.planner.delegation.hive.HiveParserCalcitePlanner) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier) QueryOperation(org.apache.flink.table.operations.QueryOperation) HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) CatalogTable(org.apache.flink.table.catalog.CatalogTable) SqlCreateHiveTable(org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable) Table(org.apache.hadoop.hive.ql.metadata.Table) ContextResolvedTable(org.apache.flink.table.catalog.ContextResolvedTable) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) HiveParserStorageFormat(org.apache.flink.table.planner.delegation.hive.copy.HiveParserStorageFormat) NotNullConstraint(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint) HiveParserRowFormatParams(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.HiveParserRowFormatParams) RelNode(org.apache.calcite.rel.RelNode) HiveParserBaseSemanticAnalyzer(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer) Map(java.util.Map) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap)
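
Note that parseObjectIdentifier, dmlHelper and addDefaultProperties are members of the analyzer and are not shown above. Below is a minimal sketch of how such a name-resolution helper can be written against Flink's public catalog API, assuming access to the session's CatalogManager; the class and field names are hypothetical, not the Flink source.

import org.apache.flink.table.catalog.CatalogManager;
import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.catalog.UnresolvedIdentifier;

// Hypothetical helper: resolves a possibly partial "db.table" (or just "table")
// name into a fully qualified ObjectIdentifier using the session's current
// catalog and database.
public class IdentifierResolver {

    private final CatalogManager catalogManager;

    public IdentifierResolver(CatalogManager catalogManager) {
        this.catalogManager = catalogManager;
    }

    public ObjectIdentifier parseObjectIdentifier(String compoundName) {
        // Split on dots; a one- or two-part name is completed with the
        // current catalog/database by qualifyIdentifier.
        UnresolvedIdentifier unresolved = UnresolvedIdentifier.of(compoundName.split("\\."));
        return catalogManager.qualifyIdentifier(unresolved);
    }
}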

Example 7 with ObjectIdentifier

Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink by apache.

From the class HiveParserDDLSemanticAnalyzer, method convertAlterTableModifyCols:

private Operation convertAlterTableModifyCols(CatalogBaseTable alteredTable, String tblName, HiveParserASTNode ast, boolean replace) throws SemanticException {
    List<FieldSchema> newCols = HiveParserBaseSemanticAnalyzer.getColumns((HiveParserASTNode) ast.getChild(0));
    boolean isCascade = false;
    if (null != ast.getFirstChildWithType(HiveASTParser.TOK_CASCADE)) {
        isCascade = true;
    }
    ObjectIdentifier tableIdentifier = parseObjectIdentifier(tblName);
    CatalogTable oldTable = (CatalogTable) alteredTable;
    // prepare properties
    Map<String, String> props = new HashMap<>(oldTable.getOptions());
    props.put(ALTER_TABLE_OP, ALTER_COLUMNS.name());
    if (isCascade) {
        props.put(ALTER_COL_CASCADE, "true");
    }
    TableSchema oldSchema = oldTable.getSchema();
    final int numPartCol = oldTable.getPartitionKeys().size();
    TableSchema.Builder builder = TableSchema.builder();
    // add existing non-part col if we're not replacing
    if (!replace) {
        List<TableColumn> nonPartCols = oldSchema.getTableColumns().subList(0, oldSchema.getFieldCount() - numPartCol);
        for (TableColumn column : nonPartCols) {
            builder.add(column);
        }
        setWatermarkAndPK(builder, oldSchema);
    }
    // add new cols
    for (FieldSchema col : newCols) {
        builder.add(TableColumn.physical(col.getName(), HiveTypeUtil.toFlinkType(TypeInfoUtils.getTypeInfoFromTypeString(col.getType()))));
    }
    // add part cols
    List<TableColumn> partCols = oldSchema.getTableColumns().subList(oldSchema.getFieldCount() - numPartCol, oldSchema.getFieldCount());
    for (TableColumn column : partCols) {
        builder.add(column);
    }
    return new AlterTableSchemaOperation(tableIdentifier, new CatalogTableImpl(builder.build(), oldTable.getPartitionKeys(), props, oldTable.getComment()));
}
Also used : TableSchema(org.apache.flink.table.api.TableSchema) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) CatalogTable(org.apache.flink.table.catalog.CatalogTable) TableColumn(org.apache.flink.table.api.TableColumn) NotNullConstraint(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint) CatalogTableImpl(org.apache.flink.table.catalog.CatalogTableImpl) AlterTableSchemaOperation(org.apache.flink.table.operations.ddl.AlterTableSchemaOperation) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier)
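
The setWatermarkAndPK helper is not shown in the snippet. A plausible sketch, assuming it simply copies the old schema's watermark specs and primary key into the builder (the body below is an assumption, not necessarily the Flink source):

import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.api.WatermarkSpec;

final class SchemaCopyUtil {

    // Sketch of the helper referenced above: carries the existing watermark specs
    // and primary-key constraint from the old TableSchema into the new builder.
    static void setWatermarkAndPK(TableSchema.Builder builder, TableSchema schema) {
        // Re-register every watermark declared on the old schema.
        for (WatermarkSpec watermarkSpec : schema.getWatermarkSpecs()) {
            builder.watermark(watermarkSpec);
        }
        // Re-register the primary key, if one was declared.
        schema.getPrimaryKey()
                .ifPresent(pk -> builder.primaryKey(pk.getName(), pk.getColumns().toArray(new String[0])));
    }
}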

Example 8 with ObjectIdentifier

Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink by apache.

From the class HiveParserDDLSemanticAnalyzer, method convertDropTable:

private Operation convertDropTable(HiveParserASTNode ast, TableType expectedType) {
    String tableName = HiveParserBaseSemanticAnalyzer.getUnescapedName((HiveParserASTNode) ast.getChild(0));
    boolean ifExists = (ast.getFirstChildWithType(HiveASTParser.TOK_IFEXISTS) != null);
    ObjectIdentifier identifier = parseObjectIdentifier(tableName);
    CatalogBaseTable baseTable = getCatalogBaseTable(identifier, true);
    if (expectedType == TableType.VIRTUAL_VIEW) {
        if (baseTable instanceof CatalogTable) {
            throw new ValidationException("DROP VIEW for a table is not allowed");
        }
        return new DropViewOperation(identifier, ifExists, false);
    } else {
        if (baseTable instanceof CatalogView) {
            throw new ValidationException("DROP TABLE for a view is not allowed");
        }
        return new DropTableOperation(identifier, ifExists, false);
    }
}
Also used : CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) ValidationException(org.apache.flink.table.api.ValidationException) DropViewOperation(org.apache.flink.table.operations.ddl.DropViewOperation) DropTableOperation(org.apache.flink.table.operations.ddl.DropTableOperation) CatalogTable(org.apache.flink.table.catalog.CatalogTable) CatalogView(org.apache.flink.table.catalog.CatalogView) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier)
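
For context, ObjectIdentifier itself is just a fully qualified (catalog, database, object) triple. A small standalone usage sketch follows; the catalog and table names are made up for illustration.

import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.catalog.ObjectPath;

public class ObjectIdentifierDemo {
    public static void main(String[] args) {
        // A fully qualified identifier always carries catalog, database and object name.
        ObjectIdentifier id = ObjectIdentifier.of("myhive", "default", "orders");

        System.out.println(id.getCatalogName());  // myhive
        System.out.println(id.getDatabaseName()); // default
        System.out.println(id.getObjectName());   // orders

        // Dotted summary form, handy for log messages: myhive.default.orders
        System.out.println(id.asSummaryString());

        // Catalog APIs that are scoped to a single catalog take an ObjectPath instead.
        ObjectPath path = id.toObjectPath();      // "default.orders" within "myhive"
        System.out.println(path.getFullName());
    }
}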

Example 9 with ObjectIdentifier

Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink by apache.

From the class HiveParserDDLSemanticAnalyzer, method convertCreateTable (private helper overload):

private CreateTableOperation convertCreateTable(String compoundName, boolean isExternal, boolean ifNotExists, boolean isTemporary, List<FieldSchema> cols, List<FieldSchema> partCols, String comment, String location, Map<String, String> tblProps, HiveParserRowFormatParams rowFormatParams, HiveParserStorageFormat storageFormat, List<PrimaryKey> primaryKeys, List<NotNullConstraint> notNullConstraints) {
    Map<String, String> props = new HashMap<>();
    if (tblProps != null) {
        props.putAll(tblProps);
    }
    markHiveConnector(props);
    // external
    if (isExternal) {
        props.put(TABLE_IS_EXTERNAL, "true");
    }
    // PK trait
    UniqueConstraint uniqueConstraint = null;
    if (primaryKeys != null && !primaryKeys.isEmpty()) {
        PrimaryKey primaryKey = primaryKeys.get(0);
        byte trait = 0;
        if (primaryKey.isEnable()) {
            trait = HiveDDLUtils.enableConstraint(trait);
        }
        if (primaryKey.isValidate()) {
            trait = HiveDDLUtils.validateConstraint(trait);
        }
        if (primaryKey.isRely()) {
            trait = HiveDDLUtils.relyConstraint(trait);
        }
        props.put(PK_CONSTRAINT_TRAIT, String.valueOf(trait));
        List<String> pkCols = primaryKeys.stream().map(PrimaryKey::getPk).collect(Collectors.toList());
        String constraintName = primaryKey.getConstraintName();
        if (constraintName == null) {
            constraintName = pkCols.stream().collect(Collectors.joining("_", "PK_", ""));
        }
        uniqueConstraint = UniqueConstraint.primaryKey(constraintName, pkCols);
    }
    // NOT NULL constraints
    List<String> notNullCols = new ArrayList<>();
    if (!notNullConstraints.isEmpty()) {
        List<String> traits = new ArrayList<>();
        for (NotNullConstraint notNull : notNullConstraints) {
            byte trait = 0;
            if (notNull.isEnable()) {
                trait = HiveDDLUtils.enableConstraint(trait);
            }
            if (notNull.isValidate()) {
                trait = HiveDDLUtils.validateConstraint(trait);
            }
            if (notNull.isRely()) {
                trait = HiveDDLUtils.relyConstraint(trait);
            }
            traits.add(String.valueOf(trait));
            notNullCols.add(notNull.getColName());
        }
        props.put(NOT_NULL_CONSTRAINT_TRAITS, String.join(COL_DELIMITER, traits));
        props.put(NOT_NULL_COLS, String.join(COL_DELIMITER, notNullCols));
    }
    // row format
    if (rowFormatParams != null) {
        encodeRowFormat(rowFormatParams, props);
    }
    // storage format
    if (storageFormat != null) {
        encodeStorageFormat(storageFormat, props);
    }
    // location
    if (location != null) {
        props.put(TABLE_LOCATION_URI, location);
    }
    ObjectIdentifier identifier = parseObjectIdentifier(compoundName);
    Set<String> notNullColSet = new HashSet<>(notNullCols);
    if (uniqueConstraint != null) {
        notNullColSet.addAll(uniqueConstraint.getColumns());
    }
    TableSchema tableSchema = HiveTableUtil.createTableSchema(cols, partCols, notNullColSet, uniqueConstraint);
    return new CreateTableOperation(identifier, new CatalogTableImpl(tableSchema, HiveCatalog.getFieldNames(partCols), props, comment), ifNotExists, isTemporary);
}
Also used : TableSchema(org.apache.flink.table.api.TableSchema) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint) PrimaryKey(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.PrimaryKey) CreateTableOperation(org.apache.flink.table.operations.ddl.CreateTableOperation) CatalogTableImpl(org.apache.flink.table.catalog.CatalogTableImpl) NotNullConstraint(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier) HashSet(java.util.HashSet)
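
The end product of this method is an ordinary CreateTableOperation built from public Flink classes. A minimal, self-contained sketch of that final construction step; the catalog, table and column names are made up and the options map is simplified.

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableColumn;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.catalog.CatalogTableImpl;
import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.operations.ddl.CreateTableOperation;

public class CreateTableOperationDemo {
    public static void main(String[] args) {
        // Fully qualified target table.
        ObjectIdentifier identifier = ObjectIdentifier.of("myhive", "default", "users");

        // Schema with a NOT NULL primary-key column, mirroring the PK handling above.
        TableSchema schema = TableSchema.builder()
                .add(TableColumn.physical("id", DataTypes.INT().notNull()))
                .add(TableColumn.physical("name", DataTypes.STRING()))
                .primaryKey("PK_id", new String[] {"id"})
                .build();

        // Table options; the snippet above fills these with Hive-specific keys.
        Map<String, String> options = new HashMap<>();
        options.put("connector", "hive");

        CatalogTableImpl table =
                new CatalogTableImpl(schema, Collections.emptyList(), options, "demo table");

        // ifNotExists = true, isTemporary = false
        CreateTableOperation op = new CreateTableOperation(identifier, table, true, false);
        System.out.println(op.asSummaryString());
    }
}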

Example 10 with ObjectIdentifier

Use of org.apache.flink.table.catalog.ObjectIdentifier in project flink by apache.

From the class HiveParserDDLSemanticAnalyzer, method convertAlterTableRename:

private Operation convertAlterTableRename(String sourceName, HiveParserASTNode ast, boolean expectView) throws SemanticException {
    String[] target = HiveParserBaseSemanticAnalyzer.getQualifiedTableName((HiveParserASTNode) ast.getChild(0));
    String targetName = HiveParserBaseSemanticAnalyzer.getDotName(target);
    ObjectIdentifier objectIdentifier = parseObjectIdentifier(sourceName);
    return expectView ? new AlterViewRenameOperation(objectIdentifier, parseObjectIdentifier(targetName)) : new AlterTableRenameOperation(objectIdentifier, parseObjectIdentifier(targetName));
}
Also used : AlterTableRenameOperation(org.apache.flink.table.operations.ddl.AlterTableRenameOperation) AlterViewRenameOperation(org.apache.flink.table.operations.ddl.AlterViewRenameOperation) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier)

Aggregations

ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 184
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 65
UnresolvedIdentifier (org.apache.flink.table.catalog.UnresolvedIdentifier): 60
ValidationException (org.apache.flink.table.api.ValidationException): 59
HashMap (java.util.HashMap): 57
LinkedHashMap (java.util.LinkedHashMap): 48
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 42
ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable): 41
ResolvedCatalogTable (org.apache.flink.table.catalog.ResolvedCatalogTable): 32
ArrayList (java.util.ArrayList): 30
Map (java.util.Map): 27
UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint): 27
CatalogPartitionSpec (org.apache.flink.table.catalog.CatalogPartitionSpec): 24
NotNullConstraint (org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint): 24
TableException (org.apache.flink.table.api.TableException): 23
TableSchema (org.apache.flink.table.api.TableSchema): 22
CatalogView (org.apache.flink.table.catalog.CatalogView): 21
QueryOperation (org.apache.flink.table.operations.QueryOperation): 18
HiveParserASTNode (org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode): 18
List (java.util.List): 16