
Example 1 with PrimaryKey

Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.PrimaryKey in project flink by apache.

From the class HiveParserDDLSemanticAnalyzer, the method convertCreateTable:

private Operation convertCreateTable(HiveParserASTNode ast) throws SemanticException {
    String[] qualifiedTabName = HiveParserBaseSemanticAnalyzer.getQualifiedTableName((HiveParserASTNode) ast.getChild(0));
    String dbDotTab = HiveParserBaseSemanticAnalyzer.getDotName(qualifiedTabName);
    String likeTableName;
    List<FieldSchema> cols = new ArrayList<>();
    List<FieldSchema> partCols = new ArrayList<>();
    List<PrimaryKey> primaryKeys = new ArrayList<>();
    List<NotNullConstraint> notNulls = new ArrayList<>();
    String comment = null;
    String location = null;
    Map<String, String> tblProps = null;
    boolean ifNotExists = false;
    boolean isExt = false;
    boolean isTemporary = false;
    HiveParserASTNode selectStmt = null;
    // regular CREATE TABLE
    final int createTable = 0;
    // CREATE TABLE LIKE ... (CTLT)
    final int ctlt = 1;
    // CREATE TABLE AS SELECT ... (CTAS)
    final int ctas = 2;
    int commandType = createTable;
    HiveParserBaseSemanticAnalyzer.HiveParserRowFormatParams rowFormatParams = new HiveParserBaseSemanticAnalyzer.HiveParserRowFormatParams();
    HiveParserStorageFormat storageFormat = new HiveParserStorageFormat(conf);
    LOG.info("Creating table " + dbDotTab + " position=" + ast.getCharPositionInLine());
    int numCh = ast.getChildCount();
    // Check the first-level children and do simple semantic checks:
    // 1) CTLT and CTAS should not coexist.
    // 2) CTLT or CTAS should not coexist with a column list (target table schema).
    // 3) CTAS does not support partitioning (for now).
    for (int num = 1; num < numCh; num++) {
        HiveParserASTNode child = (HiveParserASTNode) ast.getChild(num);
        if (storageFormat.fillStorageFormat(child)) {
            continue;
        }
        switch(child.getToken().getType()) {
            case HiveASTParser.TOK_IFNOTEXISTS:
                ifNotExists = true;
                break;
            case HiveASTParser.KW_EXTERNAL:
                isExt = true;
                break;
            case HiveASTParser.KW_TEMPORARY:
                isTemporary = true;
                break;
            case HiveASTParser.TOK_LIKETABLE:
                if (child.getChildCount() > 0) {
                    likeTableName = HiveParserBaseSemanticAnalyzer.getUnescapedName((HiveParserASTNode) child.getChild(0));
                    if (likeTableName != null) {
                        if (commandType == ctas) {
                            throw new ValidationException(ErrorMsg.CTAS_CTLT_COEXISTENCE.getMsg());
                        }
                        if (cols.size() != 0) {
                            throw new ValidationException(ErrorMsg.CTLT_COLLST_COEXISTENCE.getMsg());
                        }
                    }
                    commandType = ctlt;
                    handleUnsupportedOperation("CREATE TABLE LIKE is not supported");
                }
                break;
            case HiveASTParser.TOK_QUERY: // CTAS
                if (commandType == ctlt) {
                    throw new ValidationException(ErrorMsg.CTAS_CTLT_COEXISTENCE.getMsg());
                }
                if (cols.size() != 0) {
                    throw new ValidationException(ErrorMsg.CTAS_COLLST_COEXISTENCE.getMsg());
                }
                if (partCols.size() != 0) {
                    throw new ValidationException(ErrorMsg.CTAS_PARCOL_COEXISTENCE.getMsg());
                }
                if (isExt) {
                    throw new ValidationException(ErrorMsg.CTAS_EXTTBL_COEXISTENCE.getMsg());
                }
                commandType = ctas;
                selectStmt = child;
                break;
            case HiveASTParser.TOK_TABCOLLIST:
                cols = HiveParserBaseSemanticAnalyzer.getColumns(child, true, primaryKeys, notNulls);
                break;
            case HiveASTParser.TOK_TABLECOMMENT:
                comment = HiveParserBaseSemanticAnalyzer.unescapeSQLString(child.getChild(0).getText());
                break;
            case HiveASTParser.TOK_TABLEPARTCOLS:
                partCols = HiveParserBaseSemanticAnalyzer.getColumns((HiveParserASTNode) child.getChild(0), false);
                break;
            case HiveASTParser.TOK_TABLEROWFORMAT:
                rowFormatParams.analyzeRowFormat(child);
                break;
            case HiveASTParser.TOK_TABLELOCATION:
                location = HiveParserBaseSemanticAnalyzer.unescapeSQLString(child.getChild(0).getText());
                break;
            case HiveASTParser.TOK_TABLEPROPERTIES:
                tblProps = getProps((HiveParserASTNode) child.getChild(0));
                break;
            case HiveASTParser.TOK_TABLESERIALIZER:
                child = (HiveParserASTNode) child.getChild(0);
                storageFormat.setSerde(HiveParserBaseSemanticAnalyzer.unescapeSQLString(child.getChild(0).getText()));
                if (child.getChildCount() == 2) {
                    HiveParserBaseSemanticAnalyzer.readProps((HiveParserASTNode) (child.getChild(1).getChild(0)), storageFormat.getSerdeProps());
                }
                break;
            case HiveASTParser.TOK_ALTERTABLE_BUCKETS:
                handleUnsupportedOperation("Bucketed table is not supported");
                break;
            case HiveASTParser.TOK_TABLESKEWED:
                handleUnsupportedOperation("Skewed table is not supported");
                break;
            default:
                throw new ValidationException("Unknown AST node for CREATE TABLE: " + child);
        }
    }
    if (storageFormat.getStorageHandler() != null) {
        handleUnsupportedOperation("Storage handler table is not supported");
    }
    if (commandType == createTable || commandType == ctlt) {
        queryState.setCommandType(HiveOperation.CREATETABLE);
    } else {
        queryState.setCommandType(HiveOperation.CREATETABLE_AS_SELECT);
    }
    storageFormat.fillDefaultStorageFormat(isExt, false);
    if (isTemporary) {
        if (partCols.size() > 0) {
            handleUnsupportedOperation("Partition columns are not supported on temporary tables");
        }
        handleUnsupportedOperation("Temporary hive table is not supported");
    }
    // Handle different types of CREATE TABLE command
    switch(commandType) {
        case createTable: // REGULAR CREATE TABLE DDL
            tblProps = addDefaultProperties(tblProps);
            return convertCreateTable(dbDotTab, isExt, ifNotExists, isTemporary, cols, partCols, comment, location, tblProps, rowFormatParams, storageFormat, primaryKeys, notNulls);
        case ctlt: // create table like <tbl_name>
            tblProps = addDefaultProperties(tblProps);
            throw new SemanticException("CREATE TABLE LIKE is not supported yet");
        case ctas: // create table as select
            tblProps = addDefaultProperties(tblProps);
            // analyze the query
            HiveParserCalcitePlanner calcitePlanner = hiveParser.createCalcitePlanner(context, queryState, hiveShim);
            calcitePlanner.setCtasCols(cols);
            RelNode queryRelNode = calcitePlanner.genLogicalPlan(selectStmt);
            // create a table to represent the dest table
            String[] dbTblName = dbDotTab.split("\\.");
            Table destTable = new Table(Table.getEmptyTable(dbTblName[0], dbTblName[1]));
            destTable.getSd().setCols(cols);
            Tuple4<ObjectIdentifier, QueryOperation, Map<String, String>, Boolean> insertOperationInfo = dmlHelper.createInsertOperationInfo(queryRelNode, destTable, Collections.emptyMap(), Collections.emptyList(), false);
            CreateTableOperation createTableOperation = convertCreateTable(dbDotTab, isExt, ifNotExists, isTemporary, cols, partCols, comment, location, tblProps, rowFormatParams, storageFormat, primaryKeys, notNulls);
            return new CreateTableASOperation(createTableOperation, insertOperationInfo.f2, insertOperationInfo.f1, insertOperationInfo.f3);
        default:
            throw new ValidationException("Unrecognized command.");
    }
}
Also used:
ValidationException (org.apache.flink.table.api.ValidationException)
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema)
HiveParserRowFormatParams (org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.HiveParserRowFormatParams)
ArrayList (java.util.ArrayList)
PrimaryKey (org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.PrimaryKey)
CreateTableOperation (org.apache.flink.table.operations.ddl.CreateTableOperation)
CreateTableASOperation (org.apache.flink.table.operations.ddl.CreateTableASOperation)
NotNullConstraint (org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint)
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)
HiveParserCalcitePlanner (org.apache.flink.table.planner.delegation.hive.HiveParserCalcitePlanner)
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier)
QueryOperation (org.apache.flink.table.operations.QueryOperation)
HiveParserASTNode (org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode)
CatalogTable (org.apache.flink.table.catalog.CatalogTable)
SqlCreateHiveTable (org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable)
Table (org.apache.hadoop.hive.ql.metadata.Table)
ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable)
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable)
HiveParserStorageFormat (org.apache.flink.table.planner.delegation.hive.copy.HiveParserStorageFormat)
UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint)
RelNode (org.apache.calcite.rel.RelNode)
HiveParserBaseSemanticAnalyzer (org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer)
Map (java.util.Map)
LinkedHashMap (java.util.LinkedHashMap)
HashMap (java.util.HashMap)
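For orientation, below is a minimal, hedged sketch of the kind of Hive-dialect DDL that reaches this analyzer through Flink's Table API. The catalog name, default database, and Hive conf directory are placeholder assumptions; the statements are only meant to exercise the PRIMARY KEY / NOT NULL and CTAS branches handled above.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.SqlDialect;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.catalog.hive.HiveCatalog;

public class HiveCreateTableSketch {
    public static void main(String[] args) {
        TableEnvironment tableEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());

        // Placeholder catalog name, database, and conf dir -- adjust for a real Hive setup.
        HiveCatalog hiveCatalog = new HiveCatalog("myhive", "default", "/path/to/hive-conf");
        tableEnv.registerCatalog("myhive", hiveCatalog);
        tableEnv.useCatalog("myhive");

        // Statements issued under the Hive dialect go through the Hive parser path.
        tableEnv.getConfig().setSqlDialect(SqlDialect.HIVE);

        // Exercises the TOK_TABCOLLIST and TOK_TABLEPARTCOLS branches; Hive only accepts
        // constraints declared DISABLE NOVALIDATE.
        tableEnv.executeSql(
                "CREATE TABLE users ("
                        + " id INT NOT NULL,"
                        + " name STRING,"
                        + " PRIMARY KEY (id) DISABLE NOVALIDATE RELY"
                        + ") PARTITIONED BY (dt STRING)");

        // Exercises the TOK_QUERY (CTAS) branch; the checks above reject a column list,
        // partition columns, or EXTERNAL in combination with CTAS.
        tableEnv.executeSql("CREATE TABLE users_copy AS SELECT id, name FROM users");
    }
}

With the Hive dialect active, the first statement ends up in the regular createTable case, while the second takes the ctas case and goes through the Calcite planner shown above.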

Example 2 with PrimaryKey

Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.PrimaryKey in project flink by apache.

From the class HiveParserDDLSemanticAnalyzer, the convertCreateTable overload that builds the CreateTableOperation:

private CreateTableOperation convertCreateTable(
        String compoundName,
        boolean isExternal,
        boolean ifNotExists,
        boolean isTemporary,
        List<FieldSchema> cols,
        List<FieldSchema> partCols,
        String comment,
        String location,
        Map<String, String> tblProps,
        HiveParserRowFormatParams rowFormatParams,
        HiveParserStorageFormat storageFormat,
        List<PrimaryKey> primaryKeys,
        List<NotNullConstraint> notNullConstraints) {
    Map<String, String> props = new HashMap<>();
    if (tblProps != null) {
        props.putAll(tblProps);
    }
    markHiveConnector(props);
    // external
    if (isExternal) {
        props.put(TABLE_IS_EXTERNAL, "true");
    }
    // PK trait
    UniqueConstraint uniqueConstraint = null;
    if (primaryKeys != null && !primaryKeys.isEmpty()) {
        PrimaryKey primaryKey = primaryKeys.get(0);
        byte trait = 0;
        if (primaryKey.isEnable()) {
            trait = HiveDDLUtils.enableConstraint(trait);
        }
        if (primaryKey.isValidate()) {
            trait = HiveDDLUtils.validateConstraint(trait);
        }
        if (primaryKey.isRely()) {
            trait = HiveDDLUtils.relyConstraint(trait);
        }
        props.put(PK_CONSTRAINT_TRAIT, String.valueOf(trait));
        List<String> pkCols = primaryKeys.stream().map(PrimaryKey::getPk).collect(Collectors.toList());
        String constraintName = primaryKey.getConstraintName();
        if (constraintName == null) {
            constraintName = pkCols.stream().collect(Collectors.joining("_", "PK_", ""));
        }
        uniqueConstraint = UniqueConstraint.primaryKey(constraintName, pkCols);
    }
    // NOT NULL constraints
    List<String> notNullCols = new ArrayList<>();
    if (!notNullConstraints.isEmpty()) {
        List<String> traits = new ArrayList<>();
        for (NotNullConstraint notNull : notNullConstraints) {
            byte trait = 0;
            if (notNull.isEnable()) {
                trait = HiveDDLUtils.enableConstraint(trait);
            }
            if (notNull.isValidate()) {
                trait = HiveDDLUtils.validateConstraint(trait);
            }
            if (notNull.isRely()) {
                trait = HiveDDLUtils.relyConstraint(trait);
            }
            traits.add(String.valueOf(trait));
            notNullCols.add(notNull.getColName());
        }
        props.put(NOT_NULL_CONSTRAINT_TRAITS, String.join(COL_DELIMITER, traits));
        props.put(NOT_NULL_COLS, String.join(COL_DELIMITER, notNullCols));
    }
    // row format
    if (rowFormatParams != null) {
        encodeRowFormat(rowFormatParams, props);
    }
    // storage format
    if (storageFormat != null) {
        encodeStorageFormat(storageFormat, props);
    }
    // location
    if (location != null) {
        props.put(TABLE_LOCATION_URI, location);
    }
    ObjectIdentifier identifier = parseObjectIdentifier(compoundName);
    Set<String> notNullColSet = new HashSet<>(notNullCols);
    if (uniqueConstraint != null) {
        notNullColSet.addAll(uniqueConstraint.getColumns());
    }
    TableSchema tableSchema = HiveTableUtil.createTableSchema(cols, partCols, notNullColSet, uniqueConstraint);
    return new CreateTableOperation(identifier, new CatalogTableImpl(tableSchema, HiveCatalog.getFieldNames(partCols), props, comment), ifNotExists, isTemporary);
}
Also used:
TableSchema (org.apache.flink.table.api.TableSchema)
LinkedHashMap (java.util.LinkedHashMap)
HashMap (java.util.HashMap)
ArrayList (java.util.ArrayList)
UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint)
PrimaryKey (org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.PrimaryKey)
CreateTableOperation (org.apache.flink.table.operations.ddl.CreateTableOperation)
CatalogTableImpl (org.apache.flink.table.catalog.CatalogTableImpl)
NotNullConstraint (org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint)
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier)
HashSet (java.util.HashSet)
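In the overload above, the ENABLE/VALIDATE/RELY flags of each constraint are packed into a single byte and stored in the table properties (PK_CONSTRAINT_TRAIT for the primary key, NOT_NULL_CONSTRAINT_TRAITS plus NOT_NULL_COLS for NOT NULL columns). The standalone sketch below illustrates that style of bit-packing; the bit positions, flag names, and delimiter are assumptions for illustration, not the actual HiveDDLUtils constants.

// Illustrative only: bit positions and names are assumed, not taken from HiveDDLUtils.
public class ConstraintTraitSketch {

    private static final byte ENABLE = 1 << 2;
    private static final byte VALIDATE = 1 << 1;
    private static final byte RELY = 1;

    // Mirrors the pattern above: start from 0 and set one bit per declared trait.
    static byte encode(boolean enable, boolean validate, boolean rely) {
        byte trait = 0;
        if (enable) {
            trait |= ENABLE;
        }
        if (validate) {
            trait |= VALIDATE;
        }
        if (rely) {
            trait |= RELY;
        }
        return trait;
    }

    public static void main(String[] args) {
        // A PRIMARY KEY declared DISABLE NOVALIDATE RELY would carry only the RELY bit.
        System.out.println("PK trait: " + encode(false, false, true));

        // NOT NULL traits are emitted one per column and joined with a delimiter,
        // matching the NOT_NULL_CONSTRAINT_TRAITS / NOT_NULL_COLS pairing above.
        String traits = String.join(",",
                String.valueOf(encode(true, false, true)),
                String.valueOf(encode(false, false, false)));
        System.out.println("NOT NULL traits: " + traits);
    }
}

Keeping the trait bytes next to the column lists means a later reader of the table properties can recover how each constraint was declared; the property keys themselves come from the method above.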

Aggregations

ArrayList (java.util.ArrayList) 2
HashMap (java.util.HashMap) 2
LinkedHashMap (java.util.LinkedHashMap) 2
UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint) 2
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier) 2
CreateTableOperation (org.apache.flink.table.operations.ddl.CreateTableOperation) 2
NotNullConstraint (org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint) 2
PrimaryKey (org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.PrimaryKey) 2
HashSet (java.util.HashSet) 1
Map (java.util.Map) 1
RelNode (org.apache.calcite.rel.RelNode) 1
SqlCreateHiveTable (org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable) 1
TableSchema (org.apache.flink.table.api.TableSchema) 1
ValidationException (org.apache.flink.table.api.ValidationException) 1
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable) 1
CatalogTable (org.apache.flink.table.catalog.CatalogTable) 1
CatalogTableImpl (org.apache.flink.table.catalog.CatalogTableImpl) 1
ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable) 1
QueryOperation (org.apache.flink.table.operations.QueryOperation) 1
CreateTableASOperation (org.apache.flink.table.operations.ddl.CreateTableASOperation) 1