Search in sources:

Example 1 with HiveParserStorageFormat

Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserStorageFormat in project flink by apache.

From the class HiveParserDDLSemanticAnalyzer, method convertCreateTable:

private Operation convertCreateTable(HiveParserASTNode ast) throws SemanticException {
    String[] qualifiedTabName = HiveParserBaseSemanticAnalyzer.getQualifiedTableName((HiveParserASTNode) ast.getChild(0));
    String dbDotTab = HiveParserBaseSemanticAnalyzer.getDotName(qualifiedTabName);
    String likeTableName;
    List<FieldSchema> cols = new ArrayList<>();
    List<FieldSchema> partCols = new ArrayList<>();
    List<PrimaryKey> primaryKeys = new ArrayList<>();
    List<NotNullConstraint> notNulls = new ArrayList<>();
    String comment = null;
    String location = null;
    Map<String, String> tblProps = null;
    boolean ifNotExists = false;
    boolean isExt = false;
    boolean isTemporary = false;
    HiveParserASTNode selectStmt = null;
    // regular CREATE TABLE
    final int createTable = 0;
    // CREATE TABLE LIKE ... (CTLT)
    final int ctlt = 1;
    // CREATE TABLE AS SELECT ... (CTAS)
    final int ctas = 2;
    int commandType = createTable;
    HiveParserBaseSemanticAnalyzer.HiveParserRowFormatParams rowFormatParams = new HiveParserBaseSemanticAnalyzer.HiveParserRowFormatParams();
    HiveParserStorageFormat storageFormat = new HiveParserStorageFormat(conf);
    LOG.info("Creating table " + dbDotTab + " position=" + ast.getCharPositionInLine());
    int numCh = ast.getChildCount();
    // Check the 1st-level children and do simple semantic checks:
    // 1) CTLT and CTAS should not coexist.
    // 2) CTLT or CTAS should not coexist with a column list (target table schema).
    // 3) CTAS does not support partitioning (for now).
    for (int num = 1; num < numCh; num++) {
        HiveParserASTNode child = (HiveParserASTNode) ast.getChild(num);
        if (storageFormat.fillStorageFormat(child)) {
            continue;
        }
        switch(child.getToken().getType()) {
            case HiveASTParser.TOK_IFNOTEXISTS:
                ifNotExists = true;
                break;
            case HiveASTParser.KW_EXTERNAL:
                isExt = true;
                break;
            case HiveASTParser.KW_TEMPORARY:
                isTemporary = true;
                break;
            case HiveASTParser.TOK_LIKETABLE:
                if (child.getChildCount() > 0) {
                    likeTableName = HiveParserBaseSemanticAnalyzer.getUnescapedName((HiveParserASTNode) child.getChild(0));
                    if (likeTableName != null) {
                        if (commandType == ctas) {
                            throw new ValidationException(ErrorMsg.CTAS_CTLT_COEXISTENCE.getMsg());
                        }
                        if (cols.size() != 0) {
                            throw new ValidationException(ErrorMsg.CTLT_COLLST_COEXISTENCE.getMsg());
                        }
                    }
                    commandType = ctlt;
                    handleUnsupportedOperation("CREATE TABLE LIKE is not supported");
                }
                break;
            case HiveASTParser.TOK_QUERY: // CTAS
                if (commandType == ctlt) {
                    throw new ValidationException(ErrorMsg.CTAS_CTLT_COEXISTENCE.getMsg());
                }
                if (cols.size() != 0) {
                    throw new ValidationException(ErrorMsg.CTAS_COLLST_COEXISTENCE.getMsg());
                }
                if (partCols.size() != 0) {
                    throw new ValidationException(ErrorMsg.CTAS_PARCOL_COEXISTENCE.getMsg());
                }
                if (isExt) {
                    throw new ValidationException(ErrorMsg.CTAS_EXTTBL_COEXISTENCE.getMsg());
                }
                commandType = ctas;
                selectStmt = child;
                break;
            case HiveASTParser.TOK_TABCOLLIST:
                cols = HiveParserBaseSemanticAnalyzer.getColumns(child, true, primaryKeys, notNulls);
                break;
            case HiveASTParser.TOK_TABLECOMMENT:
                comment = HiveParserBaseSemanticAnalyzer.unescapeSQLString(child.getChild(0).getText());
                break;
            case HiveASTParser.TOK_TABLEPARTCOLS:
                partCols = HiveParserBaseSemanticAnalyzer.getColumns((HiveParserASTNode) child.getChild(0), false);
                break;
            case HiveASTParser.TOK_TABLEROWFORMAT:
                rowFormatParams.analyzeRowFormat(child);
                break;
            case HiveASTParser.TOK_TABLELOCATION:
                location = HiveParserBaseSemanticAnalyzer.unescapeSQLString(child.getChild(0).getText());
                break;
            case HiveASTParser.TOK_TABLEPROPERTIES:
                tblProps = getProps((HiveParserASTNode) child.getChild(0));
                break;
            case HiveASTParser.TOK_TABLESERIALIZER:
                child = (HiveParserASTNode) child.getChild(0);
                storageFormat.setSerde(HiveParserBaseSemanticAnalyzer.unescapeSQLString(child.getChild(0).getText()));
                if (child.getChildCount() == 2) {
                    HiveParserBaseSemanticAnalyzer.readProps((HiveParserASTNode) (child.getChild(1).getChild(0)), storageFormat.getSerdeProps());
                }
                break;
            case HiveASTParser.TOK_ALTERTABLE_BUCKETS:
                handleUnsupportedOperation("Bucketed table is not supported");
                break;
            case HiveASTParser.TOK_TABLESKEWED:
                handleUnsupportedOperation("Skewed table is not supported");
                break;
            default:
                throw new ValidationException("Unknown AST node for CREATE TABLE: " + child);
        }
    }
    if (storageFormat.getStorageHandler() != null) {
        handleUnsupportedOperation("Storage handler table is not supported");
    }
    if (commandType == createTable || commandType == ctlt) {
        queryState.setCommandType(HiveOperation.CREATETABLE);
    } else {
        queryState.setCommandType(HiveOperation.CREATETABLE_AS_SELECT);
    }
    storageFormat.fillDefaultStorageFormat(isExt, false);
    if (isTemporary) {
        if (partCols.size() > 0) {
            handleUnsupportedOperation("Partition columns are not supported on temporary tables");
        }
        handleUnsupportedOperation("Temporary hive table is not supported");
    }
    // Handle different types of CREATE TABLE command
    switch(commandType) {
        case createTable: // regular CREATE TABLE DDL
            tblProps = addDefaultProperties(tblProps);
            return convertCreateTable(dbDotTab, isExt, ifNotExists, isTemporary, cols, partCols, comment, location, tblProps, rowFormatParams, storageFormat, primaryKeys, notNulls);
        case ctlt: // create table like <tbl_name>
            tblProps = addDefaultProperties(tblProps);
            throw new SemanticException("CREATE TABLE LIKE is not supported yet");
        case ctas: // create table as select
            tblProps = addDefaultProperties(tblProps);
            // analyze the query
            HiveParserCalcitePlanner calcitePlanner = hiveParser.createCalcitePlanner(context, queryState, hiveShim);
            calcitePlanner.setCtasCols(cols);
            RelNode queryRelNode = calcitePlanner.genLogicalPlan(selectStmt);
            // create a table to represent the dest table
            String[] dbTblName = dbDotTab.split("\\.");
            Table destTable = new Table(Table.getEmptyTable(dbTblName[0], dbTblName[1]));
            destTable.getSd().setCols(cols);
            Tuple4<ObjectIdentifier, QueryOperation, Map<String, String>, Boolean> insertOperationInfo = dmlHelper.createInsertOperationInfo(queryRelNode, destTable, Collections.emptyMap(), Collections.emptyList(), false);
            CreateTableOperation createTableOperation = convertCreateTable(dbDotTab, isExt, ifNotExists, isTemporary, cols, partCols, comment, location, tblProps, rowFormatParams, storageFormat, primaryKeys, notNulls);
            return new CreateTableASOperation(createTableOperation, insertOperationInfo.f2, insertOperationInfo.f1, insertOperationInfo.f3);
        default:
            throw new ValidationException("Unrecognized command.");
    }
}
Also used: ValidationException(org.apache.flink.table.api.ValidationException) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) HiveParserRowFormatParams(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.HiveParserRowFormatParams) ArrayList(java.util.ArrayList) PrimaryKey(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.PrimaryKey) CreateTableOperation(org.apache.flink.table.operations.ddl.CreateTableOperation) CreateTableASOperation(org.apache.flink.table.operations.ddl.CreateTableASOperation) NotNullConstraint(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) HiveParserCalcitePlanner(org.apache.flink.table.planner.delegation.hive.HiveParserCalcitePlanner) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier) QueryOperation(org.apache.flink.table.operations.QueryOperation) HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) CatalogTable(org.apache.flink.table.catalog.CatalogTable) SqlCreateHiveTable(org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable) Table(org.apache.hadoop.hive.ql.metadata.Table) ContextResolvedTable(org.apache.flink.table.catalog.ContextResolvedTable) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) HiveParserStorageFormat(org.apache.flink.table.planner.delegation.hive.copy.HiveParserStorageFormat) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint) RelNode(org.apache.calcite.rel.RelNode) HiveParserBaseSemanticAnalyzer(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer) Map(java.util.Map) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap)
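
A minimal sketch (not part of the Flink sources) of how this analyzer is reached end to end: Hive DDL only goes through HiveParser when a HiveCatalog is the current catalog and the SQL dialect is set to HIVE. The catalog name, database, table name, and Hive conf dir below are illustrative assumptions.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.SqlDialect;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.catalog.hive.HiveCatalog;

public class HiveCreateTableExample {
    public static void main(String[] args) {
        TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());
        // Hypothetical catalog setup; adjust names and the Hive conf dir.
        HiveCatalog hiveCatalog = new HiveCatalog("myhive", "default", "/opt/hive-conf");
        tEnv.registerCatalog("myhive", hiveCatalog);
        tEnv.useCatalog("myhive");
        tEnv.getConfig().setSqlDialect(SqlDialect.HIVE);
        // Parsed into a TOK_CREATETABLE AST; convertCreateTable above fills
        // cols, partCols, the storage format, and the table properties from
        // the statement's child nodes.
        tEnv.executeSql(
                "CREATE TABLE IF NOT EXISTS orders ("
                        + "  id INT,"
                        + "  amount DOUBLE)"
                        + " COMMENT 'demo table'"
                        + " PARTITIONED BY (dt STRING)"
                        + " STORED AS ORC"
                        + " TBLPROPERTIES ('k'='v')");
    }
}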

Example 2 with HiveParserStorageFormat

Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserStorageFormat in project flink by apache.

From the class HiveParserDDLSemanticAnalyzer, method convertAlterTableFileFormat:

private Operation convertAlterTableFileFormat(CatalogBaseTable alteredTable, HiveParserASTNode ast, String tableName, HashMap<String, String> partSpec) throws SemanticException {
    HiveParserStorageFormat format = new HiveParserStorageFormat(conf);
    HiveParserASTNode child = (HiveParserASTNode) ast.getChild(0);
    if (!format.fillStorageFormat(child)) {
        throw new ValidationException("Unknown AST node for ALTER TABLE FILEFORMAT: " + child);
    }
    Map<String, String> newProps = new HashMap<>();
    newProps.put(ALTER_TABLE_OP, CHANGE_FILE_FORMAT.name());
    newProps.put(STORED_AS_FILE_FORMAT, format.getGenericName());
    return convertAlterTableProps(alteredTable, tableName, partSpec, newProps);
}
Also used: HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) ValidationException(org.apache.flink.table.api.ValidationException) HiveParserStorageFormat(org.apache.flink.table.planner.delegation.hive.copy.HiveParserStorageFormat) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap)
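
The single child consumed by fillStorageFormat here corresponds to a Hive ALTER TABLE ... SET FILEFORMAT statement. A hedged sketch, reusing the Hive-dialect TableEnvironment from the sketch under Example 1 (table and partition names are illustrative):

import org.apache.flink.table.api.TableEnvironment;

public class HiveAlterFileFormatExample {
    // Assumes tEnv is already set up for the Hive dialect as in the previous sketch.
    static void changeFileFormat(TableEnvironment tEnv) {
        // fillStorageFormat(child) resolves the generic format name, which
        // convertAlterTableProps then records under STORED_AS_FILE_FORMAT
        // with ALTER_TABLE_OP set to CHANGE_FILE_FORMAT.
        tEnv.executeSql("ALTER TABLE orders SET FILEFORMAT PARQUET");
        // With a partition spec, the same path runs with a non-null partSpec
        // (partition-level variant shown on the assumption it is supported,
        // given the partSpec parameter above).
        tEnv.executeSql("ALTER TABLE orders PARTITION (dt='2024-01-01') SET FILEFORMAT ORC");
    }
}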

Example 3 with HiveParserStorageFormat

Use of org.apache.flink.table.planner.delegation.hive.copy.HiveParserStorageFormat in project flink by apache.

From the class HiveParserDDLSemanticAnalyzer, method convertCreateView:

private Operation convertCreateView(HiveParserASTNode ast) throws SemanticException {
    String[] qualTabName = HiveParserBaseSemanticAnalyzer.getQualifiedTableName((HiveParserASTNode) ast.getChild(0));
    String dbDotTable = HiveParserBaseSemanticAnalyzer.getDotName(qualTabName);
    List<FieldSchema> cols = null;
    boolean ifNotExists = false;
    boolean isAlterViewAs = false;
    String comment = null;
    HiveParserASTNode selectStmt = null;
    Map<String, String> tblProps = null;
    boolean isMaterialized = ast.getToken().getType() == HiveASTParser.TOK_CREATE_MATERIALIZED_VIEW;
    if (isMaterialized) {
        handleUnsupportedOperation("MATERIALIZED VIEW is not supported");
    }
    HiveParserStorageFormat storageFormat = new HiveParserStorageFormat(conf);
    LOG.info("Creating view " + dbDotTable + " position=" + ast.getCharPositionInLine());
    int numCh = ast.getChildCount();
    for (int num = 1; num < numCh; num++) {
        HiveParserASTNode child = (HiveParserASTNode) ast.getChild(num);
        if (storageFormat.fillStorageFormat(child)) {
            handleUnsupportedOperation("FILE FORMAT for view is not supported");
        }
        switch(child.getToken().getType()) {
            case HiveASTParser.TOK_IFNOTEXISTS:
                ifNotExists = true;
                break;
            case HiveASTParser.TOK_REWRITE_ENABLED:
                handleUnsupportedOperation("MATERIALIZED VIEW REWRITE is not supported");
                break;
            case HiveASTParser.TOK_ORREPLACE:
                handleUnsupportedOperation("CREATE OR REPLACE VIEW is not supported");
                break;
            case HiveASTParser.TOK_QUERY:
                selectStmt = child;
                break;
            case HiveASTParser.TOK_TABCOLNAME:
                cols = HiveParserBaseSemanticAnalyzer.getColumns(child);
                break;
            case HiveASTParser.TOK_TABLECOMMENT:
                comment = HiveParserBaseSemanticAnalyzer.unescapeSQLString(child.getChild(0).getText());
                break;
            case HiveASTParser.TOK_TABLEPROPERTIES:
                tblProps = getProps((HiveParserASTNode) child.getChild(0));
                break;
            case HiveASTParser.TOK_TABLEROWFORMAT:
                handleUnsupportedOperation("ROW FORMAT for view is not supported");
                break;
            case HiveASTParser.TOK_TABLESERIALIZER:
                handleUnsupportedOperation("SERDE for view is not supported");
                break;
            case HiveASTParser.TOK_TABLELOCATION:
                handleUnsupportedOperation("LOCATION for view is not supported");
                break;
            case HiveASTParser.TOK_VIEWPARTCOLS:
                handleUnsupportedOperation("PARTITION COLUMN for view is not supported");
                break;
            default:
                throw new ValidationException("Unknown AST node for CREATE/ALTER VIEW: " + child);
        }
    }
    if (ast.getToken().getType() == HiveASTParser.TOK_ALTERVIEW && ast.getChild(1).getType() == HiveASTParser.TOK_QUERY) {
        isAlterViewAs = true;
    }
    queryState.setCommandType(HiveOperation.CREATEVIEW);
    HiveParserCreateViewInfo createViewInfo = new HiveParserCreateViewInfo(dbDotTable, cols, selectStmt);
    hiveParser.analyzeCreateView(createViewInfo, context, queryState, hiveShim);
    ObjectIdentifier viewIdentifier = parseObjectIdentifier(createViewInfo.getCompoundName());
    TableSchema schema = HiveTableUtil.createTableSchema(createViewInfo.getSchema(), Collections.emptyList(), Collections.emptySet(), null);
    Map<String, String> props = new HashMap<>();
    if (isAlterViewAs) {
        CatalogBaseTable baseTable = getCatalogBaseTable(viewIdentifier);
        props.putAll(baseTable.getOptions());
        comment = baseTable.getComment();
    } else {
        if (tblProps != null) {
            props.putAll(tblProps);
        }
    }
    CatalogView catalogView = new CatalogViewImpl(createViewInfo.getOriginalText(), createViewInfo.getExpandedText(), schema, props, comment);
    if (isAlterViewAs) {
        return new AlterViewAsOperation(viewIdentifier, catalogView);
    } else {
        return new CreateViewOperation(viewIdentifier, catalogView, ifNotExists, false);
    }
}
Also used: AlterViewAsOperation(org.apache.flink.table.operations.ddl.AlterViewAsOperation) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) ValidationException(org.apache.flink.table.api.ValidationException) CatalogViewImpl(org.apache.flink.table.catalog.CatalogViewImpl) TableSchema(org.apache.flink.table.api.TableSchema) HiveParserStorageFormat(org.apache.flink.table.planner.delegation.hive.copy.HiveParserStorageFormat) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) NotNullConstraint(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint) CreateViewOperation(org.apache.flink.table.operations.ddl.CreateViewOperation) CatalogView(org.apache.flink.table.catalog.CatalogView) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier)
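
For completeness, a hedged sketch of statements that reach convertCreateView, again assuming the Hive-dialect TableEnvironment from Example 1 (view, table, and column names are illustrative):

import org.apache.flink.table.api.TableEnvironment;

public class HiveCreateViewExample {
    // Assumes tEnv is already set up for the Hive dialect as in the first sketch.
    static void createAndAlterView(TableEnvironment tEnv) {
        // Parsed into a TOK_CREATEVIEW AST; convertCreateView returns a
        // CreateViewOperation with ifNotExists = true.
        tEnv.executeSql(
                "CREATE VIEW IF NOT EXISTS big_orders"
                        + " COMMENT 'orders above 100'"
                        + " AS SELECT id, amount FROM orders WHERE amount > 100");
        // ALTER VIEW ... AS takes the isAlterViewAs branch and yields an
        // AlterViewAsOperation that keeps the view's existing options and comment.
        tEnv.executeSql(
                "ALTER VIEW big_orders AS"
                        + " SELECT id, amount FROM orders WHERE amount > 500");
    }
}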

Aggregations

HashMap (java.util.HashMap): 3
LinkedHashMap (java.util.LinkedHashMap): 3
ValidationException (org.apache.flink.table.api.ValidationException): 3
HiveParserASTNode (org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode): 3
HiveParserStorageFormat (org.apache.flink.table.planner.delegation.hive.copy.HiveParserStorageFormat): 3
UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint): 2
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 2
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 2
NotNullConstraint (org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint): 2
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 2
ArrayList (java.util.ArrayList): 1
Map (java.util.Map): 1
RelNode (org.apache.calcite.rel.RelNode): 1
SqlCreateHiveTable (org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable): 1
TableSchema (org.apache.flink.table.api.TableSchema): 1
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 1
CatalogView (org.apache.flink.table.catalog.CatalogView): 1
CatalogViewImpl (org.apache.flink.table.catalog.CatalogViewImpl): 1
ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable): 1
QueryOperation (org.apache.flink.table.operations.QueryOperation): 1