Example 56 with CatalogBaseTable

Use of org.apache.flink.table.catalog.CatalogBaseTable in project flink by apache.

Class HiveParserDDLSemanticAnalyzer, method convertAlterTable:

private Operation convertAlterTable(HiveParserASTNode input) throws SemanticException {
    Operation operation = null;
    HiveParserASTNode ast = (HiveParserASTNode) input.getChild(1);
    String[] qualified = HiveParserBaseSemanticAnalyzer.getQualifiedTableName((HiveParserASTNode) input.getChild(0));
    String tableName = HiveParserBaseSemanticAnalyzer.getDotName(qualified);
    HashMap<String, String> partSpec = null;
    HiveParserASTNode partSpecNode = (HiveParserASTNode) input.getChild(2);
    if (partSpecNode != null) {
        partSpec = getPartSpec(partSpecNode);
    }
    CatalogBaseTable alteredTable = getAlteredTable(tableName, false);
    switch(ast.getType()) {
        case HiveASTParser.TOK_ALTERTABLE_RENAME:
            operation = convertAlterTableRename(tableName, ast, false);
            break;
        case HiveASTParser.TOK_ALTERTABLE_ADDCOLS:
            operation = convertAlterTableModifyCols(alteredTable, tableName, ast, false);
            break;
        case HiveASTParser.TOK_ALTERTABLE_REPLACECOLS:
            operation = convertAlterTableModifyCols(alteredTable, tableName, ast, true);
            break;
        case HiveASTParser.TOK_ALTERTABLE_RENAMECOL:
            operation = convertAlterTableChangeCol(alteredTable, qualified, ast);
            break;
        case HiveASTParser.TOK_ALTERTABLE_ADDPARTS:
            operation = convertAlterTableAddParts(qualified, ast);
            break;
        case HiveASTParser.TOK_ALTERTABLE_DROPPARTS:
            operation = convertAlterTableDropParts(qualified, ast);
            break;
        case HiveASTParser.TOK_ALTERTABLE_PROPERTIES:
            operation = convertAlterTableProps(alteredTable, tableName, null, ast, false, false);
            break;
        case HiveASTParser.TOK_ALTERTABLE_DROPPROPERTIES:
            operation = convertAlterTableProps(alteredTable, tableName, null, ast, false, true);
            break;
        case HiveASTParser.TOK_ALTERTABLE_UPDATESTATS:
            operation = convertAlterTableProps(alteredTable, tableName, partSpec, ast, false, false);
            break;
        case HiveASTParser.TOK_ALTERTABLE_FILEFORMAT:
            operation = convertAlterTableFileFormat(alteredTable, ast, tableName, partSpec);
            break;
        case HiveASTParser.TOK_ALTERTABLE_LOCATION:
            operation = convertAlterTableLocation(alteredTable, ast, tableName, partSpec);
            break;
        case HiveASTParser.TOK_ALTERTABLE_SERIALIZER:
            operation = convertAlterTableSerde(alteredTable, ast, tableName, partSpec);
            break;
        case HiveASTParser.TOK_ALTERTABLE_SERDEPROPERTIES:
            operation = convertAlterTableSerdeProps(alteredTable, ast, tableName, partSpec);
            break;
        case HiveASTParser.TOK_ALTERTABLE_TOUCH:
        case HiveASTParser.TOK_ALTERTABLE_ARCHIVE:
        case HiveASTParser.TOK_ALTERTABLE_UNARCHIVE:
        case HiveASTParser.TOK_ALTERTABLE_PARTCOLTYPE:
        case HiveASTParser.TOK_ALTERTABLE_SKEWED:
        case HiveASTParser.TOK_ALTERTABLE_EXCHANGEPARTITION:
        case HiveASTParser.TOK_ALTERTABLE_MERGEFILES:
        case HiveASTParser.TOK_ALTERTABLE_RENAMEPART:
        case HiveASTParser.TOK_ALTERTABLE_SKEWED_LOCATION:
        case HiveASTParser.TOK_ALTERTABLE_BUCKETS:
        case HiveASTParser.TOK_ALTERTABLE_CLUSTER_SORT:
        case HiveASTParser.TOK_ALTERTABLE_COMPACT:
        case HiveASTParser.TOK_ALTERTABLE_UPDATECOLSTATS:
        case HiveASTParser.TOK_ALTERTABLE_DROPCONSTRAINT:
        case HiveASTParser.TOK_ALTERTABLE_ADDCONSTRAINT:
            handleUnsupportedOperation(ast);
            break;
        default:
            throw new ValidationException("Unknown AST node for ALTER TABLE: " + ast);
    }
    return operation;
}
Also used : CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) ValidationException(org.apache.flink.table.api.ValidationException) DropDatabaseOperation(org.apache.flink.table.operations.ddl.DropDatabaseOperation) AlterTableOptionsOperation(org.apache.flink.table.operations.ddl.AlterTableOptionsOperation) UseDatabaseOperation(org.apache.flink.table.operations.UseDatabaseOperation) CreateViewOperation(org.apache.flink.table.operations.ddl.CreateViewOperation) AlterDatabaseOperation(org.apache.flink.table.operations.ddl.AlterDatabaseOperation) HiveOperation(org.apache.hadoop.hive.ql.plan.HiveOperation) QueryOperation(org.apache.flink.table.operations.QueryOperation) DropCatalogFunctionOperation(org.apache.flink.table.operations.ddl.DropCatalogFunctionOperation) ShowTablesOperation(org.apache.flink.table.operations.ShowTablesOperation) DescribeTableOperation(org.apache.flink.table.operations.DescribeTableOperation) ShowFunctionsOperation(org.apache.flink.table.operations.ShowFunctionsOperation) CreateDatabaseOperation(org.apache.flink.table.operations.ddl.CreateDatabaseOperation) AlterPartitionPropertiesOperation(org.apache.flink.table.operations.ddl.AlterPartitionPropertiesOperation) ShowPartitionsOperation(org.apache.flink.table.operations.ShowPartitionsOperation) AlterViewPropertiesOperation(org.apache.flink.table.operations.ddl.AlterViewPropertiesOperation) Operation(org.apache.flink.table.operations.Operation) DropTempSystemFunctionOperation(org.apache.flink.table.operations.ddl.DropTempSystemFunctionOperation) ShowViewsOperation(org.apache.flink.table.operations.ShowViewsOperation) ShowDatabasesOperation(org.apache.flink.table.operations.ShowDatabasesOperation) AlterTableSchemaOperation(org.apache.flink.table.operations.ddl.AlterTableSchemaOperation) CreateTableASOperation(org.apache.flink.table.operations.ddl.CreateTableASOperation) DropTableOperation(org.apache.flink.table.operations.ddl.DropTableOperation) AlterViewAsOperation(org.apache.flink.table.operations.ddl.AlterViewAsOperation) CreateTableOperation(org.apache.flink.table.operations.ddl.CreateTableOperation) DropViewOperation(org.apache.flink.table.operations.ddl.DropViewOperation) AddPartitionsOperation(org.apache.flink.table.operations.ddl.AddPartitionsOperation) DropPartitionsOperation(org.apache.flink.table.operations.ddl.DropPartitionsOperation) AlterTableRenameOperation(org.apache.flink.table.operations.ddl.AlterTableRenameOperation) AlterViewRenameOperation(org.apache.flink.table.operations.ddl.AlterViewRenameOperation) CreateCatalogFunctionOperation(org.apache.flink.table.operations.ddl.CreateCatalogFunctionOperation) CreateTempSystemFunctionOperation(org.apache.flink.table.operations.ddl.CreateTempSystemFunctionOperation)
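
The dispatch above is reachable through Flink's Hive dialect. As orientation, here is a minimal hedged sketch of how ALTER TABLE statements get routed to this analyzer; the catalog, database, and table names are made up, and the HiveCatalog registration that the Hive dialect requires is only hinted at in a comment.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.SqlDialect;
import org.apache.flink.table.api.TableEnvironment;

public class AlterTableDispatchSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());
        // A HiveCatalog must be registered and made current for the Hive dialect
        // to take effect; omitted here for brevity.
        tEnv.getConfig().setSqlDialect(SqlDialect.HIVE);
        // TOK_ALTERTABLE_RENAME -> convertAlterTableRename(...)
        tEnv.executeSql("ALTER TABLE db1.src RENAME TO db1.dst");
        // TOK_ALTERTABLE_PROPERTIES -> convertAlterTableProps(...)
        tEnv.executeSql("ALTER TABLE db1.dst SET TBLPROPERTIES ('k'='v')");
        // Tokens in the unsupported group, e.g. TOK_ALTERTABLE_TOUCH, would end
        // up in handleUnsupportedOperation(ast) instead.
    }
}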

Example 57 with CatalogBaseTable

Use of org.apache.flink.table.catalog.CatalogBaseTable in project flink by apache.

Class HiveParserDDLSemanticAnalyzer, method convertAlterView:

private Operation convertAlterView(HiveParserASTNode ast) throws SemanticException {
    Operation operation = null;
    String[] qualified = HiveParserBaseSemanticAnalyzer.getQualifiedTableName((HiveParserASTNode) ast.getChild(0));
    String tableName = HiveParserBaseSemanticAnalyzer.getDotName(qualified);
    CatalogBaseTable alteredTable = getAlteredTable(tableName, true);
    if (ast.getChild(1).getType() == HiveASTParser.TOK_QUERY) {
        // alter view as
        operation = convertCreateView(ast);
    } else {
        ast = (HiveParserASTNode) ast.getChild(1);
        switch(ast.getType()) {
            case HiveASTParser.TOK_ALTERVIEW_PROPERTIES:
                operation = convertAlterTableProps(alteredTable, tableName, null, ast, true, false);
                break;
            case HiveASTParser.TOK_ALTERVIEW_DROPPROPERTIES:
                operation = convertAlterTableProps(alteredTable, tableName, null, ast, true, true);
                break;
            case HiveASTParser.TOK_ALTERVIEW_RENAME:
                operation = convertAlterTableRename(tableName, ast, true);
                break;
            case HiveASTParser.TOK_ALTERVIEW_ADDPARTS:
            case HiveASTParser.TOK_ALTERVIEW_DROPPARTS:
                handleUnsupportedOperation("ADD/DROP PARTITION for view is not supported");
                break;
            default:
                throw new ValidationException("Unknown AST node for ALTER VIEW: " + ast);
        }
    }
    return operation;
}
Also used : CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) ValidationException(org.apache.flink.table.api.ValidationException) DropDatabaseOperation(org.apache.flink.table.operations.ddl.DropDatabaseOperation) AlterTableOptionsOperation(org.apache.flink.table.operations.ddl.AlterTableOptionsOperation) UseDatabaseOperation(org.apache.flink.table.operations.UseDatabaseOperation) CreateViewOperation(org.apache.flink.table.operations.ddl.CreateViewOperation) AlterDatabaseOperation(org.apache.flink.table.operations.ddl.AlterDatabaseOperation) HiveOperation(org.apache.hadoop.hive.ql.plan.HiveOperation) QueryOperation(org.apache.flink.table.operations.QueryOperation) DropCatalogFunctionOperation(org.apache.flink.table.operations.ddl.DropCatalogFunctionOperation) ShowTablesOperation(org.apache.flink.table.operations.ShowTablesOperation) DescribeTableOperation(org.apache.flink.table.operations.DescribeTableOperation) ShowFunctionsOperation(org.apache.flink.table.operations.ShowFunctionsOperation) CreateDatabaseOperation(org.apache.flink.table.operations.ddl.CreateDatabaseOperation) AlterPartitionPropertiesOperation(org.apache.flink.table.operations.ddl.AlterPartitionPropertiesOperation) ShowPartitionsOperation(org.apache.flink.table.operations.ShowPartitionsOperation) AlterViewPropertiesOperation(org.apache.flink.table.operations.ddl.AlterViewPropertiesOperation) Operation(org.apache.flink.table.operations.Operation) DropTempSystemFunctionOperation(org.apache.flink.table.operations.ddl.DropTempSystemFunctionOperation) ShowViewsOperation(org.apache.flink.table.operations.ShowViewsOperation) ShowDatabasesOperation(org.apache.flink.table.operations.ShowDatabasesOperation) AlterTableSchemaOperation(org.apache.flink.table.operations.ddl.AlterTableSchemaOperation) CreateTableASOperation(org.apache.flink.table.operations.ddl.CreateTableASOperation) DropTableOperation(org.apache.flink.table.operations.ddl.DropTableOperation) AlterViewAsOperation(org.apache.flink.table.operations.ddl.AlterViewAsOperation) CreateTableOperation(org.apache.flink.table.operations.ddl.CreateTableOperation) DropViewOperation(org.apache.flink.table.operations.ddl.DropViewOperation) AddPartitionsOperation(org.apache.flink.table.operations.ddl.AddPartitionsOperation) DropPartitionsOperation(org.apache.flink.table.operations.ddl.DropPartitionsOperation) AlterTableRenameOperation(org.apache.flink.table.operations.ddl.AlterTableRenameOperation) AlterViewRenameOperation(org.apache.flink.table.operations.ddl.AlterViewRenameOperation) CreateCatalogFunctionOperation(org.apache.flink.table.operations.ddl.CreateCatalogFunctionOperation) CreateTempSystemFunctionOperation(org.apache.flink.table.operations.ddl.CreateTempSystemFunctionOperation)
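
For orientation, a hedged sketch of which ALTER VIEW statements hit which branch above. It assumes a TableEnvironment already configured for the Hive dialect as in the previous sketch; db1.v and db1.t are made-up names.

import org.apache.flink.table.api.TableEnvironment;

public class AlterViewDispatchSketch {
    static void run(TableEnvironment tEnv) {
        // TOK_ALTERVIEW_PROPERTIES -> convertAlterTableProps(...)
        tEnv.executeSql("ALTER VIEW db1.v SET TBLPROPERTIES ('author'='me')");
        // TOK_ALTERVIEW_RENAME -> convertAlterTableRename(...)
        tEnv.executeSql("ALTER VIEW db1.v RENAME TO db1.v2");
        // child(1) is TOK_QUERY, so this is treated as ALTER VIEW AS and
        // delegated to convertCreateView(ast)
        tEnv.executeSql("ALTER VIEW db1.v2 AS SELECT * FROM db1.t");
        // ADD/DROP PARTITION on a view would reach handleUnsupportedOperation(...)
    }
}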

Example 58 with CatalogBaseTable

Use of org.apache.flink.table.catalog.CatalogBaseTable in project flink by apache.

Class HiveParserDDLSemanticAnalyzer, method convertAlterTableChangeCol:

private Operation convertAlterTableChangeCol(CatalogBaseTable alteredTable, String[] qualified, HiveParserASTNode ast) throws SemanticException {
    String newComment = null;
    boolean first = false;
    String flagCol = null;
    boolean isCascade = false;
    // col_old_name col_new_name column_type [COMMENT col_comment] [FIRST|AFTER column_name]
    // [CASCADE|RESTRICT]
    String oldColName = ast.getChild(0).getText();
    String newColName = ast.getChild(1).getText();
    String newType = HiveParserBaseSemanticAnalyzer.getTypeStringFromAST((HiveParserASTNode) ast.getChild(2));
    int childCount = ast.getChildCount();
    for (int i = 3; i < childCount; i++) {
        HiveParserASTNode child = (HiveParserASTNode) ast.getChild(i);
        switch(child.getToken().getType()) {
            case HiveASTParser.StringLiteral:
                newComment = HiveParserBaseSemanticAnalyzer.unescapeSQLString(child.getText());
                break;
            case HiveASTParser.TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION:
                flagCol = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(child.getChild(0).getText());
                break;
            case HiveASTParser.KW_FIRST:
                first = true;
                break;
            case HiveASTParser.TOK_CASCADE:
                isCascade = true;
                break;
            case HiveASTParser.TOK_RESTRICT:
                break;
            default:
                throw new ValidationException("Unsupported token: " + child.getToken() + " for alter table");
        }
    }
    // Validate the operation of renaming a column name.
    Table tab = getTable(new ObjectPath(qualified[0], qualified[1]));
    SkewedInfo skewInfo = tab.getTTable().getSd().getSkewedInfo();
    if ((null != skewInfo) && (null != skewInfo.getSkewedColNames()) && skewInfo.getSkewedColNames().contains(oldColName)) {
        throw new ValidationException(oldColName + ErrorMsg.ALTER_TABLE_NOT_ALLOWED_RENAME_SKEWED_COLUMN.getMsg());
    }
    String tblName = HiveParserBaseSemanticAnalyzer.getDotName(qualified);
    ObjectIdentifier tableIdentifier = parseObjectIdentifier(tblName);
    CatalogTable oldTable = (CatalogTable) alteredTable;
    String oldName = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(oldColName);
    String newName = HiveParserBaseSemanticAnalyzer.unescapeIdentifier(newColName);
    if (oldTable.getPartitionKeys().contains(oldName)) {
        // disallow changing partition columns
        throw new ValidationException("CHANGE COLUMN cannot be applied to partition columns");
    }
    TableSchema oldSchema = oldTable.getSchema();
    TableColumn newTableColumn = TableColumn.physical(newName, HiveTypeUtil.toFlinkType(TypeInfoUtils.getTypeInfoFromTypeString(newType)));
    TableSchema newSchema = OperationConverterUtils.changeColumn(oldSchema, oldName, newTableColumn, first, flagCol);
    Map<String, String> props = new HashMap<>(oldTable.getOptions());
    props.put(ALTER_TABLE_OP, ALTER_COLUMNS.name());
    if (isCascade) {
        props.put(ALTER_COL_CASCADE, "true");
    }
    return new AlterTableSchemaOperation(tableIdentifier, new CatalogTableImpl(newSchema, oldTable.getPartitionKeys(), props, oldTable.getComment()));
}
Also used : ObjectPath(org.apache.flink.table.catalog.ObjectPath) HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) ValidationException(org.apache.flink.table.api.ValidationException) CatalogTable(org.apache.flink.table.catalog.CatalogTable) SqlCreateHiveTable(org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable) Table(org.apache.hadoop.hive.ql.metadata.Table) ContextResolvedTable(org.apache.flink.table.catalog.ContextResolvedTable) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) TableSchema(org.apache.flink.table.api.TableSchema) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) CatalogTable(org.apache.flink.table.catalog.CatalogTable) TableColumn(org.apache.flink.table.api.TableColumn) NotNullConstraint(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint) SkewedInfo(org.apache.hadoop.hive.metastore.api.SkewedInfo) CatalogTableImpl(org.apache.flink.table.catalog.CatalogTableImpl) AlterTableSchemaOperation(org.apache.flink.table.operations.ddl.AlterTableSchemaOperation) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier)
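
The actual schema rewrite is delegated to OperationConverterUtils.changeColumn, the same helper called above. Below is a minimal standalone sketch with made-up column names; the import path of OperationConverterUtils (org.apache.flink.table.planner.utils) is my assumption, since the example does not show it.

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableColumn;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.planner.utils.OperationConverterUtils;

public class ChangeColumnSketch {
    public static void main(String[] args) {
        TableSchema oldSchema =
                TableSchema.builder()
                        .field("id", DataTypes.INT())
                        .field("name", DataTypes.STRING())
                        .build();
        // Rename "name" to "full_name", keep the type, and place it after "id";
        // this mirrors CHANGE COLUMN name full_name STRING AFTER id
        TableColumn newColumn = TableColumn.physical("full_name", DataTypes.STRING());
        TableSchema newSchema =
                OperationConverterUtils.changeColumn(
                        oldSchema, "name", newColumn, /* first */ false, /* after */ "id");
        System.out.println(newSchema);
    }
}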

Example 59 with CatalogBaseTable

Use of org.apache.flink.table.catalog.CatalogBaseTable in project flink by apache.

Class HiveParserDDLSemanticAnalyzer, method convertCreateView:

private Operation convertCreateView(HiveParserASTNode ast) throws SemanticException {
    String[] qualTabName = HiveParserBaseSemanticAnalyzer.getQualifiedTableName((HiveParserASTNode) ast.getChild(0));
    String dbDotTable = HiveParserBaseSemanticAnalyzer.getDotName(qualTabName);
    List<FieldSchema> cols = null;
    boolean ifNotExists = false;
    boolean isAlterViewAs = false;
    String comment = null;
    HiveParserASTNode selectStmt = null;
    Map<String, String> tblProps = null;
    boolean isMaterialized = ast.getToken().getType() == HiveASTParser.TOK_CREATE_MATERIALIZED_VIEW;
    if (isMaterialized) {
        handleUnsupportedOperation("MATERIALIZED VIEW is not supported");
    }
    HiveParserStorageFormat storageFormat = new HiveParserStorageFormat(conf);
    LOG.info("Creating view " + dbDotTable + " position=" + ast.getCharPositionInLine());
    int numCh = ast.getChildCount();
    for (int num = 1; num < numCh; num++) {
        HiveParserASTNode child = (HiveParserASTNode) ast.getChild(num);
        if (storageFormat.fillStorageFormat(child)) {
            handleUnsupportedOperation("FILE FORMAT for view is not supported");
        }
        switch(child.getToken().getType()) {
            case HiveASTParser.TOK_IFNOTEXISTS:
                ifNotExists = true;
                break;
            case HiveASTParser.TOK_REWRITE_ENABLED:
                handleUnsupportedOperation("MATERIALIZED VIEW REWRITE is not supported");
                break;
            case HiveASTParser.TOK_ORREPLACE:
                handleUnsupportedOperation("CREATE OR REPLACE VIEW is not supported");
                break;
            case HiveASTParser.TOK_QUERY:
                selectStmt = child;
                break;
            case HiveASTParser.TOK_TABCOLNAME:
                cols = HiveParserBaseSemanticAnalyzer.getColumns(child);
                break;
            case HiveASTParser.TOK_TABLECOMMENT:
                comment = HiveParserBaseSemanticAnalyzer.unescapeSQLString(child.getChild(0).getText());
                break;
            case HiveASTParser.TOK_TABLEPROPERTIES:
                tblProps = getProps((HiveParserASTNode) child.getChild(0));
                break;
            case HiveASTParser.TOK_TABLEROWFORMAT:
                handleUnsupportedOperation("ROW FORMAT for view is not supported");
                break;
            case HiveASTParser.TOK_TABLESERIALIZER:
                handleUnsupportedOperation("SERDE for view is not supported");
                break;
            case HiveASTParser.TOK_TABLELOCATION:
                handleUnsupportedOperation("LOCATION for view is not supported");
                break;
            case HiveASTParser.TOK_VIEWPARTCOLS:
                handleUnsupportedOperation("PARTITION COLUMN for view is not supported");
                break;
            default:
                throw new ValidationException("Unknown AST node for CREATE/ALTER VIEW: " + child);
        }
    }
    if (ast.getToken().getType() == HiveASTParser.TOK_ALTERVIEW && ast.getChild(1).getType() == HiveASTParser.TOK_QUERY) {
        isAlterViewAs = true;
    }
    queryState.setCommandType(HiveOperation.CREATEVIEW);
    HiveParserCreateViewInfo createViewInfo = new HiveParserCreateViewInfo(dbDotTable, cols, selectStmt);
    hiveParser.analyzeCreateView(createViewInfo, context, queryState, hiveShim);
    ObjectIdentifier viewIdentifier = parseObjectIdentifier(createViewInfo.getCompoundName());
    TableSchema schema = HiveTableUtil.createTableSchema(createViewInfo.getSchema(), Collections.emptyList(), Collections.emptySet(), null);
    Map<String, String> props = new HashMap<>();
    if (isAlterViewAs) {
        CatalogBaseTable baseTable = getCatalogBaseTable(viewIdentifier);
        props.putAll(baseTable.getOptions());
        comment = baseTable.getComment();
    } else {
        if (tblProps != null) {
            props.putAll(tblProps);
        }
    }
    CatalogView catalogView = new CatalogViewImpl(createViewInfo.getOriginalText(), createViewInfo.getExpandedText(), schema, props, comment);
    if (isAlterViewAs) {
        return new AlterViewAsOperation(viewIdentifier, catalogView);
    } else {
        return new CreateViewOperation(viewIdentifier, catalogView, ifNotExists, false);
    }
}
Also used : AlterViewAsOperation(org.apache.flink.table.operations.ddl.AlterViewAsOperation) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode) ValidationException(org.apache.flink.table.api.ValidationException) CatalogViewImpl(org.apache.flink.table.catalog.CatalogViewImpl) TableSchema(org.apache.flink.table.api.TableSchema) HiveParserStorageFormat(org.apache.flink.table.planner.delegation.hive.copy.HiveParserStorageFormat) LinkedHashMap(java.util.LinkedHashMap) HashMap(java.util.HashMap) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) NotNullConstraint(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint) UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint) CreateViewOperation(org.apache.flink.table.operations.ddl.CreateViewOperation) CatalogView(org.apache.flink.table.catalog.CatalogView) ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier)
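
As with the earlier sketches, a hedged mapping of CREATE VIEW statements to the branches above; tEnv is assumed to be in Hive dialect, and db1.v and db1.t are made-up names.

import org.apache.flink.table.api.TableEnvironment;

public class CreateViewDispatchSketch {
    static void run(TableEnvironment tEnv) {
        // Exercises TOK_IFNOTEXISTS, TOK_TABCOLNAME, TOK_TABLECOMMENT,
        // TOK_TABLEPROPERTIES, and TOK_QUERY in one statement
        tEnv.executeSql(
                "CREATE VIEW IF NOT EXISTS db1.v (a, b) COMMENT 'demo' "
                        + "TBLPROPERTIES ('owner'='me') AS SELECT x, y FROM db1.t");
        // Each of these would hit handleUnsupportedOperation instead:
        //   CREATE MATERIALIZED VIEW ...        "MATERIALIZED VIEW is not supported"
        //   CREATE OR REPLACE VIEW ...          "CREATE OR REPLACE VIEW is not supported"
        //   CREATE VIEW ... PARTITIONED ON ...  "PARTITION COLUMN for view is not supported"
    }
}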

Example 60 with CatalogBaseTable

Use of org.apache.flink.table.catalog.CatalogBaseTable in project flink by apache.

Class HiveCatalogGenericMetadataTest, method testTableSchemaCompatibility:

@Test
// NOTE: Be careful when modifying this test; it is important for backward compatibility.
public void testTableSchemaCompatibility() throws Exception {
    catalog.createDatabase(db1, createDb(), false);
    try {
        // table with numeric types
        ObjectPath tablePath = new ObjectPath(db1, "generic1");
        Table hiveTable = org.apache.hadoop.hive.ql.metadata.Table.getEmptyTable(tablePath.getDatabaseName(), tablePath.getObjectName());
        hiveTable.setDbName(tablePath.getDatabaseName());
        hiveTable.setTableName(tablePath.getObjectName());
        setLegacyGeneric(hiveTable.getParameters());
        hiveTable.getParameters().put("flink.generic.table.schema.0.name", "ti");
        hiveTable.getParameters().put("flink.generic.table.schema.0.data-type", "TINYINT");
        hiveTable.getParameters().put("flink.generic.table.schema.1.name", "si");
        hiveTable.getParameters().put("flink.generic.table.schema.1.data-type", "SMALLINT");
        hiveTable.getParameters().put("flink.generic.table.schema.2.name", "i");
        hiveTable.getParameters().put("flink.generic.table.schema.2.data-type", "INT");
        hiveTable.getParameters().put("flink.generic.table.schema.3.name", "bi");
        hiveTable.getParameters().put("flink.generic.table.schema.3.data-type", "BIGINT");
        hiveTable.getParameters().put("flink.generic.table.schema.4.name", "f");
        hiveTable.getParameters().put("flink.generic.table.schema.4.data-type", "FLOAT");
        hiveTable.getParameters().put("flink.generic.table.schema.5.name", "d");
        hiveTable.getParameters().put("flink.generic.table.schema.5.data-type", "DOUBLE");
        hiveTable.getParameters().put("flink.generic.table.schema.6.name", "de");
        hiveTable.getParameters().put("flink.generic.table.schema.6.data-type", "DECIMAL(10, 5)");
        hiveTable.getParameters().put("flink.generic.table.schema.7.name", "cost");
        hiveTable.getParameters().put("flink.generic.table.schema.7.expr", "`d` * `bi`");
        hiveTable.getParameters().put("flink.generic.table.schema.7.data-type", "DOUBLE");
        ((HiveCatalog) catalog).client.createTable(hiveTable);
        CatalogBaseTable catalogBaseTable = catalog.getTable(tablePath);
        assertFalse(HiveCatalog.isHiveTable(catalogBaseTable.getOptions()));
        TableSchema expectedSchema =
                TableSchema.builder()
                        .fields(
                                new String[] { "ti", "si", "i", "bi", "f", "d", "de" },
                                new DataType[] {
                                    DataTypes.TINYINT(), DataTypes.SMALLINT(), DataTypes.INT(),
                                    DataTypes.BIGINT(), DataTypes.FLOAT(), DataTypes.DOUBLE(),
                                    DataTypes.DECIMAL(10, 5)
                                })
                        .field("cost", DataTypes.DOUBLE(), "`d` * `bi`")
                        .build();
        assertEquals(expectedSchema, catalogBaseTable.getSchema());
        // table with character types
        tablePath = new ObjectPath(db1, "generic2");
        hiveTable = org.apache.hadoop.hive.ql.metadata.Table.getEmptyTable(tablePath.getDatabaseName(), tablePath.getObjectName());
        hiveTable.setDbName(tablePath.getDatabaseName());
        hiveTable.setTableName(tablePath.getObjectName());
        setLegacyGeneric(hiveTable.getParameters());
        hiveTable.setTableName(tablePath.getObjectName());
        hiveTable.getParameters().put("flink.generic.table.schema.0.name", "c");
        hiveTable.getParameters().put("flink.generic.table.schema.0.data-type", "CHAR(265)");
        hiveTable.getParameters().put("flink.generic.table.schema.1.name", "vc");
        hiveTable.getParameters().put("flink.generic.table.schema.1.data-type", "VARCHAR(65536)");
        hiveTable.getParameters().put("flink.generic.table.schema.2.name", "s");
        hiveTable.getParameters().put("flink.generic.table.schema.2.data-type", "VARCHAR(2147483647)");
        hiveTable.getParameters().put("flink.generic.table.schema.3.name", "b");
        hiveTable.getParameters().put("flink.generic.table.schema.3.data-type", "BINARY(1)");
        hiveTable.getParameters().put("flink.generic.table.schema.4.name", "vb");
        hiveTable.getParameters().put("flink.generic.table.schema.4.data-type", "VARBINARY(255)");
        hiveTable.getParameters().put("flink.generic.table.schema.5.name", "bs");
        hiveTable.getParameters().put("flink.generic.table.schema.5.data-type", "VARBINARY(2147483647)");
        hiveTable.getParameters().put("flink.generic.table.schema.6.name", "len");
        hiveTable.getParameters().put("flink.generic.table.schema.6.expr", "CHAR_LENGTH(`s`)");
        hiveTable.getParameters().put("flink.generic.table.schema.6.data-type", "INT");
        ((HiveCatalog) catalog).client.createTable(hiveTable);
        catalogBaseTable = catalog.getTable(tablePath);
        expectedSchema =
                TableSchema.builder()
                        .fields(
                                new String[] { "c", "vc", "s", "b", "vb", "bs" },
                                new DataType[] {
                                    DataTypes.CHAR(265), DataTypes.VARCHAR(65536), DataTypes.STRING(),
                                    DataTypes.BINARY(1), DataTypes.VARBINARY(255), DataTypes.BYTES()
                                })
                        .field("len", DataTypes.INT(), "CHAR_LENGTH(`s`)")
                        .build();
        assertEquals(expectedSchema, catalogBaseTable.getSchema());
        // table with date/time types
        tablePath = new ObjectPath(db1, "generic3");
        hiveTable = org.apache.hadoop.hive.ql.metadata.Table.getEmptyTable(tablePath.getDatabaseName(), tablePath.getObjectName());
        hiveTable.setDbName(tablePath.getDatabaseName());
        hiveTable.setTableName(tablePath.getObjectName());
        setLegacyGeneric(hiveTable.getParameters());
        hiveTable.setTableName(tablePath.getObjectName());
        hiveTable.getParameters().put("flink.generic.table.schema.0.name", "dt");
        hiveTable.getParameters().put("flink.generic.table.schema.0.data-type", "DATE");
        hiveTable.getParameters().put("flink.generic.table.schema.1.name", "t");
        hiveTable.getParameters().put("flink.generic.table.schema.1.data-type", "TIME(0)");
        hiveTable.getParameters().put("flink.generic.table.schema.2.name", "ts");
        hiveTable.getParameters().put("flink.generic.table.schema.2.data-type", "TIMESTAMP(3)");
        hiveTable.getParameters().put("flink.generic.table.schema.3.name", "tstz");
        hiveTable.getParameters().put("flink.generic.table.schema.3.data-type", "TIMESTAMP(6) WITH LOCAL TIME ZONE");
        hiveTable.getParameters().put("flink.generic.table.schema.watermark.0.rowtime", "ts");
        hiveTable.getParameters().put("flink.generic.table.schema.watermark.0.strategy.data-type", "TIMESTAMP(3)");
        hiveTable.getParameters().put("flink.generic.table.schema.watermark.0.strategy.expr", "ts");
        ((HiveCatalog) catalog).client.createTable(hiveTable);
        catalogBaseTable = catalog.getTable(tablePath);
        expectedSchema =
                TableSchema.builder()
                        .fields(
                                new String[] { "dt", "t", "ts", "tstz" },
                                new DataType[] {
                                    DataTypes.DATE(), DataTypes.TIME(), DataTypes.TIMESTAMP(3),
                                    DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE()
                                })
                        .watermark("ts", "ts", DataTypes.TIMESTAMP(3))
                        .build();
        assertEquals(expectedSchema, catalogBaseTable.getSchema());
        // table with complex/misc types
        tablePath = new ObjectPath(db1, "generic4");
        hiveTable = org.apache.hadoop.hive.ql.metadata.Table.getEmptyTable(tablePath.getDatabaseName(), tablePath.getObjectName());
        hiveTable.setDbName(tablePath.getDatabaseName());
        hiveTable.setTableName(tablePath.getObjectName());
        setLegacyGeneric(hiveTable.getParameters());
        hiveTable.setTableName(tablePath.getObjectName());
        hiveTable.getParameters().put("flink.generic.table.schema.0.name", "a");
        hiveTable.getParameters().put("flink.generic.table.schema.0.data-type", "ARRAY<INT>");
        hiveTable.getParameters().put("flink.generic.table.schema.1.name", "m");
        hiveTable.getParameters().put("flink.generic.table.schema.1.data-type", "MAP<BIGINT, TIMESTAMP(6)>");
        hiveTable.getParameters().put("flink.generic.table.schema.2.name", "mul");
        hiveTable.getParameters().put("flink.generic.table.schema.2.data-type", "MULTISET<DOUBLE>");
        hiveTable.getParameters().put("flink.generic.table.schema.3.name", "r");
        hiveTable.getParameters().put("flink.generic.table.schema.3.data-type", "ROW<`f1` INT, `f2` VARCHAR(2147483647)>");
        hiveTable.getParameters().put("flink.generic.table.schema.4.name", "b");
        hiveTable.getParameters().put("flink.generic.table.schema.4.data-type", "BOOLEAN");
        hiveTable.getParameters().put("flink.generic.table.schema.5.name", "ts");
        hiveTable.getParameters().put("flink.generic.table.schema.5.data-type", "TIMESTAMP(3)");
        hiveTable.getParameters().put("flink.generic.table.schema.watermark.0.rowtime", "ts");
        hiveTable.getParameters().put("flink.generic.table.schema.watermark.0.strategy.data-type", "TIMESTAMP(3)");
        hiveTable.getParameters().put("flink.generic.table.schema.watermark.0.strategy.expr", "`ts` - INTERVAL '5' SECOND");
        ((HiveCatalog) catalog).client.createTable(hiveTable);
        catalogBaseTable = catalog.getTable(tablePath);
        expectedSchema =
                TableSchema.builder()
                        .fields(
                                new String[] { "a", "m", "mul", "r", "b", "ts" },
                                new DataType[] {
                                    DataTypes.ARRAY(DataTypes.INT()),
                                    DataTypes.MAP(DataTypes.BIGINT(), DataTypes.TIMESTAMP()),
                                    DataTypes.MULTISET(DataTypes.DOUBLE()),
                                    DataTypes.ROW(
                                            DataTypes.FIELD("f1", DataTypes.INT()),
                                            DataTypes.FIELD("f2", DataTypes.STRING())),
                                    DataTypes.BOOLEAN(),
                                    DataTypes.TIMESTAMP(3)
                                })
                        .watermark("ts", "`ts` - INTERVAL '5' SECOND", DataTypes.TIMESTAMP(3))
                        .build();
        assertEquals(expectedSchema, catalogBaseTable.getSchema());
    } finally {
        catalog.dropDatabase(db1, true, true);
    }
}
Also used : ObjectPath(org.apache.flink.table.catalog.ObjectPath) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) CatalogTable(org.apache.flink.table.catalog.CatalogTable) CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable) Table(org.apache.hadoop.hive.metastore.api.Table) TableSchema(org.apache.flink.table.api.TableSchema) DataType(org.apache.flink.table.types.DataType) Test(org.junit.Test)
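
The test encodes each column as a pair of flink.generic.table.schema.<i>.name / .data-type parameters, with an extra .expr key for computed columns and the watermark under flink.generic.table.schema.watermark.0.*. A small hedged helper (hypothetical, but following exactly the key format used above) makes the repetition clearer:

import java.util.Map;

public class SchemaParamsSketch {
    // Hypothetical helpers; the key format mirrors the test above.
    static void putColumn(Map<String, String> params, int i, String name, String dataType) {
        params.put("flink.generic.table.schema." + i + ".name", name);
        params.put("flink.generic.table.schema." + i + ".data-type", dataType);
    }

    static void putWatermark(Map<String, String> params, String rowtime, String expr, String dataType) {
        params.put("flink.generic.table.schema.watermark.0.rowtime", rowtime);
        params.put("flink.generic.table.schema.watermark.0.strategy.expr", expr);
        params.put("flink.generic.table.schema.watermark.0.strategy.data-type", dataType);
    }
}

// Usage, replacing the hand-written puts for the "generic3" table:
//   putColumn(hiveTable.getParameters(), 0, "dt", "DATE");
//   putWatermark(hiveTable.getParameters(), "ts", "ts", "TIMESTAMP(3)");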

Aggregations

CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 110 usages
ObjectPath (org.apache.flink.table.catalog.ObjectPath): 56 usages
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 46 usages
Test (org.junit.Test): 46 usages
ValidationException (org.apache.flink.table.api.ValidationException): 33 usages
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 30 usages
CatalogView (org.apache.flink.table.catalog.CatalogView): 27 usages
TableSchema (org.apache.flink.table.api.TableSchema): 25 usages
Table (org.apache.hadoop.hive.metastore.api.Table): 21 usages
HashMap (java.util.HashMap): 19 usages
SqlCreateHiveTable (org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable): 18 usages
UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint): 15 usages
ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable): 15 usages
Map (java.util.Map): 13 usages
LinkedHashMap (java.util.LinkedHashMap): 12 usages
CatalogTableImpl (org.apache.flink.table.catalog.CatalogTableImpl): 12 usages
AlterViewAsOperation (org.apache.flink.table.operations.ddl.AlterViewAsOperation): 12 usages
DropTableOperation (org.apache.flink.table.operations.ddl.DropTableOperation): 12 usages
ArrayList (java.util.ArrayList): 9 usages
CatalogException (org.apache.flink.table.catalog.exceptions.CatalogException): 9 usages