
Example 21 with CatalogTable

Use of org.apache.flink.table.catalog.CatalogTable in project flink by apache.

From the class HiveParserDDLSemanticAnalyzer, method convertAlterTableProps.

private Operation convertAlterTableProps(
        CatalogBaseTable oldBaseTable,
        String tableName,
        Map<String, String> partSpec,
        Map<String, String> newProps) {
    ObjectIdentifier tableIdentifier = parseObjectIdentifier(tableName);
    CatalogTable oldTable = (CatalogTable) oldBaseTable;
    CatalogPartitionSpec catalogPartitionSpec =
            partSpec != null ? new CatalogPartitionSpec(partSpec) : null;
    CatalogPartition catalogPartition =
            partSpec != null ? getPartition(tableIdentifier, catalogPartitionSpec) : null;
    Map<String, String> props = new HashMap<>();
    if (catalogPartition != null) {
        // a partition spec was given: merge the partition's existing properties
        // with the new ones (new values win) and alter that partition
        props.putAll(catalogPartition.getProperties());
        props.putAll(newProps);
        return new AlterPartitionPropertiesOperation(
                tableIdentifier,
                catalogPartitionSpec,
                new CatalogPartitionImpl(props, catalogPartition.getComment()));
    } else {
        // no partition spec: merge the table's existing options with the new ones
        // and alter the table itself
        props.putAll(oldTable.getOptions());
        props.putAll(newProps);
        return new AlterTableOptionsOperation(tableIdentifier, oldTable.copy(props));
    }
}
Also used: AlterTableOptionsOperation (org.apache.flink.table.operations.ddl.AlterTableOptionsOperation), LinkedHashMap (java.util.LinkedHashMap), HashMap (java.util.HashMap), CatalogPartition (org.apache.flink.table.catalog.CatalogPartition), AlterPartitionPropertiesOperation (org.apache.flink.table.operations.ddl.AlterPartitionPropertiesOperation), CatalogTable (org.apache.flink.table.catalog.CatalogTable), CatalogPartitionSpec (org.apache.flink.table.catalog.CatalogPartitionSpec), ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier), CatalogPartitionImpl (org.apache.flink.table.catalog.CatalogPartitionImpl).
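
The precedence in the merge above is worth calling out: the existing properties are copied first and the new ones are applied on top, so new values win. Below is a minimal, self-contained sketch of that merge; the class and method names are illustrative, not Flink API.

import java.util.HashMap;
import java.util.Map;

class PropsMergeSketch {
    // merge oldProps and newProps; later puts overwrite earlier ones,
    // so entries in newProps take precedence
    static Map<String, String> mergeProps(Map<String, String> oldProps, Map<String, String> newProps) {
        Map<String, String> merged = new HashMap<>(oldProps);
        merged.putAll(newProps);
        return merged;
    }

    public static void main(String[] args) {
        Map<String, String> oldProps = new HashMap<>();
        oldProps.put("format", "orc");
        Map<String, String> newProps = new HashMap<>();
        newProps.put("format", "parquet");
        // prints {format=parquet}: the new value replaced the old one
        System.out.println(mergeProps(oldProps, newProps));
    }
}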

Example 22 with CatalogTable

Use of org.apache.flink.table.catalog.CatalogTable in project flink by apache.

From the class HiveParserDDLSemanticAnalyzer, method convertAlterTableModifyCols.

private Operation convertAlterTableModifyCols(
        CatalogBaseTable alteredTable, String tblName, HiveParserASTNode ast, boolean replace)
        throws SemanticException {
    List<FieldSchema> newCols =
            HiveParserBaseSemanticAnalyzer.getColumns((HiveParserASTNode) ast.getChild(0));
    boolean isCascade = ast.getFirstChildWithType(HiveASTParser.TOK_CASCADE) != null;
    ObjectIdentifier tableIdentifier = parseObjectIdentifier(tblName);
    CatalogTable oldTable = (CatalogTable) alteredTable;
    // prepare properties
    Map<String, String> props = new HashMap<>(oldTable.getOptions());
    props.put(ALTER_TABLE_OP, ALTER_COLUMNS.name());
    if (isCascade) {
        props.put(ALTER_COL_CASCADE, "true");
    }
    TableSchema oldSchema = oldTable.getSchema();
    final int numPartCol = oldTable.getPartitionKeys().size();
    TableSchema.Builder builder = TableSchema.builder();
    // add existing non-partition columns if we're not replacing
    if (!replace) {
        List<TableColumn> nonPartCols =
                oldSchema.getTableColumns().subList(0, oldSchema.getFieldCount() - numPartCol);
        for (TableColumn column : nonPartCols) {
            builder.add(column);
        }
        setWatermarkAndPK(builder, oldSchema);
    }
    // add new columns
    for (FieldSchema col : newCols) {
        builder.add(
                TableColumn.physical(
                        col.getName(),
                        HiveTypeUtil.toFlinkType(
                                TypeInfoUtils.getTypeInfoFromTypeString(col.getType()))));
    }
    // add partition columns, which always stay at the end of the schema
    List<TableColumn> partCols =
            oldSchema.getTableColumns()
                    .subList(oldSchema.getFieldCount() - numPartCol, oldSchema.getFieldCount());
    for (TableColumn column : partCols) {
        builder.add(column);
    }
    return new AlterTableSchemaOperation(
            tableIdentifier,
            new CatalogTableImpl(
                    builder.build(), oldTable.getPartitionKeys(), props, oldTable.getComment()));
}
Also used: TableSchema (org.apache.flink.table.api.TableSchema), LinkedHashMap (java.util.LinkedHashMap), HashMap (java.util.HashMap), FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema), CatalogTable (org.apache.flink.table.catalog.CatalogTable), TableColumn (org.apache.flink.table.api.TableColumn), NotNullConstraint (org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint), UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint), CatalogTableImpl (org.apache.flink.table.catalog.CatalogTableImpl), AlterTableSchemaOperation (org.apache.flink.table.operations.ddl.AlterTableSchemaOperation), ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier).
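
The sublist arithmetic above encodes an invariant: partition columns always sit at the tail of the schema, so the rebuilt column list is [existing non-partition columns, unless replacing] + [new columns] + [partition columns]. A self-contained sketch of just that ordering, with plain strings standing in for TableColumn:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

class ColumnOrderSketch {
    static List<String> rebuild(List<String> oldCols, int numPartCols, List<String> newCols, boolean replace) {
        int nonPartEnd = oldCols.size() - numPartCols;
        List<String> result = new ArrayList<>();
        if (!replace) {
            // keep the existing non-partition columns
            result.addAll(oldCols.subList(0, nonPartEnd));
        }
        // append the newly declared columns
        result.addAll(newCols);
        // partition columns always come last
        result.addAll(oldCols.subList(nonPartEnd, oldCols.size()));
        return result;
    }

    public static void main(String[] args) {
        // ADD COLUMNS keeps a and b: prints [a, b, c, p]
        System.out.println(rebuild(Arrays.asList("a", "b", "p"), 1, Arrays.asList("c"), false));
        // REPLACE COLUMNS drops them: prints [c, p]
        System.out.println(rebuild(Arrays.asList("a", "b", "p"), 1, Arrays.asList("c"), true));
    }
}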

Example 23 with CatalogTable

Use of org.apache.flink.table.catalog.CatalogTable in project flink by apache.

From the class HiveParserDDLSemanticAnalyzer, method convertDropTable.

private Operation convertDropTable(HiveParserASTNode ast, TableType expectedType) {
    String tableName =
            HiveParserBaseSemanticAnalyzer.getUnescapedName((HiveParserASTNode) ast.getChild(0));
    boolean ifExists = ast.getFirstChildWithType(HiveASTParser.TOK_IFEXISTS) != null;
    ObjectIdentifier identifier = parseObjectIdentifier(tableName);
    CatalogBaseTable baseTable = getCatalogBaseTable(identifier, true);
    if (expectedType == TableType.VIRTUAL_VIEW) {
        // DROP VIEW: reject when the identifier actually names a table
        if (baseTable instanceof CatalogTable) {
            throw new ValidationException("DROP VIEW for a table is not allowed");
        }
        return new DropViewOperation(identifier, ifExists, false);
    } else {
        // DROP TABLE: reject when the identifier actually names a view
        if (baseTable instanceof CatalogView) {
            throw new ValidationException("DROP TABLE for a view is not allowed");
        }
        return new DropTableOperation(identifier, ifExists, false);
    }
}
Also used: CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable), ValidationException (org.apache.flink.table.api.ValidationException), DropViewOperation (org.apache.flink.table.operations.ddl.DropViewOperation), DropTableOperation (org.apache.flink.table.operations.ddl.DropTableOperation), CatalogTable (org.apache.flink.table.catalog.CatalogTable), CatalogView (org.apache.flink.table.catalog.CatalogView), ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier).
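
The guard is a simple kind check: a DROP statement is rejected when the catalog object's kind contradicts the statement's. A minimal sketch of the same pattern with stand-in types (Table and View here are not the Flink classes):

class DropGuardSketch {
    interface CatalogObject {}
    static class Table implements CatalogObject {}
    static class View implements CatalogObject {}

    // reject a DROP whose kind contradicts the object's actual kind
    static void checkDrop(CatalogObject obj, boolean droppingView) {
        if (droppingView && obj instanceof Table) {
            throw new IllegalArgumentException("DROP VIEW for a table is not allowed");
        }
        if (!droppingView && obj instanceof View) {
            throw new IllegalArgumentException("DROP TABLE for a view is not allowed");
        }
    }

    public static void main(String[] args) {
        checkDrop(new Table(), false); // ok: DROP TABLE on a table
        try {
            checkDrop(new View(), false); // DROP TABLE on a view is rejected
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage());
        }
    }
}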

Example 24 with CatalogTable

Use of org.apache.flink.table.catalog.CatalogTable in project flink by apache.

From the class HiveParserDDLSemanticAnalyzer, method getAlteredTable.

private CatalogBaseTable getAlteredTable(String tableName, boolean expectView) {
    ObjectIdentifier objectIdentifier = parseObjectIdentifier(tableName);
    CatalogBaseTable catalogBaseTable = getCatalogBaseTable(objectIdentifier);
    if (expectView) {
        // ALTER VIEW: reject when the identifier actually names a table
        if (catalogBaseTable instanceof CatalogTable) {
            throw new ValidationException("ALTER VIEW for a table is not allowed");
        }
    } else {
        // ALTER TABLE: reject when the identifier actually names a view
        if (catalogBaseTable instanceof CatalogView) {
            throw new ValidationException("ALTER TABLE for a view is not allowed");
        }
    }
    return catalogBaseTable;
}
Also used: CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable), ValidationException (org.apache.flink.table.api.ValidationException), CatalogTable (org.apache.flink.table.catalog.CatalogTable), CatalogView (org.apache.flink.table.catalog.CatalogView), ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier).
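
getAlteredTable mirrors the DROP guard from the previous example, just for ALTER statements. Since both guards share one shape, a generic helper makes the symmetry explicit; this refactor is purely illustrative and not how the Flink code is organized:

class AlterGuardSketch {
    interface CatalogObject {}
    static class Table implements CatalogObject {}
    static class View implements CatalogObject {}

    // one helper covers both the ALTER TABLE and ALTER VIEW checks
    static void expectNotInstanceOf(CatalogObject obj, Class<?> forbidden, String message) {
        if (forbidden.isInstance(obj)) {
            throw new IllegalArgumentException(message);
        }
    }

    public static void main(String[] args) {
        CatalogObject table = new Table();
        expectNotInstanceOf(table, View.class, "ALTER TABLE for a view is not allowed"); // passes
        try {
            expectNotInstanceOf(table, Table.class, "ALTER VIEW for a table is not allowed");
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage()); // the table fails the view-only check
        }
    }
}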

Example 25 with CatalogTable

Use of org.apache.flink.table.catalog.CatalogTable in project flink by apache.

From the test class HiveDynamicTableFactoryTest, method getTableSink.

private DynamicTableSink getTableSink(String tableName) throws Exception {
    TableEnvironmentInternal tableEnvInternal = (TableEnvironmentInternal) tableEnv;
    ObjectIdentifier tableIdentifier =
            ObjectIdentifier.of(hiveCatalog.getName(), "default", tableName);
    CatalogTable catalogTable =
            (CatalogTable) hiveCatalog.getTable(tableIdentifier.toObjectPath());
    // create the sink through the catalog's own factory, resolving the catalog
    // table against the catalog manager first
    return FactoryUtil.createDynamicTableSink(
            (DynamicTableSinkFactory)
                    hiveCatalog.getFactory().orElseThrow(IllegalStateException::new),
            tableIdentifier,
            tableEnvInternal.getCatalogManager().resolveCatalogTable(catalogTable),
            tableEnv.getConfig().getConfiguration(),
            Thread.currentThread().getContextClassLoader(),
            // isTemporary
            false);
}
Also used: TableEnvironmentInternal (org.apache.flink.table.api.internal.TableEnvironmentInternal), CatalogTable (org.apache.flink.table.catalog.CatalogTable), ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier).
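
The helper above relies on the identifier plumbing: an ObjectIdentifier names catalog.database.object, and toObjectPath() drops the catalog part, which is the form Catalog#getTable expects. A small sketch of just that conversion, assuming flink-table-common is on the classpath (the catalog and table names are made up):

import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.catalog.ObjectPath;

class IdentifierSketch {
    public static void main(String[] args) {
        ObjectIdentifier id = ObjectIdentifier.of("myhive", "default", "orders");
        // ObjectPath keeps only database.object for catalog-level lookups
        ObjectPath path = id.toObjectPath();
        System.out.println(path.getFullName()); // default.orders
    }
}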

Aggregations (usage counts across the indexed sources):

CatalogTable (org.apache.flink.table.catalog.CatalogTable): 68
Test (org.junit.Test): 35
HashMap (java.util.HashMap): 30
CatalogTableImpl (org.apache.flink.table.catalog.CatalogTableImpl): 24
TableSchema (org.apache.flink.table.api.TableSchema): 17
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 17
CreateTableOperation (org.apache.flink.table.operations.ddl.CreateTableOperation): 14
ValidationException (org.apache.flink.table.api.ValidationException): 13
ObjectPath (org.apache.flink.table.catalog.ObjectPath): 13
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 12
Operation (org.apache.flink.table.operations.Operation): 12
AlterTableAddConstraintOperation (org.apache.flink.table.operations.ddl.AlterTableAddConstraintOperation): 12
AlterTableDropConstraintOperation (org.apache.flink.table.operations.ddl.AlterTableDropConstraintOperation): 12
AlterTableOptionsOperation (org.apache.flink.table.operations.ddl.AlterTableOptionsOperation): 12
AlterTableRenameOperation (org.apache.flink.table.operations.ddl.AlterTableRenameOperation): 12
ExplainOperation (org.apache.flink.table.operations.ExplainOperation): 11
LoadModuleOperation (org.apache.flink.table.operations.LoadModuleOperation): 11
QueryOperation (org.apache.flink.table.operations.QueryOperation): 11
ShowFunctionsOperation (org.apache.flink.table.operations.ShowFunctionsOperation): 11
ShowModulesOperation (org.apache.flink.table.operations.ShowModulesOperation): 11