
Example 71 with ObjectIdentifier

use of org.apache.flink.table.catalog.ObjectIdentifier in project flink by splunk.

the class HiveParserDDLSemanticAnalyzer method convertAlterTableRename.

private Operation convertAlterTableRename(String sourceName, HiveParserASTNode ast, boolean expectView) throws SemanticException {
    String[] target = HiveParserBaseSemanticAnalyzer.getQualifiedTableName((HiveParserASTNode) ast.getChild(0));
    String targetName = HiveParserBaseSemanticAnalyzer.getDotName(target);
    ObjectIdentifier objectIdentifier = parseObjectIdentifier(sourceName);
    return expectView
            ? new AlterViewRenameOperation(objectIdentifier, parseObjectIdentifier(targetName))
            : new AlterTableRenameOperation(objectIdentifier, parseObjectIdentifier(targetName));
}
Also used: AlterTableRenameOperation(org.apache.flink.table.operations.ddl.AlterTableRenameOperation), AlterViewRenameOperation(org.apache.flink.table.operations.ddl.AlterViewRenameOperation), ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier)
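
To make the rename path concrete, here is a minimal, self-contained sketch that builds both identifiers directly with ObjectIdentifier.of instead of going through parseObjectIdentifier. The catalog, database, and table names are placeholders, not values taken from the snippet above.

import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.operations.Operation;
import org.apache.flink.table.operations.ddl.AlterTableRenameOperation;

public class RenameSketch {

    // Hypothetical helper: builds the rename operation from already-qualified names.
    static Operation renameTable(String catalog, String database, String oldName, String newName) {
        ObjectIdentifier source = ObjectIdentifier.of(catalog, database, oldName);
        ObjectIdentifier target = ObjectIdentifier.of(catalog, database, newName);
        return new AlterTableRenameOperation(source, target);
    }

    public static void main(String[] args) {
        // e.g. rename hive.default.orders to hive.default.orders_v2
        Operation op = renameTable("hive", "default", "orders", "orders_v2");
        System.out.println(op.asSummaryString());
    }
}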

Example 72 with ObjectIdentifier

use of org.apache.flink.table.catalog.ObjectIdentifier in project flink by splunk.

the class HiveParserDDLSemanticAnalyzer method convertAlterTableModifyCols.

private Operation convertAlterTableModifyCols(CatalogBaseTable alteredTable, String tblName, HiveParserASTNode ast, boolean replace) throws SemanticException {
    List<FieldSchema> newCols = HiveParserBaseSemanticAnalyzer.getColumns((HiveParserASTNode) ast.getChild(0));
    boolean isCascade = false;
    if (null != ast.getFirstChildWithType(HiveASTParser.TOK_CASCADE)) {
        isCascade = true;
    }
    ObjectIdentifier tableIdentifier = parseObjectIdentifier(tblName);
    CatalogTable oldTable = (CatalogTable) alteredTable;
    // prepare properties
    Map<String, String> props = new HashMap<>(oldTable.getOptions());
    props.put(ALTER_TABLE_OP, ALTER_COLUMNS.name());
    if (isCascade) {
        props.put(ALTER_COL_CASCADE, "true");
    }
    TableSchema oldSchema = oldTable.getSchema();
    final int numPartCol = oldTable.getPartitionKeys().size();
    TableSchema.Builder builder = TableSchema.builder();
    // add existing non-part col if we're not replacing
    if (!replace) {
        List<TableColumn> nonPartCols = oldSchema.getTableColumns().subList(0, oldSchema.getFieldCount() - numPartCol);
        for (TableColumn column : nonPartCols) {
            builder.add(column);
        }
        setWatermarkAndPK(builder, oldSchema);
    }
    // add new cols
    for (FieldSchema col : newCols) {
        builder.add(
                TableColumn.physical(
                        col.getName(),
                        HiveTypeUtil.toFlinkType(
                                TypeInfoUtils.getTypeInfoFromTypeString(col.getType()))));
    }
    // add part cols
    List<TableColumn> partCols =
            oldSchema.getTableColumns()
                    .subList(oldSchema.getFieldCount() - numPartCol, oldSchema.getFieldCount());
    for (TableColumn column : partCols) {
        builder.add(column);
    }
    return new AlterTableSchemaOperation(
            tableIdentifier,
            new CatalogTableImpl(
                    builder.build(), oldTable.getPartitionKeys(), props, oldTable.getComment()));
}
Also used: TableSchema(org.apache.flink.table.api.TableSchema), LinkedHashMap(java.util.LinkedHashMap), HashMap(java.util.HashMap), FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema), CatalogTable(org.apache.flink.table.catalog.CatalogTable), TableColumn(org.apache.flink.table.api.TableColumn), NotNullConstraint(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint), UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint), CatalogTableImpl(org.apache.flink.table.catalog.CatalogTableImpl), AlterTableSchemaOperation(org.apache.flink.table.operations.ddl.AlterTableSchemaOperation), ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier)
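
As a rough illustration of what the schema rebuild produces, the hypothetical sketch below assembles a new TableSchema with TableColumn.physical and wraps it in an AlterTableSchemaOperation. The column names, types, and the hive.default.users identifier are invented for the example; only the classes and constructors come from the snippet above.

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.TableColumn;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.catalog.CatalogTableImpl;
import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.operations.ddl.AlterTableSchemaOperation;

public class AddColumnSketch {

    public static void main(String[] args) {
        // Rebuild the schema with one extra physical column, mirroring the "add new cols" step.
        TableSchema newSchema = TableSchema.builder()
                .add(TableColumn.physical("id", DataTypes.BIGINT()))
                .add(TableColumn.physical("name", DataTypes.STRING()))
                // the newly added column
                .add(TableColumn.physical("age", DataTypes.INT()))
                .build();

        Map<String, String> props = new HashMap<>();
        ObjectIdentifier table = ObjectIdentifier.of("hive", "default", "users");

        // Wrap the new schema in a CatalogTableImpl (no partition keys, empty comment)
        // and hand it to the planner as a schema-change operation.
        AlterTableSchemaOperation op = new AlterTableSchemaOperation(
                table,
                new CatalogTableImpl(newSchema, Collections.emptyList(), props, ""));
        System.out.println(op.asSummaryString());
    }
}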

Example 73 with ObjectIdentifier

use of org.apache.flink.table.catalog.ObjectIdentifier in project flink by splunk.

the class HiveParserDDLSemanticAnalyzer method convertAlterTableAddParts.

/**
 * Add one or more partitions to a table. Useful when the data has been copied to the right
 * location by some other process.
 */
private Operation convertAlterTableAddParts(String[] qualified, CommonTree ast) {
    // ^(TOK_ALTERTABLE_ADDPARTS identifier ifNotExists?
    // alterStatementSuffixAddPartitionsElement+)
    boolean ifNotExists = ast.getChild(0).getType() == HiveASTParser.TOK_IFNOTEXISTS;
    Table tab = getTable(new ObjectPath(qualified[0], qualified[1]));
    boolean isView = tab.isView();
    validateAlterTableType(tab);
    int numCh = ast.getChildCount();
    int start = ifNotExists ? 1 : 0;
    String currentLocation = null;
    Map<String, String> currentPartSpec = null;
    // Parser has done some verification, so the order of tokens doesn't need to be verified
    // here.
    List<CatalogPartitionSpec> specs = new ArrayList<>();
    List<CatalogPartition> partitions = new ArrayList<>();
    for (int num = start; num < numCh; num++) {
        HiveParserASTNode child = (HiveParserASTNode) ast.getChild(num);
        switch(child.getToken().getType()) {
            case HiveASTParser.TOK_PARTSPEC:
                if (currentPartSpec != null) {
                    specs.add(new CatalogPartitionSpec(currentPartSpec));
                    Map<String, String> props = new HashMap<>();
                    if (currentLocation != null) {
                        props.put(TABLE_LOCATION_URI, currentLocation);
                    }
                    partitions.add(new CatalogPartitionImpl(props, null));
                    currentLocation = null;
                }
                currentPartSpec = getPartSpec(child);
                // validate reserved values
                validatePartitionValues(currentPartSpec);
                break;
            case HiveASTParser.TOK_PARTITIONLOCATION:
                // if location specified, set in partition
                if (isView) {
                    throw new ValidationException("LOCATION clause illegal for view partition");
                }
                currentLocation = HiveParserBaseSemanticAnalyzer.unescapeSQLString(child.getChild(0).getText());
                break;
            default:
                throw new ValidationException("Unknown child: " + child);
        }
    }
    // add the last one
    if (currentPartSpec != null) {
        specs.add(new CatalogPartitionSpec(currentPartSpec));
        Map<String, String> props = new HashMap<>();
        if (currentLocation != null) {
            props.put(TABLE_LOCATION_URI, currentLocation);
        }
        partitions.add(new CatalogPartitionImpl(props, null));
    }
    ObjectIdentifier tableIdentifier =
            tab.getDbName() == null
                    ? parseObjectIdentifier(tab.getTableName())
                    : catalogManager.qualifyIdentifier(
                            UnresolvedIdentifier.of(tab.getDbName(), tab.getTableName()));
    return new AddPartitionsOperation(tableIdentifier, ifNotExists, specs, partitions);
}
Also used: ObjectPath(org.apache.flink.table.catalog.ObjectPath), CatalogTable(org.apache.flink.table.catalog.CatalogTable), SqlCreateHiveTable(org.apache.flink.sql.parser.hive.ddl.SqlCreateHiveTable), Table(org.apache.hadoop.hive.ql.metadata.Table), ContextResolvedTable(org.apache.flink.table.catalog.ContextResolvedTable), CatalogBaseTable(org.apache.flink.table.catalog.CatalogBaseTable), HiveParserASTNode(org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode), ValidationException(org.apache.flink.table.api.ValidationException), LinkedHashMap(java.util.LinkedHashMap), HashMap(java.util.HashMap), ArrayList(java.util.ArrayList), NotNullConstraint(org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint), UniqueConstraint(org.apache.flink.table.api.constraints.UniqueConstraint), CatalogPartition(org.apache.flink.table.catalog.CatalogPartition), AddPartitionsOperation(org.apache.flink.table.operations.ddl.AddPartitionsOperation), CatalogPartitionSpec(org.apache.flink.table.catalog.CatalogPartitionSpec), CatalogPartitionImpl(org.apache.flink.table.catalog.CatalogPartitionImpl), ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier)
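
The following standalone sketch shows, under assumed placeholder names (hive.default.logs, partition key dt), how a single partition spec and its properties end up in an AddPartitionsOperation, mirroring the specs and partitions lists built above.

import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.flink.table.catalog.CatalogPartition;
import org.apache.flink.table.catalog.CatalogPartitionImpl;
import org.apache.flink.table.catalog.CatalogPartitionSpec;
import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.operations.ddl.AddPartitionsOperation;

public class AddPartitionSketch {

    public static void main(String[] args) {
        // One partition spec: PARTITION (dt='2024-01-01')
        Map<String, String> spec = new HashMap<>();
        spec.put("dt", "2024-01-01");

        // Partition-level properties; a LOCATION clause would be recorded here as well.
        Map<String, String> props = new HashMap<>();

        List<CatalogPartitionSpec> specs =
                Collections.singletonList(new CatalogPartitionSpec(spec));
        List<CatalogPartition> partitions =
                Collections.singletonList(new CatalogPartitionImpl(props, null));

        ObjectIdentifier table = ObjectIdentifier.of("hive", "default", "logs");
        AddPartitionsOperation op =
                new AddPartitionsOperation(table, /* ifNotExists */ true, specs, partitions);
        System.out.println(op.asSummaryString());
    }
}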

Example 74 with ObjectIdentifier

use of org.apache.flink.table.catalog.ObjectIdentifier in project flink by splunk.

the class HiveParserDDLSemanticAnalyzer method convertDropFunction.

private Operation convertDropFunction(HiveParserASTNode ast) {
    // ^(TOK_DROPFUNCTION identifier ifExists? $temp?)
    String functionName = ast.getChild(0).getText();
    boolean ifExists = (ast.getFirstChildWithType(HiveASTParser.TOK_IFEXISTS) != null);
    boolean isTemporaryFunction = (ast.getFirstChildWithType(HiveASTParser.TOK_TEMPORARY) != null);
    if (isTemporaryFunction) {
        return new DropTempSystemFunctionOperation(functionName, ifExists);
    } else {
        ObjectIdentifier identifier = parseObjectIdentifier(functionName);
        return new DropCatalogFunctionOperation(identifier, ifExists, false);
    }
}
Also used: DropTempSystemFunctionOperation(org.apache.flink.table.operations.ddl.DropTempSystemFunctionOperation), DropCatalogFunctionOperation(org.apache.flink.table.operations.ddl.DropCatalogFunctionOperation), ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier)
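
A small hypothetical helper can make the two branches explicit: temporary functions map to DropTempSystemFunctionOperation by name only, while catalog functions are addressed by a full ObjectIdentifier. The function and catalog names below are placeholders.

import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.operations.Operation;
import org.apache.flink.table.operations.ddl.DropCatalogFunctionOperation;
import org.apache.flink.table.operations.ddl.DropTempSystemFunctionOperation;

public class DropFunctionSketch {

    // Hypothetical helper mirroring the branch above: temporary functions become a
    // DropTempSystemFunctionOperation, everything else a DropCatalogFunctionOperation.
    static Operation dropFunction(ObjectIdentifier identifier, boolean ifExists, boolean temporary) {
        if (temporary) {
            return new DropTempSystemFunctionOperation(identifier.getObjectName(), ifExists);
        }
        // the last flag corresponds to the "false" (non-temporary) argument in the snippet above
        return new DropCatalogFunctionOperation(identifier, ifExists, false);
    }

    public static void main(String[] args) {
        ObjectIdentifier fn = ObjectIdentifier.of("hive", "default", "my_udf");
        System.out.println(dropFunction(fn, true, false).asSummaryString());
    }
}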

Example 75 with ObjectIdentifier

use of org.apache.flink.table.catalog.ObjectIdentifier in project flink by splunk.

the class HiveDynamicTableFactoryTest method getTableSource.

private DynamicTableSource getTableSource(String tableName) throws Exception {
    TableEnvironmentInternal tableEnvInternal = (TableEnvironmentInternal) tableEnv;
    ObjectIdentifier tableIdentifier = ObjectIdentifier.of(hiveCatalog.getName(), "default", tableName);
    CatalogTable catalogTable = (CatalogTable) hiveCatalog.getTable(tableIdentifier.toObjectPath());
    return FactoryUtil.createDynamicTableSource(
            (DynamicTableSourceFactory)
                    hiveCatalog.getFactory().orElseThrow(IllegalStateException::new),
            tableIdentifier,
            tableEnvInternal.getCatalogManager().resolveCatalogTable(catalogTable),
            tableEnv.getConfig().getConfiguration(),
            Thread.currentThread().getContextClassLoader(),
            false);
}
Also used: TableEnvironmentInternal(org.apache.flink.table.api.internal.TableEnvironmentInternal), CatalogTable(org.apache.flink.table.catalog.CatalogTable), ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier)
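
For the identifier handling on its own, here is a minimal sketch, assuming a generic Catalog instance, of how a fully qualified ObjectIdentifier is narrowed to the database-level ObjectPath that Catalog#getTable expects, as in the test above. The lookup helper itself is hypothetical.

import org.apache.flink.table.catalog.Catalog;
import org.apache.flink.table.catalog.CatalogBaseTable;
import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.catalog.ObjectPath;

public class LookupSketch {

    // Hypothetical lookup: build the three-part identifier, then drop the catalog part,
    // because Catalog#getTable is addressed relative to the catalog instance itself.
    static CatalogBaseTable lookup(Catalog catalog, String catalogName, String db, String table)
            throws Exception {
        ObjectIdentifier identifier = ObjectIdentifier.of(catalogName, db, table);
        ObjectPath path = identifier.toObjectPath();
        return catalog.getTable(path);
    }
}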

Aggregations

ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 185
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 66
UnresolvedIdentifier (org.apache.flink.table.catalog.UnresolvedIdentifier): 60
ValidationException (org.apache.flink.table.api.ValidationException): 59
HashMap (java.util.HashMap): 57
LinkedHashMap (java.util.LinkedHashMap): 48
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 42
ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable): 41
ResolvedCatalogTable (org.apache.flink.table.catalog.ResolvedCatalogTable): 33
ArrayList (java.util.ArrayList): 30
Map (java.util.Map): 27
UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint): 27
CatalogPartitionSpec (org.apache.flink.table.catalog.CatalogPartitionSpec): 24
NotNullConstraint (org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint): 24
TableException (org.apache.flink.table.api.TableException): 23
TableSchema (org.apache.flink.table.api.TableSchema): 23
CatalogView (org.apache.flink.table.catalog.CatalogView): 21
QueryOperation (org.apache.flink.table.operations.QueryOperation): 18
HiveParserASTNode (org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode): 18
List (java.util.List): 16