Search in sources:

Example 76 with ObjectIdentifier

use of org.apache.flink.table.catalog.ObjectIdentifier in project flink by splunk.

the class HiveDynamicTableFactoryTest method getTableSink.

private DynamicTableSink getTableSink(String tableName) throws Exception {
    TableEnvironmentInternal tableEnvInternal = (TableEnvironmentInternal) tableEnv;
    ObjectIdentifier tableIdentifier = ObjectIdentifier.of(hiveCatalog.getName(), "default", tableName);
    CatalogTable catalogTable = (CatalogTable) hiveCatalog.getTable(tableIdentifier.toObjectPath());
    return FactoryUtil.createDynamicTableSink(
            (DynamicTableSinkFactory) hiveCatalog.getFactory().orElseThrow(IllegalStateException::new),
            tableIdentifier,
            tableEnvInternal.getCatalogManager().resolveCatalogTable(catalogTable),
            tableEnv.getConfig().getConfiguration(),
            Thread.currentThread().getContextClassLoader(),
            false);
}
Also used: TableEnvironmentInternal(org.apache.flink.table.api.internal.TableEnvironmentInternal), CatalogTable(org.apache.flink.table.catalog.CatalogTable), ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier)
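
To make the identifier handling above concrete, here is a small standalone sketch (not part of the test; the catalog and table names are made up) showing how ObjectIdentifier.of(...) builds the fully qualified identifier and how toObjectPath() drops the catalog part for lookups such as hiveCatalog.getTable(...):

import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.catalog.ObjectPath;

public class ObjectIdentifierSketch {
    public static void main(String[] args) {
        // "myhive" stands in for hiveCatalog.getName(), "sink_table" for the test's tableName
        ObjectIdentifier tableIdentifier = ObjectIdentifier.of("myhive", "default", "sink_table");
        // catalog-relative path, the form Catalog#getTable expects
        ObjectPath path = tableIdentifier.toObjectPath();
        System.out.println(tableIdentifier.asSummaryString()); // myhive.default.sink_table
        System.out.println(path.getFullName());                // default.sink_table
    }
}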

Example 77 with ObjectIdentifier

use of org.apache.flink.table.catalog.ObjectIdentifier in project flink by splunk.

the class HiveParserDMLHelper method createInsertOperation.

public Operation createInsertOperation(HiveParserCalcitePlanner analyzer, RelNode queryRelNode) throws SemanticException {
    HiveParserQB topQB = analyzer.getQB();
    QBMetaData qbMetaData = topQB.getMetaData();
    // decide the dest table
    Map<String, Table> nameToDestTable = qbMetaData.getNameToDestTable();
    Map<String, Partition> nameToDestPart = qbMetaData.getNameToDestPartition();
    // for now we only support inserting to a single table
    Preconditions.checkState(nameToDestTable.size() <= 1 && nameToDestPart.size() <= 1, "Only support inserting to 1 table");
    Table destTable;
    String insClauseName;
    if (!nameToDestTable.isEmpty()) {
        insClauseName = nameToDestTable.keySet().iterator().next();
        destTable = nameToDestTable.values().iterator().next();
    } else if (!nameToDestPart.isEmpty()) {
        insClauseName = nameToDestPart.keySet().iterator().next();
        destTable = nameToDestPart.values().iterator().next().getTable();
    } else {
        // happens for INSERT DIRECTORY
        throw new SemanticException("INSERT DIRECTORY is not supported");
    }
    // decide static partition specs
    Map<String, String> staticPartSpec = new LinkedHashMap<>();
    if (destTable.isPartitioned()) {
        List<String> partCols = HiveCatalog.getFieldNames(destTable.getTTable().getPartitionKeys());
        if (!nameToDestPart.isEmpty()) {
            // static partition
            Partition destPart = nameToDestPart.values().iterator().next();
            Preconditions.checkState(partCols.size() == destPart.getValues().size(), "Part cols and static spec doesn't match");
            for (int i = 0; i < partCols.size(); i++) {
                staticPartSpec.put(partCols.get(i), destPart.getValues().get(i));
            }
        } else {
            // dynamic partition
            Map<String, String> spec = qbMetaData.getPartSpecForAlias(insClauseName);
            if (spec != null) {
                for (String partCol : partCols) {
                    String val = spec.get(partCol);
                    if (val != null) {
                        staticPartSpec.put(partCol, val);
                    }
                }
            }
        }
    }
    // decide whether it's overwrite
    boolean overwrite =
            topQB.getParseInfo().getInsertOverwriteTables().keySet().stream()
                    .map(String::toLowerCase)
                    .collect(Collectors.toSet())
                    .contains(destTable.getDbName() + "." + destTable.getTableName());
    Tuple4<ObjectIdentifier, QueryOperation, Map<String, String>, Boolean> insertOperationInfo =
            createInsertOperationInfo(
                    queryRelNode,
                    destTable,
                    staticPartSpec,
                    analyzer.getDestSchemaForClause(insClauseName),
                    overwrite);
    return new SinkModifyOperation(
            catalogManager.getTableOrError(insertOperationInfo.f0),
            insertOperationInfo.f1,
            insertOperationInfo.f2,
            insertOperationInfo.f3,
            Collections.emptyMap());
}
Also used: Partition(org.apache.hadoop.hive.ql.metadata.Partition), Table(org.apache.hadoop.hive.ql.metadata.Table), HiveParserQB(org.apache.flink.table.planner.delegation.hive.copy.HiveParserQB), SinkModifyOperation(org.apache.flink.table.operations.SinkModifyOperation), LinkedHashMap(java.util.LinkedHashMap), QBMetaData(org.apache.hadoop.hive.ql.parse.QBMetaData), Map(java.util.Map), SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException), ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier), PlannerQueryOperation(org.apache.flink.table.planner.operations.PlannerQueryOperation), QueryOperation(org.apache.flink.table.operations.QueryOperation)
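
To illustrate the static-partition branch above, this hedged sketch (plain Java, invented column names and values, not project code) shows the staticPartSpec map that results for a table partitioned by (dt, country) when the destination is a fully specified partition:

import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class StaticPartSpecSketch {
    public static void main(String[] args) {
        List<String> partCols = Arrays.asList("dt", "country");          // partition columns, in table order
        List<String> destPartValues = Arrays.asList("2024-01-01", "US"); // values carried by the dest Partition
        Map<String, String> staticPartSpec = new LinkedHashMap<>();
        for (int i = 0; i < partCols.size(); i++) {
            staticPartSpec.put(partCols.get(i), destPartValues.get(i));
        }
        System.out.println(staticPartSpec); // {dt=2024-01-01, country=US}
    }
}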

Example 78 with ObjectIdentifier

use of org.apache.flink.table.catalog.ObjectIdentifier in project flink by splunk.

the class LogicalTypeJsonDeserializer method deserializeStructuredType.

private static LogicalType deserializeStructuredType(JsonNode logicalTypeNode, SerdeContext serdeContext) {
    // inline structured types have no object identifier
    if (!logicalTypeNode.has(FIELD_NAME_OBJECT_IDENTIFIER)) {
        return deserializeStructuredTypeFromPlan(logicalTypeNode, serdeContext);
    }
    // for catalog structured types
    final ObjectIdentifier identifier = ObjectIdentifierJsonDeserializer.deserialize(logicalTypeNode.get(FIELD_NAME_OBJECT_IDENTIFIER).asText(), serdeContext);
    final CatalogPlanRestore restoreStrategy = serdeContext.getConfiguration().get(TableConfigOptions.PLAN_RESTORE_CATALOG_OBJECTS);
    switch(restoreStrategy) {
        case ALL:
            if (logicalTypeNode.has(FIELD_NAME_ATTRIBUTES)) {
                return deserializeStructuredTypeFromPlan(logicalTypeNode, serdeContext);
            }
            return deserializeUserDefinedTypeFromCatalog(identifier, serdeContext);
        case ALL_ENFORCED:
            return deserializeStructuredTypeFromPlan(logicalTypeNode, serdeContext);
        case IDENTIFIER:
            return deserializeUserDefinedTypeFromCatalog(identifier, serdeContext);
        default:
            throw new TableException("Unsupported catalog restore strategy.");
    }
}
Also used: TableException(org.apache.flink.table.api.TableException), CatalogPlanRestore(org.apache.flink.table.api.config.TableConfigOptions.CatalogPlanRestore), ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier)
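
The restore strategy consulted by the switch above is an ordinary table option. A minimal sketch of setting it (assuming a plain TableConfig; not taken from the project):

import org.apache.flink.table.api.TableConfig;
import org.apache.flink.table.api.config.TableConfigOptions;
import org.apache.flink.table.api.config.TableConfigOptions.CatalogPlanRestore;

public class RestoreStrategySketch {
    public static void main(String[] args) {
        TableConfig config = TableConfig.getDefault();
        // ALL: prefer attributes serialized in the plan, otherwise look the type up in the catalog
        // ALL_ENFORCED: only ever use what is in the plan
        // IDENTIFIER: always resolve the type through the catalog
        config.getConfiguration()
                .set(TableConfigOptions.PLAN_RESTORE_CATALOG_OBJECTS, CatalogPlanRestore.ALL_ENFORCED);
        System.out.println(config.getConfiguration().get(TableConfigOptions.PLAN_RESTORE_CATALOG_OBJECTS));
    }
}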

Example 79 with ObjectIdentifier

use of org.apache.flink.table.catalog.ObjectIdentifier in project flink by splunk.

the class LogicalTypeJsonDeserializer method deserializeStructuredTypeFromPlan.

private static LogicalType deserializeStructuredTypeFromPlan(JsonNode logicalTypeNode, SerdeContext serdeContext) {
    final ObjectIdentifier identifier;
    if (logicalTypeNode.has(FIELD_NAME_OBJECT_IDENTIFIER)) {
        identifier = ObjectIdentifierJsonDeserializer.deserialize(logicalTypeNode.get(FIELD_NAME_OBJECT_IDENTIFIER).asText(), serdeContext);
    } else {
        identifier = null;
    }
    final Class<?> implementationClass;
    if (logicalTypeNode.has(FIELD_NAME_IMPLEMENTATION_CLASS)) {
        implementationClass = loadClass(logicalTypeNode.get(FIELD_NAME_IMPLEMENTATION_CLASS).asText(), serdeContext, "structured type");
    } else {
        implementationClass = null;
    }
    final StructuredType.Builder builder;
    if (identifier != null && implementationClass != null) {
        builder = StructuredType.newBuilder(identifier, implementationClass);
    } else if (identifier != null) {
        builder = StructuredType.newBuilder(identifier);
    } else {
        builder = StructuredType.newBuilder(implementationClass);
    }
    if (logicalTypeNode.has(FIELD_NAME_DESCRIPTION)) {
        builder.description(logicalTypeNode.get(FIELD_NAME_DESCRIPTION).asText());
    }
    final ArrayNode attributeNodes = (ArrayNode) logicalTypeNode.get(FIELD_NAME_ATTRIBUTES);
    final List<StructuredAttribute> attributes = new ArrayList<>();
    for (JsonNode attributeNode : attributeNodes) {
        final String attributeName = attributeNode.get(FIELD_NAME_ATTRIBUTE_NAME).asText();
        final LogicalType attributeType = deserialize(attributeNode.get(FIELD_NAME_ATTRIBUTE_TYPE), serdeContext);
        final String attributeDescription;
        if (attributeNode.has(FIELD_NAME_ATTRIBUTE_DESCRIPTION)) {
            attributeDescription = attributeNode.get(FIELD_NAME_ATTRIBUTE_DESCRIPTION).asText();
        } else {
            attributeDescription = null;
        }
        attributes.add(new StructuredAttribute(attributeName, attributeType, attributeDescription));
    }
    builder.attributes(attributes);
    if (logicalTypeNode.has(FIELD_NAME_FINAL)) {
        builder.setFinal(logicalTypeNode.get(FIELD_NAME_FINAL).asBoolean());
    }
    if (logicalTypeNode.has(FIELD_NAME_INSTANTIABLE)) {
        builder.setInstantiable(logicalTypeNode.get(FIELD_NAME_INSTANTIABLE).asBoolean());
    }
    if (logicalTypeNode.has(FIELD_NAME_COMPARISON)) {
        builder.comparison(StructuredComparison.valueOf(logicalTypeNode.get(FIELD_NAME_COMPARISON).asText()));
    }
    if (logicalTypeNode.has(FIELD_NAME_SUPER_TYPE)) {
        final StructuredType superType = (StructuredType) deserialize(logicalTypeNode.get(FIELD_NAME_SUPER_TYPE), serdeContext);
        builder.superType(superType);
    }
    return builder.build();
}
Also used: ArrayList(java.util.ArrayList), StructuredAttribute(org.apache.flink.table.types.logical.StructuredType.StructuredAttribute), LogicalType(org.apache.flink.table.types.logical.LogicalType), JsonNode(org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode), ArrayNode(org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ArrayNode), ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier), StructuredType(org.apache.flink.table.types.logical.StructuredType)
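
The builder calls used by deserializeStructuredTypeFromPlan can also be exercised directly. A hedged sketch (type and attribute names invented, not project code) that builds a catalog-registered structured type:

import java.util.Arrays;
import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.types.logical.IntType;
import org.apache.flink.table.types.logical.StructuredType;
import org.apache.flink.table.types.logical.StructuredType.StructuredAttribute;
import org.apache.flink.table.types.logical.VarCharType;

public class StructuredTypeSketch {
    public static void main(String[] args) {
        StructuredType addressType =
                StructuredType.newBuilder(ObjectIdentifier.of("cat", "db", "Address"))
                        .description("a mailing address")
                        .attributes(
                                Arrays.asList(
                                        new StructuredAttribute("street", new VarCharType(100)),
                                        new StructuredAttribute("zip", new IntType(), "postal code")))
                        .setFinal(true)
                        .setInstantiable(true)
                        .build();
        System.out.println(addressType.asSummaryString());
    }
}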

Example 80 with ObjectIdentifier

use of org.apache.flink.table.catalog.ObjectIdentifier in project flink by splunk.

the class LogicalTypeJsonDeserializer method deserializeDistinctType.

private static LogicalType deserializeDistinctType(JsonNode logicalTypeNode, SerdeContext serdeContext) {
    final ObjectIdentifier identifier = ObjectIdentifierJsonDeserializer.deserialize(logicalTypeNode.get(FIELD_NAME_OBJECT_IDENTIFIER).asText(), serdeContext);
    final CatalogPlanRestore restoreStrategy = serdeContext.getConfiguration().get(TableConfigOptions.PLAN_RESTORE_CATALOG_OBJECTS);
    switch(restoreStrategy) {
        case ALL:
            if (logicalTypeNode.has(FIELD_NAME_SOURCE_TYPE)) {
                return deserializeDistinctTypeFromPlan(identifier, logicalTypeNode, serdeContext);
            }
            return deserializeUserDefinedTypeFromCatalog(identifier, serdeContext);
        case ALL_ENFORCED:
            return deserializeDistinctTypeFromPlan(identifier, logicalTypeNode, serdeContext);
        case IDENTIFIER:
            return deserializeUserDefinedTypeFromCatalog(identifier, serdeContext);
        default:
            throw new TableException("Unsupported catalog restore strategy.");
    }
}
Also used: TableException(org.apache.flink.table.api.TableException), CatalogPlanRestore(org.apache.flink.table.api.config.TableConfigOptions.CatalogPlanRestore), ObjectIdentifier(org.apache.flink.table.catalog.ObjectIdentifier)
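
A distinct type like the one deserialized above pairs an ObjectIdentifier with a source type. A minimal sketch (names invented, not project code):

import org.apache.flink.table.catalog.ObjectIdentifier;
import org.apache.flink.table.types.logical.DistinctType;
import org.apache.flink.table.types.logical.IntType;

public class DistinctTypeSketch {
    public static void main(String[] args) {
        DistinctType ageType =
                DistinctType.newBuilder(ObjectIdentifier.of("cat", "db", "Age"), new IntType())
                        .description("age in years")
                        .build();
        System.out.println(ageType.asSummaryString());
    }
}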

Aggregations

ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 185
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 66
UnresolvedIdentifier (org.apache.flink.table.catalog.UnresolvedIdentifier): 60
ValidationException (org.apache.flink.table.api.ValidationException): 59
HashMap (java.util.HashMap): 57
LinkedHashMap (java.util.LinkedHashMap): 48
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 42
ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable): 41
ResolvedCatalogTable (org.apache.flink.table.catalog.ResolvedCatalogTable): 33
ArrayList (java.util.ArrayList): 30
Map (java.util.Map): 27
UniqueConstraint (org.apache.flink.table.api.constraints.UniqueConstraint): 27
CatalogPartitionSpec (org.apache.flink.table.catalog.CatalogPartitionSpec): 24
NotNullConstraint (org.apache.flink.table.planner.delegation.hive.copy.HiveParserBaseSemanticAnalyzer.NotNullConstraint): 24
TableException (org.apache.flink.table.api.TableException): 23
TableSchema (org.apache.flink.table.api.TableSchema): 23
CatalogView (org.apache.flink.table.catalog.CatalogView): 21
QueryOperation (org.apache.flink.table.operations.QueryOperation): 18
HiveParserASTNode (org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode): 18
List (java.util.List): 16