Example 1 with UnresolvedIdentifier

Use of org.apache.flink.table.catalog.UnresolvedIdentifier in project flink by apache.

From the class HiveParserDMLHelper, method createInsertOperationInfo:

public Tuple4<ObjectIdentifier, QueryOperation, Map<String, String>, Boolean> createInsertOperationInfo(
        RelNode queryRelNode,
        Table destTable,
        Map<String, String> staticPartSpec,
        List<String> destSchema,
        boolean overwrite) throws SemanticException {
    // sanity check
    Preconditions.checkArgument(
            queryRelNode instanceof Project
                    || queryRelNode instanceof Sort
                    || queryRelNode instanceof LogicalDistribution,
            "Expect top RelNode to be Project, Sort, or LogicalDistribution, actually got "
                    + queryRelNode);
    if (!(queryRelNode instanceof Project)) {
        RelNode parent = ((SingleRel) queryRelNode).getInput();
        // SEL + SORT or SEL + DIST + LIMIT
        Preconditions.checkArgument(
                parent instanceof Project || parent instanceof LogicalDistribution,
                "Expect input to be a Project or LogicalDistribution, actually got " + parent);
        if (parent instanceof LogicalDistribution) {
            RelNode grandParent = ((LogicalDistribution) parent).getInput();
            Preconditions.checkArgument(grandParent instanceof Project, "Expect input of LogicalDistribution to be a Project, actually got " + grandParent);
        }
    }
    // handle dest schema, e.g. insert into dest(.,.,.) select ...
    queryRelNode = handleDestSchema((SingleRel) queryRelNode, destTable, destSchema, staticPartSpec.keySet());
    // track each target col and its expected type
    RelDataTypeFactory typeFactory = plannerContext.getTypeFactory();
    LinkedHashMap<String, RelDataType> targetColToCalcType = new LinkedHashMap<>();
    List<TypeInfo> targetHiveTypes = new ArrayList<>();
    List<FieldSchema> allCols = new ArrayList<>(destTable.getCols());
    allCols.addAll(destTable.getPartCols());
    for (FieldSchema col : allCols) {
        TypeInfo hiveType = TypeInfoUtils.getTypeInfoFromTypeString(col.getType());
        targetHiveTypes.add(hiveType);
        targetColToCalcType.put(col.getName(), HiveParserTypeConverter.convert(hiveType, typeFactory));
    }
    // add static partitions to query source
    if (!staticPartSpec.isEmpty()) {
        if (queryRelNode instanceof Project) {
            queryRelNode = replaceProjectForStaticPart((Project) queryRelNode, staticPartSpec, destTable, targetColToCalcType);
        } else if (queryRelNode instanceof Sort) {
            Sort sort = (Sort) queryRelNode;
            RelNode oldInput = sort.getInput();
            RelNode newInput;
            if (oldInput instanceof LogicalDistribution) {
                newInput = replaceDistForStaticParts((LogicalDistribution) oldInput, destTable, staticPartSpec, targetColToCalcType);
            } else {
                newInput = replaceProjectForStaticPart((Project) oldInput, staticPartSpec, destTable, targetColToCalcType);
                // we may need to shift the field collations
                final int numDynmPart = destTable.getTTable().getPartitionKeys().size() - staticPartSpec.size();
                if (!sort.getCollation().getFieldCollations().isEmpty() && numDynmPart > 0) {
                    sort.replaceInput(0, null);
                    sort = LogicalSort.create(
                            newInput,
                            shiftRelCollation(
                                    sort.getCollation(),
                                    (Project) oldInput,
                                    staticPartSpec.size(),
                                    numDynmPart),
                            sort.offset,
                            sort.fetch);
                }
            }
            sort.replaceInput(0, newInput);
            queryRelNode = sort;
        } else {
            queryRelNode = replaceDistForStaticParts((LogicalDistribution) queryRelNode, destTable, staticPartSpec, targetColToCalcType);
        }
    }
    // add type conversions
    queryRelNode = addTypeConversions(
            plannerContext.getCluster().getRexBuilder(),
            queryRelNode,
            new ArrayList<>(targetColToCalcType.values()),
            targetHiveTypes,
            funcConverter);
    // create identifier
    List<String> targetTablePath = Arrays.asList(destTable.getDbName(), destTable.getTableName());
    UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(targetTablePath);
    ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
    return Tuple4.of(identifier, new PlannerQueryOperation(queryRelNode), staticPartSpec, overwrite);
}
Also used:
PlannerQueryOperation (org.apache.flink.table.planner.operations.PlannerQueryOperation)
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema)
ArrayList (java.util.ArrayList)
UnresolvedIdentifier (org.apache.flink.table.catalog.UnresolvedIdentifier)
RelDataType (org.apache.calcite.rel.type.RelDataType)
SingleRel (org.apache.calcite.rel.SingleRel)
LogicalDistribution (org.apache.flink.table.planner.plan.nodes.hive.LogicalDistribution)
TypeInfo (org.apache.hadoop.hive.serde2.typeinfo.TypeInfo)
LinkedHashMap (java.util.LinkedHashMap)
Project (org.apache.calcite.rel.core.Project)
LogicalProject (org.apache.calcite.rel.logical.LogicalProject)
RelNode (org.apache.calcite.rel.RelNode)
RelDataTypeFactory (org.apache.calcite.rel.type.RelDataTypeFactory)
LogicalSort (org.apache.calcite.rel.logical.LogicalSort)
Sort (org.apache.calcite.rel.core.Sort)
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier)
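The identifier handling at the end of the method is where UnresolvedIdentifier comes in: the two-part path (database, table) is wrapped into an UnresolvedIdentifier, and the CatalogManager expands it to a fully qualified ObjectIdentifier using the session's current catalog. A minimal sketch of just that step, with hypothetical database and table names:

import java.util.Arrays;
import org.apache.flink.table.catalog.UnresolvedIdentifier;

public class IdentifierSketch {
    public static void main(String[] args) {
        // Two-part path (database, table), mirroring
        // Arrays.asList(destTable.getDbName(), destTable.getTableName()) above.
        // "default" and "dest_table" are hypothetical names.
        UnresolvedIdentifier unresolved =
                UnresolvedIdentifier.of(Arrays.asList("default", "dest_table"));
        System.out.println(unresolved);
        // Given a CatalogManager, catalogManager.qualifyIdentifier(unresolved)
        // fills in the missing catalog part from the current session, yielding
        // a fully qualified ObjectIdentifier such as `myhive`.`default`.`dest_table`.
    }
}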

Example 2 with UnresolvedIdentifier

Use of org.apache.flink.table.catalog.UnresolvedIdentifier in project flink by apache.

From the class TableEnvironmentImpl, method dropTemporaryView:

@Override
public boolean dropTemporaryView(String path) {
    UnresolvedIdentifier unresolvedIdentifier = getParser().parseIdentifier(path);
    ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
    try {
        catalogManager.dropTemporaryView(identifier, false);
        return true;
    } catch (ValidationException e) {
        // nothing was dropped, e.g. no temporary view exists under this identifier
        return false;
    }
}
Also used: ValidationException (org.apache.flink.table.api.ValidationException), UnresolvedIdentifier (org.apache.flink.table.catalog.UnresolvedIdentifier), ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier)
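This is the implementation behind TableEnvironment#dropTemporaryView(String): the path is parsed into an UnresolvedIdentifier, qualified, and the drop is attempted, with a ValidationException translated into a false return value. A minimal usage sketch (the view name is made up):

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class DropViewSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv =
                TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        // Register a temporary view under a hypothetical name.
        tEnv.createTemporaryView("tmp_view", tEnv.fromValues(1, 2, 3));
        System.out.println(tEnv.dropTemporaryView("tmp_view")); // true: the view existed
        System.out.println(tEnv.dropTemporaryView("tmp_view")); // false: already gone
    }
}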

Example 3 with UnresolvedIdentifier

Use of org.apache.flink.table.catalog.UnresolvedIdentifier in project flink by apache.

From the class TableEnvironmentImpl, method createTemporaryFunction:

@Override
public void createTemporaryFunction(String path, UserDefinedFunction functionInstance) {
    final UnresolvedIdentifier unresolvedIdentifier = getParser().parseIdentifier(path);
    functionCatalog.registerTemporaryCatalogFunction(unresolvedIdentifier, functionInstance, false);
}
Also used: UnresolvedIdentifier (org.apache.flink.table.catalog.UnresolvedIdentifier)
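The path may have one to three parts; parseIdentifier turns it into an UnresolvedIdentifier before the function catalog registers the instance. A short sketch with a made-up scalar function:

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.functions.ScalarFunction;

public class TempFunctionSketch {
    // Hypothetical UDF, used only for illustration.
    public static class UpperUdf extends ScalarFunction {
        public String eval(String s) {
            return s == null ? null : s.toUpperCase();
        }
    }

    public static void main(String[] args) {
        TableEnvironment tEnv =
                TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        // A simple name is qualified against the current catalog and database;
        // a fully qualified path such as "cat.db.my_upper" is accepted as well.
        tEnv.createTemporaryFunction("my_upper", new UpperUdf());
        tEnv.sqlQuery("SELECT my_upper('flink')").execute().print();
    }
}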

Example 4 with UnresolvedIdentifier

Use of org.apache.flink.table.catalog.UnresolvedIdentifier in project flink by apache.

From the class TableEnvironmentImpl, method createTemporaryView:

@Override
public void createTemporaryView(String path, Table view) {
    Preconditions.checkNotNull(path, "Path must not be null.");
    Preconditions.checkNotNull(view, "Table view must not be null.");
    UnresolvedIdentifier identifier = getParser().parseIdentifier(path);
    createTemporaryView(identifier, view);
}
Also used: UnresolvedIdentifier (org.apache.flink.table.catalog.UnresolvedIdentifier)
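Because the path goes through parseIdentifier, it can also be fully qualified. A sketch using the names a fresh TableEnvironment assumes by default (adjust if the session is configured differently):

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableEnvironment;

public class CreateViewSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv =
                TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        Table t = tEnv.fromValues(1, 2, 3);
        // Three-part path: catalog.database.view. "default_catalog" and
        // "default_database" are the defaults of a fresh TableEnvironment.
        tEnv.createTemporaryView("default_catalog.default_database.numbers", t);
        tEnv.from("numbers").execute().print();
    }
}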

Example 5 with UnresolvedIdentifier

Use of org.apache.flink.table.catalog.UnresolvedIdentifier in project flink by apache.

From the class TableEnvironmentImpl, method dropTemporaryTable:

@Override
public boolean dropTemporaryTable(String path) {
    UnresolvedIdentifier unresolvedIdentifier = getParser().parseIdentifier(path);
    ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
    try {
        catalogManager.dropTemporaryTable(identifier, false);
        return true;
    } catch (ValidationException e) {
        // nothing was dropped, e.g. no temporary table exists under this identifier
        return false;
    }
}
Also used: ValidationException (org.apache.flink.table.api.ValidationException), UnresolvedIdentifier (org.apache.flink.table.catalog.UnresolvedIdentifier), ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier)
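dropTemporaryTable mirrors dropTemporaryView but only removes temporary tables; attempting to drop the wrong kind of object is reported as false rather than thrown. A small sketch of that contrast (the name is made up, and the false/true results assume the behavior described above):

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class DropTableVsViewSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv =
                TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        tEnv.createTemporaryView("tmp", tEnv.fromValues(1, 2, 3));
        // "tmp" is a temporary view, not a temporary table, so this returns false.
        System.out.println(tEnv.dropTemporaryTable("tmp"));
        // The matching drop method succeeds.
        System.out.println(tEnv.dropTemporaryView("tmp"));
    }
}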

Aggregations

UnresolvedIdentifier (org.apache.flink.table.catalog.UnresolvedIdentifier): 25 usages
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 20 usages
ValidationException (org.apache.flink.table.api.ValidationException): 9 usages
ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable): 7 usages
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 5 usages
LinkedHashMap (java.util.LinkedHashMap): 4 usages
ResolvedCatalogTable (org.apache.flink.table.catalog.ResolvedCatalogTable): 4 usages
ArrayList (java.util.ArrayList): 3 usages
HashMap (java.util.HashMap): 3 usages
List (java.util.List): 3 usages
Map (java.util.Map): 3 usages
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 3 usages
CatalogFunction (org.apache.flink.table.catalog.CatalogFunction): 3 usages
CatalogFunctionImpl (org.apache.flink.table.catalog.CatalogFunctionImpl): 3 usages
FunctionLanguage (org.apache.flink.table.catalog.FunctionLanguage): 3 usages
Optional (java.util.Optional): 2 usages
RelHint (org.apache.calcite.rel.hint.RelHint): 2 usages
SqlIdentifier (org.apache.calcite.sql.SqlIdentifier): 2 usages
SqlNode (org.apache.calcite.sql.SqlNode): 2 usages
SqlAlterViewAs (org.apache.flink.sql.parser.ddl.SqlAlterViewAs): 2 usages