Example 41 with ValidationException

Use of org.apache.flink.table.api.ValidationException in the Apache Flink project.

From the class CatalogSourceTable, method computeContextResolvedTable:

private ContextResolvedTable computeContextResolvedTable(FlinkContext context, Map<String, String> hintedOptions) {
    ContextResolvedTable contextResolvedTable = schemaTable.getContextResolvedTable();
    // Without hinted options there is nothing to validate or merge.
    if (hintedOptions.isEmpty()) {
        return contextResolvedTable;
    }
    final ReadableConfig config = context.getTableConfig().getConfiguration();
    // Overriding table options via hints must be enabled explicitly.
    if (!config.get(TableConfigOptions.TABLE_DYNAMIC_TABLE_OPTIONS_ENABLED)) {
        throw new ValidationException(String.format("The '%s' hint is allowed only when the config option '%s' is set to true.", FlinkHints.HINT_NAME_OPTIONS, TableConfigOptions.TABLE_DYNAMIC_TABLE_OPTIONS_ENABLED.key()));
    }
    // Hints enrich connector options, which views do not have.
    if (contextResolvedTable.getResolvedTable().getTableKind() == TableKind.VIEW) {
        throw new ValidationException(String.format("View '%s' cannot be enriched with new options. " + "Hints can only be applied to tables.", contextResolvedTable.getIdentifier()));
    }
    // Merge the hinted options over the table's persisted options.
    return contextResolvedTable.copy(FlinkHints.mergeTableOptions(hintedOptions, contextResolvedTable.getResolvedTable().getOptions()));
}
Also used: org.apache.flink.configuration.ReadableConfig, org.apache.flink.table.api.ValidationException, org.apache.flink.table.catalog.ContextResolvedTable
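
To see the check from the user's side, here is a minimal sketch (not part of the Flink sources) that drives the OPTIONS hint through a TableEnvironment. The table name, the datagen connector settings, and the printed messages are illustrative assumptions; only the TABLE_DYNAMIC_TABLE_OPTIONS_ENABLED option and the /*+ OPTIONS(...) */ hint come from the snippet above.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.ValidationException;
import org.apache.flink.table.api.config.TableConfigOptions;

public class OptionsHintSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        // Hypothetical bounded source; any connector works for the validation path.
        tEnv.executeSql(
                "CREATE TABLE src (id INT) WITH ("
                        + "'connector' = 'datagen', 'number-of-rows' = '3')");

        // With the option disabled, the OPTIONS hint trips the first
        // ValidationException in computeContextResolvedTable above.
        tEnv.getConfig().getConfiguration()
                .set(TableConfigOptions.TABLE_DYNAMIC_TABLE_OPTIONS_ENABLED, false);
        try {
            tEnv.executeSql("SELECT * FROM src /*+ OPTIONS('number-of-rows' = '5') */").print();
        } catch (ValidationException e) {
            System.out.println("Rejected as expected: " + e.getMessage());
        }

        // Once enabled, the hinted options are merged over the table's own options.
        tEnv.getConfig().getConfiguration()
                .set(TableConfigOptions.TABLE_DYNAMIC_TABLE_OPTIONS_ENABLED, true);
        tEnv.executeSql("SELECT * FROM src /*+ OPTIONS('number-of-rows' = '5') */").print();
    }
}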

Example 42 with ValidationException

Use of org.apache.flink.table.api.ValidationException in the Apache Flink project.

From the class TypeInferenceUtil, method inferOutputType:

/**
 * Infers an output type using the given {@link TypeStrategy}. It assumes that input arguments
 * have been adapted before if necessary.
 */
public static DataType inferOutputType(CallContext callContext, TypeStrategy outputTypeStrategy) {
    final Optional<DataType> potentialOutputType = outputTypeStrategy.inferType(callContext);
    if (!potentialOutputType.isPresent()) {
        throw new ValidationException("Could not infer an output type for the given arguments.");
    }
    final DataType outputType = potentialOutputType.get();
    if (isUnknown(outputType)) {
        throw new ValidationException("Could not infer an output type for the given arguments. Untyped NULL received.");
    }
    return outputType;
}
Also used: org.apache.flink.table.api.ValidationException, org.apache.flink.table.types.DataType
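
TypeStrategy is the public interface this utility consumes. The sketch below is a hypothetical strategy (the class name and the first-argument rule are assumptions, not Flink code) showing how an empty Optional feeds the first ValidationException above:

import java.util.Optional;
import org.apache.flink.table.types.DataType;
import org.apache.flink.table.types.inference.CallContext;
import org.apache.flink.table.types.inference.TypeStrategy;

// Hypothetical strategy: the output type mirrors the first argument's type.
final class FirstArgumentTypeStrategy implements TypeStrategy {
    @Override
    public Optional<DataType> inferType(CallContext callContext) {
        if (callContext.getArgumentDataTypes().isEmpty()) {
            // An empty Optional makes inferOutputType throw
            // "Could not infer an output type for the given arguments."
            return Optional.empty();
        }
        return Optional.of(callContext.getArgumentDataTypes().get(0));
    }
}

Flink ships a comparable built-in strategy as TypeStrategies.argument(0), which is usually preferable to hand-rolling one.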

Example 43 with ValidationException

Use of org.apache.flink.table.api.ValidationException in the Apache Flink project.

From the class FlinkCalciteSqlValidator, method validateJoin:

@Override
protected void validateJoin(SqlJoin join, SqlValidatorScope scope) {
    // temporarily forbid the common predicates until the problem is fixed (see FLINK-7865).
    if (join.getJoinType() == JoinType.LEFT && SqlUtil.stripAs(join.getRight()).getKind() == SqlKind.COLLECTION_TABLE) {
        SqlNode right = SqlUtil.stripAs(join.getRight());
        if (right instanceof SqlBasicCall) {
            SqlBasicCall call = (SqlBasicCall) right;
            SqlNode operand0 = call.operand(0);
            if (operand0 instanceof SqlBasicCall && ((SqlBasicCall) operand0).getOperator() instanceof SqlWindowTableFunction) {
                return;
            }
        }
        final SqlNode condition = join.getCondition();
        if (condition != null && (!SqlUtil.isLiteral(condition) || ((SqlLiteral) condition).getValueAs(Boolean.class) != Boolean.TRUE)) {
            throw new ValidationException(String.format("Left outer joins with a table function do not accept a predicate such as %s. " + "Only literal TRUE is accepted.", condition));
        }
    }
    super.validateJoin(join, scope);
}
Also used: org.apache.flink.table.api.ValidationException, org.apache.calcite.sql.SqlBasicCall, org.apache.calcite.sql.SqlLiteral, org.apache.calcite.sql.SqlWindowTableFunction, org.apache.calcite.sql.SqlNode
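
At the SQL level, the restriction means a left join against a table function must use ON TRUE as its condition. A sketch, assuming a hypothetical split UDTF and a datagen table (both illustrative; only the ON TRUE rule comes from validateJoin above):

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.functions.TableFunction;

public class LeftJoinTableFunctionSketch {
    // Hypothetical UDTF that splits a line into words.
    public static class SplitFunction extends TableFunction<String> {
        public void eval(String line) {
            for (String word : line.split(" ")) {
                collect(word);
            }
        }
    }

    public static void main(String[] args) {
        TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        tEnv.createTemporarySystemFunction("split_words", SplitFunction.class);
        tEnv.executeSql(
                "CREATE TABLE lines (id INT, line STRING) WITH ("
                        + "'connector' = 'datagen', 'number-of-rows' = '3')");
        // Accepted: literal TRUE as the join condition.
        tEnv.executeSql(
                "SELECT id, word FROM lines "
                        + "LEFT JOIN LATERAL TABLE(split_words(line)) AS t(word) ON TRUE").print();
        // Rejected by validateJoin above: any other predicate,
        // e.g. "... ON id > 0", raises the ValidationException.
    }
}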

Example 44 with ValidationException

Use of org.apache.flink.table.api.ValidationException in the Apache Flink project.

From the class FunctionCatalogOperatorTable, method verifyFunctionKind:

/**
 * Verifies which kinds of functions are allowed to be returned from the catalog given the
 * context information.
 */
private boolean verifyFunctionKind(@Nullable SqlFunctionCategory category, ContextResolvedFunction resolvedFunction) {
    final FunctionDefinition definition = resolvedFunction.getDefinition();
    // built-in functions without implementation are handled separately
    if (definition instanceof BuiltInFunctionDefinition) {
        final BuiltInFunctionDefinition builtInFunction = (BuiltInFunctionDefinition) definition;
        if (!builtInFunction.hasRuntimeImplementation()) {
            return false;
        }
    }
    final FunctionKind kind = definition.getKind();
    if (kind == FunctionKind.TABLE) {
        return true;
    } else if (kind == FunctionKind.SCALAR || kind == FunctionKind.AGGREGATE || kind == FunctionKind.TABLE_AGGREGATE) {
        if (category != null && category.isTableFunction()) {
            throw new ValidationException(String.format("Function '%s' cannot be used as a table function.", resolvedFunction));
        }
        return true;
    }
    return false;
}
Also used: org.apache.flink.table.api.ValidationException, org.apache.flink.table.functions.BuiltInFunctionDefinition, org.apache.flink.table.functions.AggregateFunctionDefinition, org.apache.flink.table.functions.TableFunctionDefinition, org.apache.flink.table.functions.ScalarFunctionDefinition, org.apache.flink.table.functions.FunctionDefinition, org.apache.flink.table.functions.FunctionKind
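
The practical effect is that a scalar function cannot appear at a table-function call site. A sketch with a hypothetical scalar UDF (the function and queries are assumptions; how exactly the error surfaces may vary by Flink version):

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.ValidationException;
import org.apache.flink.table.functions.ScalarFunction;

public class FunctionKindSketch {
    // Hypothetical scalar function (FunctionKind.SCALAR).
    public static class PlusOne extends ScalarFunction {
        public int eval(int x) {
            return x + 1;
        }
    }

    public static void main(String[] args) {
        TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        tEnv.createTemporarySystemFunction("plus_one", PlusOne.class);
        // Fine: a scalar call site.
        tEnv.sqlQuery("SELECT plus_one(1)");
        try {
            // A table-function call site for a scalar function trips verifyFunctionKind.
            tEnv.sqlQuery("SELECT * FROM LATERAL TABLE(plus_one(1))");
        } catch (ValidationException e) {
            System.out.println("Rejected as expected: " + e.getMessage());
        }
    }
}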

Example 45 with ValidationException

Use of org.apache.flink.table.api.ValidationException in the Apache Flink project.

From the class DynamicSinkUtils, method validateAndApplyMetadata:

private static void validateAndApplyMetadata(String tableDebugName, DynamicTableSink sink, ResolvedSchema schema, List<SinkAbilitySpec> sinkAbilitySpecs) {
    final List<Column> columns = schema.getColumns();
    final List<Integer> metadataColumns = extractPersistedMetadataColumns(schema);
    if (metadataColumns.isEmpty()) {
        return;
    }
    if (!(sink instanceof SupportsWritingMetadata)) {
        throw new ValidationException(String.format("Table '%s' declares persistable metadata columns, but the underlying %s " + "doesn't implement the %s interface. If the column should not " + "be persisted, it can be declared with the VIRTUAL keyword.", tableDebugName, DynamicTableSink.class.getSimpleName(), SupportsWritingMetadata.class.getSimpleName()));
    }
    final Map<String, DataType> metadataMap = ((SupportsWritingMetadata) sink).listWritableMetadata();
    metadataColumns.forEach(pos -> {
        final MetadataColumn metadataColumn = (MetadataColumn) columns.get(pos);
        final String metadataKey = metadataColumn.getMetadataKey().orElse(metadataColumn.getName());
        final LogicalType metadataType = metadataColumn.getDataType().getLogicalType();
        final DataType expectedMetadataDataType = metadataMap.get(metadataKey);
        // check that metadata key is valid
        if (expectedMetadataDataType == null) {
            throw new ValidationException(String.format("Invalid metadata key '%s' in column '%s' of table '%s'. " + "The %s class '%s' supports the following metadata keys for writing:\n%s", metadataKey, metadataColumn.getName(), tableDebugName, DynamicTableSink.class.getSimpleName(), sink.getClass().getName(), String.join("\n", metadataMap.keySet())));
        }
        // check that types are compatible
        if (!supportsExplicitCast(metadataType, expectedMetadataDataType.getLogicalType())) {
            if (metadataKey.equals(metadataColumn.getName())) {
                throw new ValidationException(String.format("Invalid data type for metadata column '%s' of table '%s'. " + "The column cannot be declared as '%s' because the type must be " + "castable to metadata type '%s'.", metadataColumn.getName(), tableDebugName, metadataType, expectedMetadataDataType.getLogicalType()));
            } else {
                throw new ValidationException(String.format("Invalid data type for metadata column '%s' with metadata key '%s' of table '%s'. " + "The column cannot be declared as '%s' because the type must be " + "castable to metadata type '%s'.", metadataColumn.getName(), metadataKey, tableDebugName, metadataType, expectedMetadataDataType.getLogicalType()));
            }
        }
    });
    sinkAbilitySpecs.add(new WritingMetadataSpec(createRequiredMetadataKeys(schema, sink), createConsumedType(schema, sink)));
}
Also used: org.apache.flink.table.planner.plan.abilities.sink.WritingMetadataSpec, org.apache.flink.table.catalog.Column.MetadataColumn, org.apache.flink.table.api.ValidationException, org.apache.flink.table.catalog.Column, org.apache.flink.table.connector.sink.abilities.SupportsWritingMetadata, org.apache.flink.table.types.DataType, org.apache.calcite.rel.type.RelDataType, org.apache.flink.table.types.logical.LogicalType
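
From the DDL side, the first check fires for persisted (non-VIRTUAL) metadata columns. A sketch, assuming the blackhole connector's sink does not implement SupportsWritingMetadata and using 'timestamp' as an illustrative metadata key:

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.ValidationException;

public class PersistedMetadataSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        // No VIRTUAL keyword, so 'ts' is a persisted metadata column that the
        // planner will ask the sink to write.
        tEnv.executeSql(
                "CREATE TABLE sink_t ("
                        + " id INT,"
                        + " ts TIMESTAMP_LTZ(3) METADATA FROM 'timestamp'"
                        + ") WITH ('connector' = 'blackhole')");
        try {
            // validateAndApplyMetadata runs when the sink is planned, not at DDL time.
            tEnv.executeSql("INSERT INTO sink_t SELECT 1, CURRENT_TIMESTAMP");
        } catch (ValidationException e) {
            System.out.println("Rejected as expected: " + e.getMessage());
        }
        // Per the error message above, declaring the column with the VIRTUAL
        // keyword excludes it from persistence and skips this check.
    }
}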

Aggregations

ValidationException (org.apache.flink.table.api.ValidationException): 143 usages
DataType (org.apache.flink.table.types.DataType): 25 usages
Test (org.junit.Test): 23 usages
HashMap (java.util.HashMap): 21 usages
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 19 usages
LogicalType (org.apache.flink.table.types.logical.LogicalType): 18 usages
TableException (org.apache.flink.table.api.TableException): 17 usages
List (java.util.List): 14 usages
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 14 usages
QueryOperation (org.apache.flink.table.operations.QueryOperation): 14 usages
LinkedHashMap (java.util.LinkedHashMap): 13 usages
DescriptorProperties (org.apache.flink.table.descriptors.DescriptorProperties): 13 usages
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 12 usages
Expression (org.apache.flink.table.expressions.Expression): 12 usages
TableSchema (org.apache.flink.table.api.TableSchema): 11 usages
Catalog (org.apache.flink.table.catalog.Catalog): 11 usages
ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable): 11 usages
ArrayList (java.util.ArrayList): 10 usages
Map (java.util.Map): 10 usages
Internal (org.apache.flink.annotation.Internal): 10 usages