Search in sources:

Example 96 with ValidationException

use of org.apache.flink.table.api.ValidationException in project flink by apache.

Source: class FactoryUtil, method enrichNoMatchingConnectorError.

/**
 * Produces a descriptive {@link ValidationException} explaining why no matching
 * connector factory was found for {@code factoryClass}.
 *
 * <p>If the connector identifier cannot be resolved at all, the discovery failure is
 * wrapped. If a factory exists but only supports the opposite direction (source vs.
 * sink), a targeted message is returned; otherwise a generic "neither interface"
 * message is produced.
 *
 * @param factoryClass the factory interface that was being searched for
 * @param context table factory context providing the class loader
 * @param connectorOption the value of the 'connector' option
 * @return an exception with an enriched, user-facing message (never {@code null})
 */
private static ValidationException enrichNoMatchingConnectorError(Class<?> factoryClass, DynamicTableFactory.Context context, String connectorOption) {
    final DynamicTableFactory discoveredFactory;
    try {
        discoveredFactory = discoverFactory(context.getClassLoader(), DynamicTableFactory.class, connectorOption);
    } catch (ValidationException e) {
        // The connector identifier itself could not be resolved; preserve the cause.
        return new ValidationException(
                String.format(
                        "Cannot discover a connector using option: %s",
                        stringifyOption(CONNECTOR.key(), connectorOption)),
                e);
    }
    final Class<?> sourceInterface = DynamicTableSourceFactory.class;
    final Class<?> sinkInterface = DynamicTableSinkFactory.class;
    final boolean actsAsSource = sourceInterface.isAssignableFrom(discoveredFactory.getClass());
    final boolean actsAsSink = sinkInterface.isAssignableFrom(discoveredFactory.getClass());
    // Tailor the message for a better exception: a source lookup that found a
    // sink-only connector (or vice versa) gets a precise explanation.
    if (sourceInterface.equals(factoryClass) && actsAsSink) {
        // Discovering a source, but this connector is sink-only.
        return new ValidationException(String.format("Connector '%s' can only be used as a sink. It cannot be used as a source.", connectorOption));
    }
    if (sinkInterface.equals(factoryClass) && actsAsSource) {
        // Discovering a sink, but this connector is source-only.
        return new ValidationException(String.format("Connector '%s' can only be used as a source. It cannot be used as a sink.", connectorOption));
    }
    return new ValidationException(String.format("Connector '%s' does neither implement the '%s' nor the '%s' interface.", connectorOption, sourceInterface.getName(), sinkInterface.getName()));
}
Also used : ValidationException(org.apache.flink.table.api.ValidationException)

Example 97 with ValidationException

use of org.apache.flink.table.api.ValidationException in project flink by apache.

Source: class ValueLiteralExpression, method validateValueDataType.

/**
 * Validates that a literal value is consistent with its declared data type.
 *
 * <p>Rules enforced: a {@code null} value requires a nullable type; a non-null value
 * requires a non-nullable type; the value's runtime class must be assignable to the
 * type's conversion class and supported as an input conversion of the logical type.
 *
 * @param value the literal value, possibly {@code null}
 * @param dataType the declared data type of the literal
 * @throws ValidationException if any of the above rules is violated
 */
private static void validateValueDataType(Object value, DataType dataType) {
    final LogicalType logicalType = dataType.getLogicalType();
    if (value == null) {
        // A null literal is only legal for nullable types.
        if (!logicalType.isNullable()) {
            throw new ValidationException(String.format("Data type '%s' does not support null values.", dataType));
        }
        return;
    }
    if (logicalType.isNullable()) {
        throw new ValidationException("Literals that have a non-null value must not have a nullable data type.");
    }
    final Class<?> valueClass = value.getClass();
    // Ensure value and data type match: the runtime class must be usable as the
    // type's conversion class.
    if (!dataType.getConversionClass().isAssignableFrom(valueClass)) {
        throw new ValidationException(
                String.format(
                        "Data type '%s' with conversion class '%s' does not support a value literal of class '%s'.",
                        dataType, dataType.getConversionClass().getName(), value.getClass().getName()));
    }
    // Check for proper input conversion as this cannot be checked in the data type.
    if (!logicalType.supportsInputConversion(valueClass)) {
        throw new ValidationException(String.format("Data type '%s' does not support a conversion from class '%s'.", dataType, valueClass.getName()));
    }
}
Also used : ValidationException(org.apache.flink.table.api.ValidationException) LogicalType(org.apache.flink.table.types.logical.LogicalType)

Example 98 with ValidationException

use of org.apache.flink.table.api.ValidationException in project flink by apache.

Source: class SetOperationFactory, method validateSetOperation.

/**
 * Validates that two query operations are compatible for a set operation
 * (e.g. UNION/INTERSECT/EXCEPT): both sides must have the same number of columns
 * and a common type must exist at every column position.
 *
 * @param operationType the kind of set operation being built
 * @param left the left-hand side operation
 * @param right the right-hand side operation
 * @throws ValidationException if column counts differ or a column pair has no common type
 */
private void validateSetOperation(SetQueryOperationType operationType, QueryOperation left, QueryOperation right) {
    final ResolvedSchema schemaOfLeft = left.getResolvedSchema();
    final ResolvedSchema schemaOfRight = right.getResolvedSchema();
    final int columnsLeft = schemaOfLeft.getColumnCount();
    final int columnsRight = schemaOfRight.getColumnCount();
    if (columnsLeft != columnsRight) {
        throw new ValidationException(format("The %s operation on two tables of different column sizes: %d and %d is not supported", operationType.toString().toLowerCase(), columnsLeft, columnsRight));
    }
    final List<DataType> typesOfLeft = schemaOfLeft.getColumnDataTypes();
    final List<DataType> typesOfRight = schemaOfRight.getColumnDataTypes();
    // Check each column position for a common type; fail fast on the first mismatch.
    for (int pos = 0; pos < columnsLeft; pos++) {
        if (!findCommonColumnType(typesOfLeft, typesOfRight, pos).isPresent()) {
            throw new ValidationException(format("Incompatible types for %s operation. " + "Could not find a common type at position %s for '%s' and '%s'.", operationType.toString().toLowerCase(), pos, typesOfLeft.get(pos), typesOfRight.get(pos)));
        }
    }
}
Also used : ValidationException(org.apache.flink.table.api.ValidationException) DataType(org.apache.flink.table.types.DataType) ResolvedSchema(org.apache.flink.table.catalog.ResolvedSchema)

Example 99 with ValidationException

use of org.apache.flink.table.api.ValidationException in project flink by apache.

Source: class ValuesOperationFactory, method findCommonTypeAtPosition.

/**
 * Determines the common data type at column position {@code i} across all resolved
 * rows of a {@code fromValues(...)} call.
 *
 * @param resolvedRows the resolved expressions of every row
 * @param i the column position to merge
 * @return the merged common type converted back to a {@link DataType}
 * @throws ValidationException if the column types have no common super type
 */
private DataType findCommonTypeAtPosition(List<List<ResolvedExpression>> resolvedRows, int i) {
    final List<LogicalType> typesAtIPosition = extractLogicalTypesAtPosition(resolvedRows, i);
    final Optional<LogicalType> commonType = LogicalTypeMerging.findCommonType(typesAtIPosition);
    if (!commonType.isPresent()) {
        // Collect every distinct column type in encounter order so the error message
        // lists exactly what could not be merged.
        final Set<DataType> columnTypes = new LinkedHashSet<>();
        for (List<ResolvedExpression> row : resolvedRows) {
            columnTypes.add(row.get(i).getOutputDataType());
        }
        throw new ValidationException(String.format("Types in fromValues(...) must have a common super type. Could not find a common type" + " for all rows at column %d.\n" + "Could not find a common super type for types: %s", i, columnTypes));
    }
    return TypeConversions.fromLogicalToDataType(commonType.get());
}
Also used : IntStream(java.util.stream.IntStream) DataType(org.apache.flink.table.types.DataType) QueryOperation(org.apache.flink.table.operations.QueryOperation) ApiExpressionUtils.valueLiteral(org.apache.flink.table.expressions.ApiExpressionUtils.valueLiteral) KeyValueDataType(org.apache.flink.table.types.KeyValueDataType) CallExpression(org.apache.flink.table.expressions.CallExpression) ResolvedSchema(org.apache.flink.table.catalog.ResolvedSchema) LogicalTypeMerging(org.apache.flink.table.types.logical.utils.LogicalTypeMerging) Expression(org.apache.flink.table.expressions.Expression) ExpressionDefaultVisitor(org.apache.flink.table.expressions.ExpressionDefaultVisitor) ArrayList(java.util.ArrayList) NULL(org.apache.flink.table.types.logical.LogicalTypeRoot.NULL) ResolvedExpression(org.apache.flink.table.expressions.ResolvedExpression) FieldsDataType(org.apache.flink.table.types.FieldsDataType) ExpressionResolver(org.apache.flink.table.expressions.resolver.ExpressionResolver) MAP(org.apache.flink.table.types.logical.LogicalTypeRoot.MAP) LogicalTypeCasts.supportsExplicitCast(org.apache.flink.table.types.logical.utils.LogicalTypeCasts.supportsExplicitCast) ValuesQueryOperation(org.apache.flink.table.operations.ValuesQueryOperation) Nullable(javax.annotation.Nullable) LinkedHashSet(java.util.LinkedHashSet) ROW(org.apache.flink.table.types.logical.LogicalTypeRoot.ROW) FunctionDefinition(org.apache.flink.table.functions.FunctionDefinition) BuiltInFunctionDefinitions(org.apache.flink.table.functions.BuiltInFunctionDefinitions) TableException(org.apache.flink.table.api.TableException) Set(java.util.Set) ValueLiteralExpression(org.apache.flink.table.expressions.ValueLiteralExpression) Collectors(java.util.stream.Collectors) List(java.util.List) CollectionDataType(org.apache.flink.table.types.CollectionDataType) LogicalType(org.apache.flink.table.types.logical.LogicalType) ARRAY(org.apache.flink.table.types.logical.LogicalTypeRoot.ARRAY) 
ValidationException(org.apache.flink.table.api.ValidationException) Optional(java.util.Optional) Internal(org.apache.flink.annotation.Internal) TypeConversions(org.apache.flink.table.types.utils.TypeConversions) Collections(java.util.Collections) ValidationException(org.apache.flink.table.api.ValidationException) LogicalType(org.apache.flink.table.types.logical.LogicalType) DataType(org.apache.flink.table.types.DataType) KeyValueDataType(org.apache.flink.table.types.KeyValueDataType) FieldsDataType(org.apache.flink.table.types.FieldsDataType) CollectionDataType(org.apache.flink.table.types.CollectionDataType)

Example 100 with ValidationException

use of org.apache.flink.table.api.ValidationException in project flink by apache.

Source: class OperationTreeBuilder, method flatMap.

/**
 * Builds the operation tree for a {@code flatMap} call: the table function is joined
 * laterally against the child, the child's original columns are dropped, and the
 * function's output fields are aliased back to their (uniquified) names.
 *
 * @param tableFunctionCall the expression invoking a table function
 * @param child the input operation the function is applied to
 * @return the resulting query operation exposing only the function's output fields
 * @throws ValidationException if the expression is not a table function call
 */
public QueryOperation flatMap(Expression tableFunctionCall, QueryOperation child) {
    final ExpressionResolver resolver = getResolverBuilder(child).build();
    final ResolvedExpression resolvedCall = resolveSingleExpression(tableFunctionCall, resolver);
    if (!isFunctionOfKind(resolvedCall, FunctionKind.TABLE)) {
        throw new ValidationException("Only a table function can be used in the flatMap operator.");
    }
    final List<String> originFieldNames = DataTypeUtils.flattenToNames(resolvedCall.getOutputDataType());
    final List<String> childFields = child.getResolvedSchema().getColumnNames();
    // Track all names in use so generated output names never clash with child columns.
    final Set<String> usedFieldNames = new HashSet<>(childFields);
    // AS takes the call itself first, followed by one name literal per output field.
    final List<Expression> aliasArgs = new ArrayList<>();
    aliasArgs.add(tableFunctionCall);
    for (String originFieldName : originFieldNames) {
        final String uniqueName = getUniqueName(originFieldName, usedFieldNames);
        usedFieldNames.add(uniqueName);
        aliasArgs.add(valueLiteral(uniqueName));
    }
    final Expression renamedTableFunction = unresolvedCall(BuiltInFunctionDefinitions.AS, aliasArgs.toArray(new Expression[0]));
    final QueryOperation joined = joinLateral(child, renamedTableFunction, JoinType.INNER, Optional.empty());
    // Drop the child's columns, then alias the remaining ones to the original names.
    final QueryOperation withoutChildColumns = dropColumns(childFields.stream().map(ApiExpressionUtils::unresolvedRef).collect(Collectors.toList()), joined);
    return alias(originFieldNames.stream().map(ApiExpressionUtils::unresolvedRef).collect(Collectors.toList()), withoutChildColumns);
}
Also used : ValidationException(org.apache.flink.table.api.ValidationException) ResolvedExpression(org.apache.flink.table.expressions.ResolvedExpression) Expression(org.apache.flink.table.expressions.Expression) UnresolvedCallExpression(org.apache.flink.table.expressions.UnresolvedCallExpression) ResolvedExpression(org.apache.flink.table.expressions.ResolvedExpression) ArrayList(java.util.ArrayList) ApiExpressionUtils(org.apache.flink.table.expressions.ApiExpressionUtils) ExpressionResolver(org.apache.flink.table.expressions.resolver.ExpressionResolver) SqlExpressionResolver(org.apache.flink.table.expressions.resolver.SqlExpressionResolver) HashSet(java.util.HashSet) ValuesQueryOperation(org.apache.flink.table.operations.ValuesQueryOperation) QueryOperation(org.apache.flink.table.operations.QueryOperation) DistinctQueryOperation(org.apache.flink.table.operations.DistinctQueryOperation) FilterQueryOperation(org.apache.flink.table.operations.FilterQueryOperation)

Aggregations

ValidationException (org.apache.flink.table.api.ValidationException)143 DataType (org.apache.flink.table.types.DataType)25 Test (org.junit.Test)23 HashMap (java.util.HashMap)21 ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier)19 LogicalType (org.apache.flink.table.types.logical.LogicalType)18 TableException (org.apache.flink.table.api.TableException)17 List (java.util.List)14 CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable)14 QueryOperation (org.apache.flink.table.operations.QueryOperation)14 LinkedHashMap (java.util.LinkedHashMap)13 DescriptorProperties (org.apache.flink.table.descriptors.DescriptorProperties)13 CatalogTable (org.apache.flink.table.catalog.CatalogTable)12 Expression (org.apache.flink.table.expressions.Expression)12 TableSchema (org.apache.flink.table.api.TableSchema)11 Catalog (org.apache.flink.table.catalog.Catalog)11 ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable)11 ArrayList (java.util.ArrayList)10 Map (java.util.Map)10 Internal (org.apache.flink.annotation.Internal)10