Use of org.apache.flink.table.api.ValidationException in project flink by apache.
The class FactoryUtil, method enrichNoMatchingConnectorError:
private static ValidationException enrichNoMatchingConnectorError(
        Class<?> factoryClass, DynamicTableFactory.Context context, String connectorOption) {
    final DynamicTableFactory factory;
    try {
        factory =
                discoverFactory(
                        context.getClassLoader(), DynamicTableFactory.class, connectorOption);
    } catch (ValidationException e) {
        return new ValidationException(
                String.format(
                        "Cannot discover a connector using option: %s",
                        stringifyOption(CONNECTOR.key(), connectorOption)),
                e);
    }
    final Class<?> sourceFactoryClass = DynamicTableSourceFactory.class;
    final Class<?> sinkFactoryClass = DynamicTableSinkFactory.class;
    // for a better exception message
    if (sourceFactoryClass.equals(factoryClass)
            && sinkFactoryClass.isAssignableFrom(factory.getClass())) {
        // discovering source, but not found, and this is a sink connector.
        return new ValidationException(
                String.format(
                        "Connector '%s' can only be used as a sink. It cannot be used as a source.",
                        connectorOption));
    } else if (sinkFactoryClass.equals(factoryClass)
            && sourceFactoryClass.isAssignableFrom(factory.getClass())) {
        // discovering sink, but not found, and this is a source connector.
        return new ValidationException(
                String.format(
                        "Connector '%s' can only be used as a source. It cannot be used as a sink.",
                        connectorOption));
    } else {
        return new ValidationException(
                String.format(
                        "Connector '%s' does neither implement the '%s' nor the '%s' interface.",
                        connectorOption,
                        sourceFactoryClass.getName(),
                        sinkFactoryClass.getName()));
    }
}
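The enriched message surfaces when a table backed by a sink-only connector is queried as a source (or the reverse). Below is a minimal sketch of how to trigger it, assuming Flink's table planner and the built-in sink-only 'blackhole' connector are on the classpath; the enriched exception usually shows up in the cause chain of the ValidationException thrown at planning time.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.ValidationException;

public class SinkOnlyConnectorExample {
    public static void main(String[] args) {
        TableEnvironment env = TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        // 'blackhole' implements only DynamicTableSinkFactory.
        env.executeSql("CREATE TABLE t (x INT) WITH ('connector' = 'blackhole')");
        try {
            env.executeSql("SELECT * FROM t"); // planning discovers the factory
        } catch (ValidationException e) {
            // Expect "Connector 'blackhole' can only be used as a sink. It cannot
            // be used as a source." somewhere in the cause chain.
            e.printStackTrace();
        }
    }
}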
Use of org.apache.flink.table.api.ValidationException in project flink by apache.
The class ValueLiteralExpression, method validateValueDataType:
private static void validateValueDataType(Object value, DataType dataType) {
    final LogicalType logicalType = dataType.getLogicalType();
    if (value == null) {
        if (!logicalType.isNullable()) {
            throw new ValidationException(
                    String.format("Data type '%s' does not support null values.", dataType));
        }
        return;
    }
    if (logicalType.isNullable()) {
        throw new ValidationException(
                "Literals that have a non-null value must not have a nullable data type.");
    }
    final Class<?> candidate = value.getClass();
    // ensure value and data type match
    if (!dataType.getConversionClass().isAssignableFrom(candidate)) {
        throw new ValidationException(
                String.format(
                        "Data type '%s' with conversion class '%s' does not support a value literal of class '%s'.",
                        dataType,
                        dataType.getConversionClass().getName(),
                        value.getClass().getName()));
    }
    // check for proper input as this cannot be checked in data type
    if (!logicalType.supportsInputConversion(candidate)) {
        throw new ValidationException(
                String.format(
                        "Data type '%s' does not support a conversion from class '%s'.",
                        dataType, candidate.getName()));
    }
}
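This validation runs eagerly when a literal is built, for example via Expressions.lit(value, dataType). A minimal sketch of the failure modes, assuming only the flink-table-api-java dependency:

import static org.apache.flink.table.api.Expressions.lit;

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.ValidationException;

public class LiteralValidationExample {
    public static void main(String[] args) {
        lit(42, DataTypes.INT().notNull()); // OK: non-null value, NOT NULL type
        try {
            lit(null, DataTypes.INT().notNull()); // NOT NULL rejects null values
        } catch (ValidationException e) {
            System.out.println(e.getMessage());
        }
        try {
            lit(42, DataTypes.INT()); // non-null literal needs a NOT NULL type
        } catch (ValidationException e) {
            System.out.println(e.getMessage());
        }
        try {
            lit("hello", DataTypes.INT().notNull()); // conversion class mismatch
        } catch (ValidationException e) {
            System.out.println(e.getMessage());
        }
    }
}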
Use of org.apache.flink.table.api.ValidationException in project flink by apache.
The class SetOperationFactory, method validateSetOperation:
private void validateSetOperation(
        SetQueryOperationType operationType, QueryOperation left, QueryOperation right) {
    ResolvedSchema leftSchema = left.getResolvedSchema();
    int leftFieldCount = leftSchema.getColumnCount();
    ResolvedSchema rightSchema = right.getResolvedSchema();
    int rightFieldCount = rightSchema.getColumnCount();
    if (leftFieldCount != rightFieldCount) {
        throw new ValidationException(
                format(
                        "The %s operation on two tables of different column sizes: %d and %d is not supported",
                        operationType.toString().toLowerCase(), leftFieldCount, rightFieldCount));
    }
    final List<DataType> leftDataTypes = leftSchema.getColumnDataTypes();
    final List<DataType> rightDataTypes = rightSchema.getColumnDataTypes();
    IntStream.range(0, leftFieldCount)
            .forEach(
                    idx -> {
                        if (!findCommonColumnType(leftDataTypes, rightDataTypes, idx).isPresent()) {
                            throw new ValidationException(
                                    format(
                                            "Incompatible types for %s operation. Could not find a common type at position %s for '%s' and '%s'.",
                                            operationType.toString().toLowerCase(),
                                            idx,
                                            leftDataTypes.get(idx),
                                            rightDataTypes.get(idx)));
                        }
                    });
}
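Table API set operations such as Table.union and Table.unionAll funnel into this check. A minimal sketch that trips the column-count validation, assuming a streaming TableEnvironment:

import static org.apache.flink.table.api.Expressions.row;

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.ValidationException;

public class SetOperationValidationExample {
    public static void main(String[] args) {
        TableEnvironment env = TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        Table twoColumns = env.fromValues(row(1, "a"));
        Table oneColumn = env.fromValues(row(1));
        try {
            twoColumns.unionAll(oneColumn); // column counts differ: 2 vs 1
        } catch (ValidationException e) {
            // "The union operation on two tables of different column sizes:
            // 2 and 1 is not supported"
            System.out.println(e.getMessage());
        }
    }
}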
Use of org.apache.flink.table.api.ValidationException in project flink by apache.
The class ValuesOperationFactory, method findCommonTypeAtPosition:
private DataType findCommonTypeAtPosition(List<List<ResolvedExpression>> resolvedRows, int i) {
    List<LogicalType> typesAtIPosition = extractLogicalTypesAtPosition(resolvedRows, i);
    LogicalType logicalType =
            LogicalTypeMerging.findCommonType(typesAtIPosition)
                    .orElseThrow(
                            () -> {
                                Set<DataType> columnTypes =
                                        resolvedRows.stream()
                                                .map(row -> row.get(i).getOutputDataType())
                                                .collect(Collectors.toCollection(LinkedHashSet::new));
                                return new ValidationException(
                                        String.format(
                                                "Types in fromValues(...) must have a common super type. Could not find a common type for all rows at column %d.\n"
                                                        + "Could not find a common super type for types: %s",
                                                i, columnTypes));
                            });
    return TypeConversions.fromLogicalToDataType(logicalType);
}
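This check fires from TableEnvironment.fromValues(...) when the rows disagree on a column's type. A minimal sketch, assuming INT and BOOLEAN have no common super type under LogicalTypeMerging (which holds for Flink's merging rules):

import static org.apache.flink.table.api.Expressions.row;

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.ValidationException;

public class FromValuesCommonTypeExample {
    public static void main(String[] args) {
        TableEnvironment env = TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        try {
            env.fromValues(row(1), row(true)); // INT vs BOOLEAN at column 0
        } catch (ValidationException e) {
            // "Types in fromValues(...) must have a common super type. ..."
            System.out.println(e.getMessage());
        }
    }
}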
Use of org.apache.flink.table.api.ValidationException in project flink by apache.
The class OperationTreeBuilder, method flatMap:
public QueryOperation flatMap(Expression tableFunctionCall, QueryOperation child) {
    final ExpressionResolver resolver = getResolverBuilder(child).build();
    final ResolvedExpression resolvedCall = resolveSingleExpression(tableFunctionCall, resolver);
    if (!isFunctionOfKind(resolvedCall, FunctionKind.TABLE)) {
        throw new ValidationException("Only a table function can be used in the flatMap operator.");
    }
    final List<String> originFieldNames =
            DataTypeUtils.flattenToNames(resolvedCall.getOutputDataType());
    List<String> childFields = child.getResolvedSchema().getColumnNames();
    Set<String> usedFieldNames = new HashSet<>(childFields);
    List<Expression> args = new ArrayList<>();
    for (String originFieldName : originFieldNames) {
        String resultName = getUniqueName(originFieldName, usedFieldNames);
        usedFieldNames.add(resultName);
        args.add(valueLiteral(resultName));
    }
    args.add(0, tableFunctionCall);
    Expression renamedTableFunction =
            unresolvedCall(BuiltInFunctionDefinitions.AS, args.toArray(new Expression[0]));
    QueryOperation joinNode = joinLateral(child, renamedTableFunction, JoinType.INNER, Optional.empty());
    QueryOperation rightNode =
            dropColumns(
                    childFields.stream()
                            .map(ApiExpressionUtils::unresolvedRef)
                            .collect(Collectors.toList()),
                    joinNode);
    return alias(
            originFieldNames.stream()
                    .map(ApiExpressionUtils::unresolvedRef)
                    .collect(Collectors.toList()),
            rightNode);
}
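At the API surface this corresponds to Table.flatMap(...). A minimal sketch with a hypothetical SplitFunction table function (class name, type hint, and column names are illustrative, following the pattern from Flink's UDF documentation); passing a scalar function instead would fail with the "Only a table function can be used in the flatMap operator." message above:

import static org.apache.flink.table.api.Expressions.$;
import static org.apache.flink.table.api.Expressions.call;
import static org.apache.flink.table.api.Expressions.row;

import org.apache.flink.table.annotation.DataTypeHint;
import org.apache.flink.table.annotation.FunctionHint;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.functions.TableFunction;
import org.apache.flink.types.Row;

public class FlatMapExample {

    // Hypothetical table function: emits one row per whitespace-separated token.
    @FunctionHint(output = @DataTypeHint("ROW<word STRING>"))
    public static class SplitFunction extends TableFunction<Row> {
        public void eval(String s) {
            for (String token : s.split(" ")) {
                collect(Row.of(token));
            }
        }
    }

    public static void main(String[] args) {
        TableEnvironment env = TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        Table input = env.fromValues(row("hello world")); // single column, named f0 by default
        // flatMap lateral-joins each input row with the function output and then
        // drops the input columns, keeping only 'word'.
        Table result = input.flatMap(call(SplitFunction.class, $("f0")));
        result.execute().print();
    }
}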