Use of org.apache.flink.table.api.ValidationException in project flink by apache.
The class CatalogSourceTable, method computeContextResolvedTable:
private ContextResolvedTable computeContextResolvedTable(
        FlinkContext context, Map<String, String> hintedOptions) {
    ContextResolvedTable contextResolvedTable = schemaTable.getContextResolvedTable();
    if (hintedOptions.isEmpty()) {
        return contextResolvedTable;
    }
    final ReadableConfig config = context.getTableConfig().getConfiguration();
    if (!config.get(TableConfigOptions.TABLE_DYNAMIC_TABLE_OPTIONS_ENABLED)) {
        throw new ValidationException(
                String.format(
                        "The '%s' hint is allowed only when the config option '%s' is set to true.",
                        FlinkHints.HINT_NAME_OPTIONS,
                        TableConfigOptions.TABLE_DYNAMIC_TABLE_OPTIONS_ENABLED.key()));
    }
    if (contextResolvedTable.getResolvedTable().getTableKind() == TableKind.VIEW) {
        throw new ValidationException(
                String.format(
                        "View '%s' cannot be enriched with new options. "
                                + "Hints can only be applied to tables.",
                        contextResolvedTable.getIdentifier()));
    }
    return contextResolvedTable.copy(
            FlinkHints.mergeTableOptions(
                    hintedOptions, contextResolvedTable.getResolvedTable().getOptions()));
}
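For context, the OPTIONS hint guarded by this check is only honored after the config flag is enabled. A minimal sketch of the caller's side, assuming a registered table named 'kafka_source' and a connector that understands the hinted option (both names are illustrative):

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class OptionsHintExample {
    public static void main(String[] args) {
        TableEnvironment tEnv =
                TableEnvironment.create(EnvironmentSettings.inStreamingMode());

        // Without this flag, the hint below triggers the ValidationException
        // thrown in computeContextResolvedTable.
        tEnv.getConfig()
                .getConfiguration()
                .setString("table.dynamic-table-options.enabled", "true");

        // Hinted options are merged on top of the options stored in the catalog.
        tEnv.sqlQuery(
                "SELECT * FROM kafka_source "
                        + "/*+ OPTIONS('scan.startup.mode'='earliest-offset') */");
    }
}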
Use of org.apache.flink.table.api.ValidationException in project flink by apache.
The class TypeInferenceUtil, method inferOutputType:
/**
* Infers an output type using the given {@link TypeStrategy}. It assumes that input arguments
* have been adapted before if necessary.
*/
public static DataType inferOutputType(
        CallContext callContext, TypeStrategy outputTypeStrategy) {
    final Optional<DataType> potentialOutputType = outputTypeStrategy.inferType(callContext);
    if (!potentialOutputType.isPresent()) {
        throw new ValidationException(
                "Could not infer an output type for the given arguments.");
    }
    final DataType outputType = potentialOutputType.get();
    if (isUnknown(outputType)) {
        throw new ValidationException(
                "Could not infer an output type for the given arguments. Untyped NULL received.");
    }
    return outputType;
}
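In practice, the TypeStrategy usually comes from a function's TypeInference. A minimal sketch of a user-defined function that supplies an explicit output type, so that inferType above always returns a present, non-NULL result (the function itself is hypothetical):

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.catalog.DataTypeFactory;
import org.apache.flink.table.functions.ScalarFunction;
import org.apache.flink.table.types.inference.TypeInference;
import org.apache.flink.table.types.inference.TypeStrategies;

public class StringLength extends ScalarFunction {

    public long eval(String s) {
        return s == null ? 0L : s.length();
    }

    @Override
    public TypeInference getTypeInference(DataTypeFactory typeFactory) {
        // An explicit strategy always yields Optional.of(BIGINT), so
        // inferOutputType cannot fail for calls to this function.
        return TypeInference.newBuilder()
                .outputTypeStrategy(TypeStrategies.explicit(DataTypes.BIGINT()))
                .build();
    }
}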
Use of org.apache.flink.table.api.ValidationException in project flink by apache.
The class FlinkCalciteSqlValidator, method validateJoin:
@Override
protected void validateJoin(SqlJoin join, SqlValidatorScope scope) {
    // Temporarily forbid common predicates until the problem is fixed (see FLINK-7865).
    if (join.getJoinType() == JoinType.LEFT
            && SqlUtil.stripAs(join.getRight()).getKind() == SqlKind.COLLECTION_TABLE) {
        SqlNode right = SqlUtil.stripAs(join.getRight());
        if (right instanceof SqlBasicCall) {
            SqlBasicCall call = (SqlBasicCall) right;
            SqlNode operand0 = call.operand(0);
            if (operand0 instanceof SqlBasicCall
                    && ((SqlBasicCall) operand0).getOperator()
                            instanceof SqlWindowTableFunction) {
                // Window table functions are exempt from the restriction below.
                return;
            }
        }
        final SqlNode condition = join.getCondition();
        if (condition != null
                && (!SqlUtil.isLiteral(condition)
                        || ((SqlLiteral) condition).getValueAs(Boolean.class) != Boolean.TRUE)) {
            throw new ValidationException(
                    String.format(
                            "Left outer joins with a table function do not accept a predicate such as %s. "
                                    + "Only literal TRUE is accepted.",
                            condition));
        }
    }
    super.validateJoin(join, scope);
}
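In SQL terms, the check rejects any real predicate on a left join against a (non-window) table function and admits only the literal TRUE. A hedged sketch, with tEnv a TableEnvironment as in the earlier example and assuming a table 'orders' and a registered table function 'split' (both illustrative):

// Accepted: the join condition is the literal TRUE.
tEnv.sqlQuery(
        "SELECT * FROM orders "
                + "LEFT JOIN LATERAL TABLE(split(tags)) AS t(tag) ON TRUE");

// Rejected by validateJoin: "Left outer joins with a table function
// do not accept a predicate such as ... Only literal TRUE is accepted."
tEnv.sqlQuery(
        "SELECT * FROM orders "
                + "LEFT JOIN LATERAL TABLE(split(tags)) AS t(tag) ON orders.id > 0");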
Use of org.apache.flink.table.api.ValidationException in project flink by apache.
The class FunctionCatalogOperatorTable, method verifyFunctionKind:
/**
* Verifies which kinds of functions are allowed to be returned from the catalog given the
* context information.
*/
private boolean verifyFunctionKind(
        @Nullable SqlFunctionCategory category, ContextResolvedFunction resolvedFunction) {
    final FunctionDefinition definition = resolvedFunction.getDefinition();
    // Built-in functions without implementation are handled separately.
    if (definition instanceof BuiltInFunctionDefinition) {
        final BuiltInFunctionDefinition builtInFunction = (BuiltInFunctionDefinition) definition;
        if (!builtInFunction.hasRuntimeImplementation()) {
            return false;
        }
    }
    final FunctionKind kind = definition.getKind();
    if (kind == FunctionKind.TABLE) {
        return true;
    } else if (kind == FunctionKind.SCALAR
            || kind == FunctionKind.AGGREGATE
            || kind == FunctionKind.TABLE_AGGREGATE) {
        if (category != null && category.isTableFunction()) {
            throw new ValidationException(
                    String.format(
                            "Function '%s' cannot be used as a table function.",
                            resolvedFunction));
        }
        return true;
    }
    return false;
}
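The practical effect is that a scalar or aggregate function resolved in a table-function position is rejected. A hedged sketch, reusing a TableEnvironment tEnv as above (the function name and registration are illustrative):

import org.apache.flink.table.functions.ScalarFunction;

public static class UpperCase extends ScalarFunction {
    public String eval(String s) {
        return s == null ? null : s.toUpperCase();
    }
}

// Registration and use in a scalar position work as expected.
tEnv.createTemporarySystemFunction("to_upper", UpperCase.class);
tEnv.sqlQuery("SELECT to_upper(name) FROM (VALUES ('flink')) AS t(name)");

// A scalar function in a table-function position is expected to fail
// validation with: "Function 'to_upper' cannot be used as a table function."
tEnv.sqlQuery("SELECT * FROM LATERAL TABLE(to_upper('flink'))");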
Use of org.apache.flink.table.api.ValidationException in project flink by apache.
The class DynamicSinkUtils, method validateAndApplyMetadata:
private static void validateAndApplyMetadata(
        String tableDebugName,
        DynamicTableSink sink,
        ResolvedSchema schema,
        List<SinkAbilitySpec> sinkAbilitySpecs) {
    final List<Column> columns = schema.getColumns();
    final List<Integer> metadataColumns = extractPersistedMetadataColumns(schema);
    if (metadataColumns.isEmpty()) {
        return;
    }
    if (!(sink instanceof SupportsWritingMetadata)) {
        throw new ValidationException(
                String.format(
                        "Table '%s' declares persistable metadata columns, but the underlying %s "
                                + "doesn't implement the %s interface. If the column should not "
                                + "be persisted, it can be declared with the VIRTUAL keyword.",
                        tableDebugName,
                        DynamicTableSink.class.getSimpleName(),
                        SupportsWritingMetadata.class.getSimpleName()));
    }
    final Map<String, DataType> metadataMap =
            ((SupportsWritingMetadata) sink).listWritableMetadata();
    metadataColumns.forEach(
            pos -> {
                final MetadataColumn metadataColumn = (MetadataColumn) columns.get(pos);
                final String metadataKey =
                        metadataColumn.getMetadataKey().orElse(metadataColumn.getName());
                final LogicalType metadataType = metadataColumn.getDataType().getLogicalType();
                final DataType expectedMetadataDataType = metadataMap.get(metadataKey);
                // Check that the metadata key is valid.
                if (expectedMetadataDataType == null) {
                    throw new ValidationException(
                            String.format(
                                    "Invalid metadata key '%s' in column '%s' of table '%s'. "
                                            + "The %s class '%s' supports the following metadata keys for writing:\n%s",
                                    metadataKey,
                                    metadataColumn.getName(),
                                    tableDebugName,
                                    DynamicTableSink.class.getSimpleName(),
                                    sink.getClass().getName(),
                                    String.join("\n", metadataMap.keySet())));
                }
                // Check that the declared type is castable to the metadata type.
                if (!supportsExplicitCast(
                        metadataType, expectedMetadataDataType.getLogicalType())) {
                    if (metadataKey.equals(metadataColumn.getName())) {
                        throw new ValidationException(
                                String.format(
                                        "Invalid data type for metadata column '%s' of table '%s'. "
                                                + "The column cannot be declared as '%s' because the type must be "
                                                + "castable to metadata type '%s'.",
                                        metadataColumn.getName(),
                                        tableDebugName,
                                        metadataType,
                                        expectedMetadataDataType.getLogicalType()));
                    } else {
                        throw new ValidationException(
                                String.format(
                                        "Invalid data type for metadata column '%s' with metadata key '%s' of table '%s'. "
                                                + "The column cannot be declared as '%s' because the type must be "
                                                + "castable to metadata type '%s'.",
                                        metadataColumn.getName(),
                                        metadataKey,
                                        tableDebugName,
                                        metadataType,
                                        expectedMetadataDataType.getLogicalType()));
                    }
                }
            });
    sinkAbilitySpecs.add(
            new WritingMetadataSpec(
                    createRequiredMetadataKeys(schema, sink), createConsumedType(schema, sink)));
}
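On the DDL side, persisted metadata columns are the ones declared with METADATA but without VIRTUAL. A hedged sketch using the Kafka connector's metadata keys, with tEnv a TableEnvironment as above (connector options are illustrative, and the exact set of writable keys depends on the connector version):

tEnv.executeSql(
        "CREATE TABLE kafka_sink (\n"
                + "  id BIGINT,\n"
                // Persisted: goes through validateAndApplyMetadata, so the sink
                // must list 'timestamp' as writable metadata of a castable type.
                + "  event_time TIMESTAMP_LTZ(3) METADATA FROM 'timestamp',\n"
                // VIRTUAL: readable only, excluded from the validation above.
                + "  part INT METADATA FROM 'partition' VIRTUAL\n"
                + ") WITH (\n"
                + "  'connector' = 'kafka',\n"
                + "  'topic' = 'events',\n"
                + "  'properties.bootstrap.servers' = 'localhost:9092',\n"
                + "  'format' = 'json'\n"
                + ")");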