Use of org.apache.flink.table.api.ValidationException in project flink by apache.
The class DynamicSourceUtils, method validateAndApplyMetadata.
private static void validateAndApplyMetadata(
        String tableDebugName, ResolvedSchema schema, DynamicTableSource source) {
    final List<MetadataColumn> metadataColumns = extractMetadataColumns(schema);
    if (metadataColumns.isEmpty()) {
        return;
    }
    if (!(source instanceof SupportsReadingMetadata)) {
        throw new ValidationException(
                String.format(
                        "Table '%s' declares metadata columns, but the underlying %s doesn't implement "
                                + "the %s interface. Therefore, metadata cannot be read from the given source.",
                        source.asSummaryString(),
                        DynamicTableSource.class.getSimpleName(),
                        SupportsReadingMetadata.class.getSimpleName()));
    }
    final SupportsReadingMetadata metadataSource = (SupportsReadingMetadata) source;
    final Map<String, DataType> metadataMap = metadataSource.listReadableMetadata();
    metadataColumns.forEach(
            c -> {
                final String metadataKey = c.getMetadataKey().orElse(c.getName());
                final LogicalType metadataType = c.getDataType().getLogicalType();
                final DataType expectedMetadataDataType = metadataMap.get(metadataKey);
                // check that metadata key is valid
                if (expectedMetadataDataType == null) {
                    throw new ValidationException(
                            String.format(
                                    "Invalid metadata key '%s' in column '%s' of table '%s'. "
                                            + "The %s class '%s' supports the following metadata keys for reading:\n%s",
                                    metadataKey, c.getName(), tableDebugName,
                                    DynamicTableSource.class.getSimpleName(),
                                    source.getClass().getName(),
                                    String.join("\n", metadataMap.keySet())));
                }
                // check that types are compatible
                if (!supportsExplicitCast(expectedMetadataDataType.getLogicalType(), metadataType)) {
                    if (metadataKey.equals(c.getName())) {
                        throw new ValidationException(
                                String.format(
                                        "Invalid data type for metadata column '%s' of table '%s'. "
                                                + "The column cannot be declared as '%s' because the type must be "
                                                + "castable from metadata type '%s'.",
                                        c.getName(), tableDebugName,
                                        expectedMetadataDataType.getLogicalType(), metadataType));
                    } else {
                        throw new ValidationException(
                                String.format(
                                        "Invalid data type for metadata column '%s' with metadata key '%s' of table '%s'. "
                                                + "The column cannot be declared as '%s' because the type must be "
                                                + "castable from metadata type '%s'.",
                                        c.getName(), metadataKey, tableDebugName,
                                        expectedMetadataDataType.getLogicalType(), metadataType));
                    }
                }
            });
    metadataSource.applyReadableMetadata(
            createRequiredMetadataKeys(schema, source),
            TypeConversions.fromLogicalToDataType(createProducedType(schema, source)));
}
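For context, this check runs while planning a table whose schema declares METADATA columns. A minimal sketch of DDL that exercises it, assuming the Kafka connector (which lists a 'timestamp' metadata key) is on the classpath; the table name, topic, and broker address are illustrative:

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inStreamingMode());
tEnv.executeSql(
        "CREATE TABLE events (\n"
                + "  id STRING,\n"
                // the declared type must be castable from the source's metadata type
                + "  ts TIMESTAMP_LTZ(3) METADATA FROM 'timestamp'\n"
                + ") WITH (\n"
                + "  'connector' = 'kafka',\n"
                + "  'topic' = 'events',\n"
                + "  'properties.bootstrap.servers' = 'localhost:9092',\n"
                + "  'format' = 'json'\n"
                + ")");
// A metadata key missing from listReadableMetadata(), or a column type that cannot be
// cast from the metadata type, surfaces as a ValidationException from
// validateAndApplyMetadata when a query over 'events' is planned.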
Use of org.apache.flink.table.api.ValidationException in project flink by apache.
The class RawType, method restore.
// --------------------------------------------------------------------------------------------
/**
* Restores a raw type from the components of a serialized string representation.
*/
@SuppressWarnings({"unchecked", "rawtypes"})
public static RawType<?> restore(
        ClassLoader classLoader, String className, String serializerString) {
    try {
        final Class<?> clazz = Class.forName(className, true, classLoader);
        final byte[] bytes = EncodingUtils.decodeBase64ToBytes(serializerString);
        final DataInputDeserializer inputDeserializer = new DataInputDeserializer(bytes);
        final TypeSerializerSnapshot<?> snapshot =
                TypeSerializerSnapshot.readVersionedSnapshot(inputDeserializer, classLoader);
        return (RawType<?>) new RawType(clazz, snapshot.restoreSerializer());
    } catch (Throwable t) {
        throw new ValidationException(
                String.format(
                        "Unable to restore the RAW type of class '%s' with serializer snapshot '%s'.",
                        className, serializerString),
                t);
    }
}
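restore() is the inverse of RawType#getSerializerString(), which Base64-encodes the serializer snapshot for the type's serialized string representation. A round-trip sketch; MyPojo is a hypothetical class, and KryoSerializer is just one way to obtain a serializer for it:

import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.java.typeutils.runtime.kryo.KryoSerializer;
import org.apache.flink.table.types.logical.RawType;

// Build a RAW type for a hypothetical POJO and export its serializer snapshot string.
RawType<MyPojo> original =
        new RawType<>(MyPojo.class, new KryoSerializer<>(MyPojo.class, new ExecutionConfig()));
String serializerString = original.getSerializerString();
// Restore it; an unloadable class name or a corrupted Base64 string is wrapped
// in the ValidationException thrown above.
RawType<?> restored =
        RawType.restore(
                Thread.currentThread().getContextClassLoader(),
                MyPojo.class.getName(),
                serializerString);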
Use of org.apache.flink.table.api.ValidationException in project flink by apache.
The class SqlToOperationConverter, method convertCreateDatabase.
/**
* Convert CREATE DATABASE statement.
*/
private Operation convertCreateDatabase(SqlCreateDatabase sqlCreateDatabase) {
    String[] fullDatabaseName = sqlCreateDatabase.fullDatabaseName();
    if (fullDatabaseName.length > 2) {
        throw new ValidationException("create database identifier format error");
    }
    String catalogName =
            (fullDatabaseName.length == 1) ? catalogManager.getCurrentCatalog() : fullDatabaseName[0];
    String databaseName =
            (fullDatabaseName.length == 1) ? fullDatabaseName[0] : fullDatabaseName[1];
    boolean ignoreIfExists = sqlCreateDatabase.isIfNotExists();
    String databaseComment =
            sqlCreateDatabase.getComment().map(comment -> comment.getNlsString().getValue()).orElse(null);
    // set with properties
    Map<String, String> properties = new HashMap<>();
    sqlCreateDatabase.getPropertyList().getList()
            .forEach(p -> properties.put(
                    ((SqlTableOption) p).getKeyString(), ((SqlTableOption) p).getValueString()));
    CatalogDatabase catalogDatabase = new CatalogDatabaseImpl(properties, databaseComment);
    return new CreateDatabaseOperation(catalogName, databaseName, catalogDatabase, ignoreIfExists);
}
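This converter backs the CREATE DATABASE statement. A sketch of what it accepts and rejects, reusing a TableEnvironment tEnv as in the first sketch; catalog, database, and property names are illustrative:

// Accepted: at most a two-part 'catalog.database' identifier, with optional comment and properties.
tEnv.executeSql(
        "CREATE DATABASE IF NOT EXISTS my_catalog.analytics "
                + "COMMENT 'reporting data' WITH ('owner' = 'data-team')");
// Rejected: a three-part identifier such as 'a.b.c' fails with
// "create database identifier format error".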
Use of org.apache.flink.table.api.ValidationException in project flink by apache.
The class SqlToOperationConverter, method convertAlterTableOptions.
private Operation convertAlterTableOptions(
        ObjectIdentifier tableIdentifier, CatalogTable oldTable, SqlAlterTableOptions alterTableOptions) {
    LinkedHashMap<String, String> partitionKVs = alterTableOptions.getPartitionKVs();
    // it's altering partitions
    if (partitionKVs != null) {
        CatalogPartitionSpec partitionSpec = new CatalogPartitionSpec(partitionKVs);
        CatalogPartition catalogPartition =
                catalogManager.getPartition(tableIdentifier, partitionSpec)
                        .orElseThrow(() -> new ValidationException(String.format(
                                "Partition %s of table %s doesn't exist",
                                partitionSpec.getPartitionSpec(), tableIdentifier)));
        Map<String, String> newProps = new HashMap<>(catalogPartition.getProperties());
        newProps.putAll(OperationConverterUtils.extractProperties(alterTableOptions.getPropertyList()));
        return new AlterPartitionPropertiesOperation(
                tableIdentifier, partitionSpec,
                new CatalogPartitionImpl(newProps, catalogPartition.getComment()));
    } else {
        // it's altering a table
        Map<String, String> newOptions = new HashMap<>(oldTable.getOptions());
        newOptions.putAll(OperationConverterUtils.extractProperties(alterTableOptions.getPropertyList()));
        return new AlterTableOptionsOperation(tableIdentifier, oldTable.copy(newOptions));
    }
}
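Both branches are reachable through ALTER TABLE ... SET. A sketch with illustrative table, partition, and option names, again using the tEnv from the first sketch; the partition form assumes a partitioned table backed by a catalog that stores partitions (e.g. Hive):

// Table branch: merges the new options into the existing table options.
tEnv.executeSql("ALTER TABLE orders SET ('sink.parallelism' = '4')");
// Partition branch: the partition must already exist in the catalog, otherwise the
// converter throws the "Partition ... doesn't exist" ValidationException above.
tEnv.executeSql("ALTER TABLE orders PARTITION (dt = '2024-01-01') SET ('numRows' = '1000')");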
Use of org.apache.flink.table.api.ValidationException in project flink by apache.
The class SqlToOperationConverter, method convertAlterFunction.
/**
* Convert ALTER FUNCTION statement.
*/
private Operation convertAlterFunction(SqlAlterFunction sqlAlterFunction) {
    if (sqlAlterFunction.isSystemFunction()) {
        throw new ValidationException("Alter temporary system function is not supported");
    }
    FunctionLanguage language = parseLanguage(sqlAlterFunction.getFunctionLanguage());
    CatalogFunction catalogFunction =
            new CatalogFunctionImpl(
                    sqlAlterFunction.getFunctionClassName().getValueAs(String.class), language);
    UnresolvedIdentifier unresolvedIdentifier =
            UnresolvedIdentifier.of(sqlAlterFunction.getFunctionIdentifier());
    ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
    return new AlterCatalogFunctionOperation(
            identifier, catalogFunction, sqlAlterFunction.isIfExists(), sqlAlterFunction.isTemporary());
}
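An illustrative pair of statements for this converter; the function name and implementation class are assumptions:

// Accepted: altering a temporary catalog function.
tEnv.executeSql(
        "ALTER TEMPORARY FUNCTION IF EXISTS my_udf "
                + "AS 'com.example.udf.MyScalarFunction' LANGUAGE JAVA");
// Rejected: ALTER TEMPORARY SYSTEM FUNCTION is refused with
// "Alter temporary system function is not supported".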