Use of org.apache.flink.table.api.ValidationException in project flink by apache.
From class HiveParserDDLSemanticAnalyzer, method getAlteredTable:
private CatalogBaseTable getAlteredTable(String tableName, boolean expectView) {
    ObjectIdentifier objectIdentifier = parseObjectIdentifier(tableName);
    CatalogBaseTable catalogBaseTable = getCatalogBaseTable(objectIdentifier);
    if (expectView) {
        if (catalogBaseTable instanceof CatalogTable) {
            throw new ValidationException("ALTER VIEW for a table is not allowed");
        }
    } else {
        if (catalogBaseTable instanceof CatalogView) {
            throw new ValidationException("ALTER TABLE for a view is not allowed");
        }
    }
    return catalogBaseTable;
}
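A minimal sketch of how this check surfaces to a user. The setup is assumed (a HiveCatalog registered under the name "hive"; table names are placeholders), since HiveParserDDLSemanticAnalyzer only runs when the Hive dialect parses the statement:

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.SqlDialect;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.ValidationException;

public class AlterViewOnTableSketch {
    public static void main(String[] args) {
        TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());
        // Assumption: a HiveCatalog named "hive" has already been registered,
        // so the Hive parser (and thus getAlteredTable) handles the DDL below.
        tEnv.useCatalog("hive");
        tEnv.getConfig().setSqlDialect(SqlDialect.HIVE);
        tEnv.executeSql("CREATE TABLE t1 (a INT)");
        try {
            // t1 is a table, so the expectView branch hits the CatalogTable check.
            tEnv.executeSql("ALTER VIEW t1 RENAME TO v1");
        } catch (ValidationException e) {
            System.out.println(e.getMessage()); // "ALTER VIEW for a table is not allowed"
        }
    }
}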
Use of org.apache.flink.table.api.ValidationException in project flink by apache.
From class HiveParserDDLSemanticAnalyzer, method convertAlterTableFileFormat:
private Operation convertAlterTableFileFormat(
        CatalogBaseTable alteredTable,
        HiveParserASTNode ast,
        String tableName,
        HashMap<String, String> partSpec)
        throws SemanticException {
    HiveParserStorageFormat format = new HiveParserStorageFormat(conf);
    HiveParserASTNode child = (HiveParserASTNode) ast.getChild(0);
    if (!format.fillStorageFormat(child)) {
        throw new ValidationException("Unknown AST node for ALTER TABLE FILEFORMAT: " + child);
    }
    Map<String, String> newProps = new HashMap<>();
    newProps.put(ALTER_TABLE_OP, CHANGE_FILE_FORMAT.name());
    newProps.put(STORED_AS_FILE_FORMAT, format.getGenericName());
    return convertAlterTableProps(alteredTable, tableName, partSpec, newProps);
}
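For context, a hedged sketch of the DDL this method handles, assuming the same Hive-dialect TableEnvironment as in the previous sketch (the table name is a placeholder):

import org.apache.flink.table.api.TableEnvironment;

public class AlterFileFormatSketch {
    // Assumes `tEnv` uses the Hive dialect with a Hive catalog selected.
    static void switchToOrc(TableEnvironment tEnv) {
        // Routed through convertAlterTableFileFormat: "ORC" is recorded under
        // STORED_AS_FILE_FORMAT and ALTER_TABLE_OP becomes CHANGE_FILE_FORMAT
        // before convertAlterTableProps builds the resulting operation.
        tEnv.executeSql("ALTER TABLE t1 SET FILEFORMAT ORC");
    }
}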
Use of org.apache.flink.table.api.ValidationException in project flink by apache.
From class RegistryAvroFormatFactory, method createEncodingFormat:
@Override
public EncodingFormat<SerializationSchema<RowData>> createEncodingFormat(
        DynamicTableFactory.Context context, ReadableConfig formatOptions) {
    FactoryUtil.validateFactoryOptions(this, formatOptions);
    String schemaRegistryURL = formatOptions.get(URL);
    Optional<String> subject = formatOptions.getOptional(SUBJECT);
    Map<String, ?> optionalPropertiesMap = buildOptionalPropertiesMap(formatOptions);
    if (!subject.isPresent()) {
        throw new ValidationException(String.format(
                "Option %s.%s is required for serialization", IDENTIFIER, SUBJECT.key()));
    }
    return new EncodingFormat<SerializationSchema<RowData>>() {
        @Override
        public SerializationSchema<RowData> createRuntimeEncoder(
                DynamicTableSink.Context context, DataType consumedDataType) {
            final RowType rowType = (RowType) consumedDataType.getLogicalType();
            return new AvroRowDataSerializationSchema(
                    rowType,
                    ConfluentRegistryAvroSerializationSchema.forGeneric(
                            subject.get(),
                            AvroSchemaConverter.convertToSchema(rowType),
                            schemaRegistryURL,
                            optionalPropertiesMap),
                    RowDataToAvroConverters.createConverter(rowType));
        }

        @Override
        public ChangelogMode getChangelogMode() {
            return ChangelogMode.insertOnly();
        }
    };
}
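A hedged DDL sketch that satisfies this factory's validation. The option keys follow recent Flink documentation and may differ across versions; the topic, bootstrap servers, and registry URL are placeholders. Omitting the subject option on a sink triggers the ValidationException above:

import org.apache.flink.table.api.TableEnvironment;

public class AvroConfluentSinkSketch {
    static void createSink(TableEnvironment tEnv) {
        tEnv.executeSql(
                "CREATE TABLE users_sink (id BIGINT, name STRING) WITH ("
                        + " 'connector' = 'kafka',"
                        + " 'topic' = 'users',"
                        + " 'properties.bootstrap.servers' = 'localhost:9092',"
                        + " 'format' = 'avro-confluent',"
                        + " 'avro-confluent.url' = 'http://localhost:8081',"
                        // Required for serialization; without it the factory
                        // throws the ValidationException shown above.
                        + " 'avro-confluent.subject' = 'users-value'"
                        + ")");
    }
}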
Use of org.apache.flink.table.api.ValidationException in project flink by apache.
From class DebeziumAvroFormatFactory, method createEncodingFormat:
@Override
public EncodingFormat<SerializationSchema<RowData>> createEncodingFormat(
        DynamicTableFactory.Context context, ReadableConfig formatOptions) {
    FactoryUtil.validateFactoryOptions(this, formatOptions);
    String schemaRegistryURL = formatOptions.get(URL);
    Optional<String> subject = formatOptions.getOptional(SUBJECT);
    Map<String, ?> optionalPropertiesMap = buildOptionalPropertiesMap(formatOptions);
    if (!subject.isPresent()) {
        throw new ValidationException(String.format(
                "Option '%s.%s' is required for serialization", IDENTIFIER, SUBJECT.key()));
    }
    return new EncodingFormat<SerializationSchema<RowData>>() {
        @Override
        public ChangelogMode getChangelogMode() {
            return ChangelogMode.newBuilder()
                    .addContainedKind(RowKind.INSERT)
                    .addContainedKind(RowKind.UPDATE_BEFORE)
                    .addContainedKind(RowKind.UPDATE_AFTER)
                    .addContainedKind(RowKind.DELETE)
                    .build();
        }

        @Override
        public SerializationSchema<RowData> createRuntimeEncoder(
                DynamicTableSink.Context context, DataType consumedDataType) {
            final RowType rowType = (RowType) consumedDataType.getLogicalType();
            return new DebeziumAvroSerializationSchema(
                    rowType, schemaRegistryURL, subject.get(), optionalPropertiesMap);
        }
    };
}
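Unlike the insert-only encoder above, this format declares a changelog mode covering INSERT, UPDATE_BEFORE, UPDATE_AFTER, and DELETE, so a sink in this format can absorb updating results. A hedged DDL sketch, with option keys per recent Flink docs (version-dependent) and placeholder addresses:

import org.apache.flink.table.api.TableEnvironment;

public class DebeziumAvroSinkSketch {
    static void createCdcSink(TableEnvironment tEnv) {
        tEnv.executeSql(
                "CREATE TABLE users_cdc_sink (id BIGINT, name STRING) WITH ("
                        + " 'connector' = 'kafka',"
                        + " 'topic' = 'users_cdc',"
                        + " 'properties.bootstrap.servers' = 'localhost:9092',"
                        + " 'format' = 'debezium-avro-confluent',"
                        + " 'debezium-avro-confluent.url' = 'http://localhost:8081',"
                        // Mandatory for serialization, as validated above.
                        + " 'debezium-avro-confluent.subject' = 'users_cdc-value'"
                        + ")");
    }
}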
Use of org.apache.flink.table.api.ValidationException in project flink by apache.
From class KafkaDynamicTableFactory, method validatePKConstraints:
private static void validatePKConstraints(
        ObjectIdentifier tableName,
        int[] primaryKeyIndexes,
        Map<String, String> options,
        Format format) {
    if (primaryKeyIndexes.length > 0
            && format.getChangelogMode().containsOnly(RowKind.INSERT)) {
        Configuration configuration = Configuration.fromMap(options);
        String formatName = configuration
                .getOptional(FactoryUtil.FORMAT)
                .orElse(configuration.get(VALUE_FORMAT));
        throw new ValidationException(String.format(
                "The Kafka table '%s' with '%s' format doesn't support defining PRIMARY KEY constraint"
                        + " on the table, because it can't guarantee the semantic of primary key.",
                tableName.asSummaryString(), formatName));
    }
}
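A hedged sketch of DDL that trips this check: the plain 'kafka' connector with an insert-only value format ('json') plus a PRIMARY KEY. The exception surfaces when the table is used in a query (the factory builds the source or sink at planning time), not at CREATE TABLE time; the usual fix is the 'upsert-kafka' connector or a changelog-capable format. Names and addresses are placeholders:

import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.ValidationException;

public class KafkaPkConstraintSketch {
    static void demonstrate(TableEnvironment tEnv) {
        tEnv.executeSql(
                "CREATE TABLE pk_users (id BIGINT, name STRING,"
                        + " PRIMARY KEY (id) NOT ENFORCED) WITH ("
                        + " 'connector' = 'kafka',"
                        + " 'topic' = 'users',"
                        + " 'properties.bootstrap.servers' = 'localhost:9092',"
                        + " 'format' = 'json'" // 'json' has an insert-only changelog mode
                        + ")");
        try {
            // Planning the query creates the table source, which runs
            // validatePKConstraints and rejects the PRIMARY KEY definition.
            tEnv.executeSql("SELECT * FROM pk_users");
        } catch (ValidationException e) {
            System.out.println(e.getMessage());
        }
    }
}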