Example 11 with ValidationException

Use of org.apache.flink.table.api.ValidationException in the Apache Flink project.

From the class HiveParserDDLSemanticAnalyzer, method getAlteredTable.

private CatalogBaseTable getAlteredTable(String tableName, boolean expectView) {
    ObjectIdentifier objectIdentifier = parseObjectIdentifier(tableName);
    CatalogBaseTable catalogBaseTable = getCatalogBaseTable(objectIdentifier);
    if (expectView) {
        if (catalogBaseTable instanceof CatalogTable) {
            throw new ValidationException("ALTER VIEW for a table is not allowed");
        }
    } else {
        if (catalogBaseTable instanceof CatalogView) {
            throw new ValidationException("ALTER TABLE for a view is not allowed");
        }
    }
    return catalogBaseTable;
}
Also used: CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable), ValidationException (org.apache.flink.table.api.ValidationException), CatalogTable (org.apache.flink.table.catalog.CatalogTable), CatalogView (org.apache.flink.table.catalog.CatalogView), ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier)
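
This guard is easy to exercise from SQL. A minimal sketch, assuming the Hive dialect is active (which requires the Hive connector and a registered HiveCatalog) and that the current catalog contains a plain table named orders; both are assumptions, not part of the original example:

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.SqlDialect;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.ValidationException;

public class AlterViewOnTableSketch {

    public static void main(String[] args) {
        TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inBatchMode());
        // Hive-dialect DDL is routed through HiveParserDDLSemanticAnalyzer.
        tEnv.getConfig().setSqlDialect(SqlDialect.HIVE);
        try {
            // 'orders' is a plain table, so getAlteredTable(expectView=true) rejects it.
            tEnv.executeSql("ALTER VIEW orders RENAME TO orders_v");
        } catch (ValidationException e) {
            System.err.println(e.getMessage()); // "ALTER VIEW for a table is not allowed"
        }
    }
}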

Example 12 with ValidationException

Use of org.apache.flink.table.api.ValidationException in the Apache Flink project.

From the class HiveParserDDLSemanticAnalyzer, method convertAlterTableFileFormat.

private Operation convertAlterTableFileFormat(CatalogBaseTable alteredTable, HiveParserASTNode ast, String tableName, HashMap<String, String> partSpec) throws SemanticException {
    HiveParserStorageFormat format = new HiveParserStorageFormat(conf);
    HiveParserASTNode child = (HiveParserASTNode) ast.getChild(0);
    if (!format.fillStorageFormat(child)) {
        throw new ValidationException("Unknown AST node for ALTER TABLE FILEFORMAT: " + child);
    }
    Map<String, String> newProps = new HashMap<>();
    newProps.put(ALTER_TABLE_OP, CHANGE_FILE_FORMAT.name());
    newProps.put(STORED_AS_FILE_FORMAT, format.getGenericName());
    return convertAlterTableProps(alteredTable, tableName, partSpec, newProps);
}
Also used: HiveParserASTNode (org.apache.flink.table.planner.delegation.hive.copy.HiveParserASTNode), ValidationException (org.apache.flink.table.api.ValidationException), HiveParserStorageFormat (org.apache.flink.table.planner.delegation.hive.copy.HiveParserStorageFormat), LinkedHashMap (java.util.LinkedHashMap), HashMap (java.util.HashMap)
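
This conversion is reached by Hive-dialect statements of the form ALTER TABLE ... SET FILEFORMAT. A hedged sketch of the happy path, reusing the Hive-dialect tEnv from the previous sketch (the table logs is hypothetical):

// Parsed into a HiveParserASTNode and handed to convertAlterTableFileFormat;
// a storage-format child node that fillStorageFormat() cannot interpret would
// raise the ValidationException above instead.
tEnv.executeSql("ALTER TABLE logs SET FILEFORMAT ORC");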

Example 13 with ValidationException

Use of org.apache.flink.table.api.ValidationException in the Apache Flink project.

From the class RegistryAvroFormatFactory, method createEncodingFormat.

@Override
public EncodingFormat<SerializationSchema<RowData>> createEncodingFormat(DynamicTableFactory.Context context, ReadableConfig formatOptions) {
    FactoryUtil.validateFactoryOptions(this, formatOptions);
    String schemaRegistryURL = formatOptions.get(URL);
    Optional<String> subject = formatOptions.getOptional(SUBJECT);
    Map<String, ?> optionalPropertiesMap = buildOptionalPropertiesMap(formatOptions);
    if (!subject.isPresent()) {
        throw new ValidationException(String.format("Option %s.%s is required for serialization", IDENTIFIER, SUBJECT.key()));
    }
    return new EncodingFormat<SerializationSchema<RowData>>() {

        @Override
        public SerializationSchema<RowData> createRuntimeEncoder(DynamicTableSink.Context context, DataType consumedDataType) {
            final RowType rowType = (RowType) consumedDataType.getLogicalType();
            return new AvroRowDataSerializationSchema(
                    rowType,
                    ConfluentRegistryAvroSerializationSchema.forGeneric(
                            subject.get(),
                            AvroSchemaConverter.convertToSchema(rowType),
                            schemaRegistryURL,
                            optionalPropertiesMap),
                    RowDataToAvroConverters.createConverter(rowType));
        }

        @Override
        public ChangelogMode getChangelogMode() {
            return ChangelogMode.insertOnly();
        }
    };
}
Also used: EncodingFormat (org.apache.flink.table.connector.format.EncodingFormat), RowData (org.apache.flink.table.data.RowData), AvroRowDataSerializationSchema (org.apache.flink.formats.avro.AvroRowDataSerializationSchema), ValidationException (org.apache.flink.table.api.ValidationException), DataType (org.apache.flink.table.types.DataType), RowType (org.apache.flink.table.types.logical.RowType)
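
The subject check only applies when the format is used for serialization, so a sink table has to set it explicitly. A minimal DDL sketch (connector wiring and addresses are placeholders, and the exact option keys depend on the Flink version):

// Reusing a TableEnvironment tEnv; 'avro-confluent.subject' satisfies the
// check in createEncodingFormat. Omitting it throws
// "Option avro-confluent.subject is required for serialization".
tEnv.executeSql(
        "CREATE TABLE users_sink (id BIGINT, name STRING) WITH ("
                + " 'connector' = 'kafka',"
                + " 'topic' = 'users',"
                + " 'properties.bootstrap.servers' = 'localhost:9092',"
                + " 'format' = 'avro-confluent',"
                + " 'avro-confluent.url' = 'http://localhost:8081',"
                + " 'avro-confluent.subject' = 'users-value')");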

Example 14 with ValidationException

Use of org.apache.flink.table.api.ValidationException in the Apache Flink project.

From the class DebeziumAvroFormatFactory, method createEncodingFormat.

@Override
public EncodingFormat<SerializationSchema<RowData>> createEncodingFormat(DynamicTableFactory.Context context, ReadableConfig formatOptions) {
    FactoryUtil.validateFactoryOptions(this, formatOptions);
    String schemaRegistryURL = formatOptions.get(URL);
    Optional<String> subject = formatOptions.getOptional(SUBJECT);
    Map<String, ?> optionalPropertiesMap = buildOptionalPropertiesMap(formatOptions);
    if (!subject.isPresent()) {
        throw new ValidationException(String.format("Option '%s.%s' is required for serialization", IDENTIFIER, SUBJECT.key()));
    }
    return new EncodingFormat<SerializationSchema<RowData>>() {

        @Override
        public ChangelogMode getChangelogMode() {
            return ChangelogMode.newBuilder()
                    .addContainedKind(RowKind.INSERT)
                    .addContainedKind(RowKind.UPDATE_BEFORE)
                    .addContainedKind(RowKind.UPDATE_AFTER)
                    .addContainedKind(RowKind.DELETE)
                    .build();
        }

        @Override
        public SerializationSchema<RowData> createRuntimeEncoder(DynamicTableSink.Context context, DataType consumedDataType) {
            final RowType rowType = (RowType) consumedDataType.getLogicalType();
            return new DebeziumAvroSerializationSchema(rowType, schemaRegistryURL, subject.get(), optionalPropertiesMap);
        }
    };
}
Also used: EncodingFormat (org.apache.flink.table.connector.format.EncodingFormat), RowData (org.apache.flink.table.data.RowData), ValidationException (org.apache.flink.table.api.ValidationException), DataType (org.apache.flink.table.types.DataType), RowType (org.apache.flink.table.types.logical.RowType)
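
Compared with Example 13, the structural difference is the changelog mode: this encoder accepts INSERT, UPDATE_BEFORE, UPDATE_AFTER and DELETE rows, which is what lets it emit Debezium-style change events. The subject requirement behaves the same way; a sketch of the failure mode (all names are hypothetical, and the exception may arrive wrapped, so the catch below is deliberately broad):

tEnv.executeSql(
        "CREATE TABLE changelog_sink (id BIGINT, name STRING) WITH ("
                + " 'connector' = 'kafka',"
                + " 'topic' = 'users_cdc',"
                + " 'properties.bootstrap.servers' = 'localhost:9092',"
                + " 'format' = 'debezium-avro-confluent',"
                // No 'debezium-avro-confluent.subject' option is set.
                + " 'debezium-avro-confluent.url' = 'http://localhost:8081')");
try {
    // The encoder is built while planning the INSERT, which is where
    // createEncodingFormat throws the ValidationException.
    tEnv.executeSql("INSERT INTO changelog_sink VALUES (1, 'alice')");
} catch (Exception e) {
    System.err.println(e.getMessage()); // "Option '...subject' is required for serialization"
}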

Example 15 with ValidationException

Use of org.apache.flink.table.api.ValidationException in the Apache Flink project.

From the class KafkaDynamicTableFactory, method validatePKConstraints.

private static void validatePKConstraints(ObjectIdentifier tableName, int[] primaryKeyIndexes, Map<String, String> options, Format format) {
    if (primaryKeyIndexes.length > 0 && format.getChangelogMode().containsOnly(RowKind.INSERT)) {
        Configuration configuration = Configuration.fromMap(options);
        String formatName = configuration.getOptional(FactoryUtil.FORMAT).orElse(configuration.get(VALUE_FORMAT));
        throw new ValidationException(
                String.format(
                        "The Kafka table '%s' with '%s' format doesn't support defining PRIMARY KEY constraint"
                                + " on the table, because it can't guarantee the semantic of primary key.",
                        tableName.asSummaryString(),
                        formatName));
    }
}
Also used: ValidationException (org.apache.flink.table.api.ValidationException), Configuration (org.apache.flink.configuration.Configuration)
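
CREATE TABLE itself only writes to the catalog, so the statement below succeeds; the exception surfaces once the table is used in a query and the factory runs validatePKConstraints. A sketch with the insert-only 'json' format (all names are illustrative):

tEnv.executeSql(
        "CREATE TABLE pk_table ("
                + " id BIGINT,"
                + " name STRING,"
                + " PRIMARY KEY (id) NOT ENFORCED"
                + ") WITH ("
                + " 'connector' = 'kafka',"
                + " 'topic' = 'users',"
                + " 'properties.bootstrap.servers' = 'localhost:9092',"
                + " 'format' = 'json')");
// 'json' produces an insert-only changelog, so planning this query throws:
// "The Kafka table ... with 'json' format doesn't support defining PRIMARY KEY constraint ..."
tEnv.executeSql("SELECT * FROM pk_table");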

Aggregations

ValidationException (org.apache.flink.table.api.ValidationException): 143
DataType (org.apache.flink.table.types.DataType): 25
Test (org.junit.Test): 23
HashMap (java.util.HashMap): 21
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 19
LogicalType (org.apache.flink.table.types.logical.LogicalType): 18
TableException (org.apache.flink.table.api.TableException): 17
List (java.util.List): 14
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 14
QueryOperation (org.apache.flink.table.operations.QueryOperation): 14
LinkedHashMap (java.util.LinkedHashMap): 13
DescriptorProperties (org.apache.flink.table.descriptors.DescriptorProperties): 13
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 12
Expression (org.apache.flink.table.expressions.Expression): 12
TableSchema (org.apache.flink.table.api.TableSchema): 11
Catalog (org.apache.flink.table.catalog.Catalog): 11
ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable): 11
ArrayList (java.util.ArrayList): 10
Map (java.util.Map): 10
Internal (org.apache.flink.annotation.Internal): 10