Example 66 with ValidationException

use of org.apache.flink.table.api.ValidationException in project flink by apache.

the class AliasOperationUtils method createAliasList.

/**
 * Creates a list of valid alias expressions. The resulting expressions might still contain
 * {@link UnresolvedReferenceExpression}.
 *
 * @param aliases aliases to validate
 * @param child relational operation on top of which to apply the aliases
 * @return validated list of aliases
 */
static List<Expression> createAliasList(List<Expression> aliases, QueryOperation child) {
    ResolvedSchema childSchema = child.getResolvedSchema();
    if (aliases.size() > childSchema.getColumnCount()) {
        throw new ValidationException("Aliasing more fields than we actually have.");
    }
    // aliasLiteralValidator is a visitor defined in AliasOperationUtils that checks
    // each alias is a plain string literal.
    List<ValueLiteralExpression> fieldAliases =
            aliases.stream()
                    .map(f -> f.accept(aliasLiteralValidator))
                    .collect(Collectors.toList());
    List<String> childNames = childSchema.getColumnNames();
    return IntStream.range(0, childNames.size())
            .mapToObj(
                    idx -> {
                        UnresolvedReferenceExpression oldField = unresolvedRef(childNames.get(idx));
                        if (idx < fieldAliases.size()) {
                            // Wrap the original field in an AS call carrying its new name.
                            ValueLiteralExpression alias = fieldAliases.get(idx);
                            return unresolvedCall(BuiltInFunctionDefinitions.AS, oldField, alias);
                        } else {
                            // Fewer aliases than columns: remaining columns keep their names.
                            return oldField;
                        }
                    })
            .collect(Collectors.toList());
}
Also used : IntStream(java.util.stream.IntStream) QueryOperation(org.apache.flink.table.operations.QueryOperation) ApiExpressionUtils.valueLiteral(org.apache.flink.table.expressions.ApiExpressionUtils.valueLiteral) UnresolvedReferenceExpression(org.apache.flink.table.expressions.UnresolvedReferenceExpression) ExpressionUtils(org.apache.flink.table.expressions.ExpressionUtils) ApiExpressionDefaultVisitor(org.apache.flink.table.expressions.utils.ApiExpressionDefaultVisitor) BuiltInFunctionDefinitions(org.apache.flink.table.functions.BuiltInFunctionDefinitions) ApiExpressionUtils.unresolvedCall(org.apache.flink.table.expressions.ApiExpressionUtils.unresolvedCall) ResolvedSchema(org.apache.flink.table.catalog.ResolvedSchema) Expression(org.apache.flink.table.expressions.Expression) ValueLiteralExpression(org.apache.flink.table.expressions.ValueLiteralExpression) Collectors(java.util.stream.Collectors) ApiExpressionUtils.unresolvedRef(org.apache.flink.table.expressions.ApiExpressionUtils.unresolvedRef) List(java.util.List) ValidationException(org.apache.flink.table.api.ValidationException) Internal(org.apache.flink.annotation.Internal)
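
For context, the validation above surfaces directly through the Table API's as(...) call. A minimal sketch of the passing and failing cases, assuming Flink 1.14+ (the table and alias names are illustrative):

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableEnvironment;

import static org.apache.flink.table.api.Expressions.row;

public class AliasValidationSketch {
    public static void main(String[] args) {
        TableEnvironment env = TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        // Two columns (f0, f1) are derived from the row values.
        Table t = env.fromValues(row(1, "x"));
        // Valid: fewer aliases than columns; unaliased columns keep their old names.
        t.as("a");
        // Valid: exactly one alias per column.
        t.as("a", "b");
        // Throws ValidationException("Aliasing more fields than we actually have."):
        // t.as("a", "b", "c");
    }
}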

Example 67 with ValidationException

use of org.apache.flink.table.api.ValidationException in project flink by apache.

the class TestValuesTableFactory method validateAndExtractRowtimeIndex.

private static int validateAndExtractRowtimeIndex(
        CatalogTable sinkTable, boolean dropLateEvent, boolean isInsertOnly) {
    if (!dropLateEvent) {
        return -1;
    } else if (!isInsertOnly) {
        throw new ValidationException("Option 'sink.drop-late-event' only works for insert-only sink now.");
    }
    TableSchema schema = sinkTable.getSchema();
    List<WatermarkSpec> watermarkSpecs = schema.getWatermarkSpecs();
    if (watermarkSpecs.isEmpty()) {
        throw new ValidationException(
                "Please define the watermark in the schema that is used to indicate the rowtime column. "
                        + "The sink function will compare the rowtime and the current watermark to determine whether the event is late.");
    }
    String rowtimeName = watermarkSpecs.get(0).getRowtimeAttribute();
    return Arrays.asList(schema.getFieldNames()).indexOf(rowtimeName);
}
Also used : ValidationException(org.apache.flink.table.api.ValidationException) TableSchema(org.apache.flink.table.api.TableSchema) WatermarkSpec(org.apache.flink.table.api.WatermarkSpec)
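
As a usage note, the check above means 'sink.drop-late-event' (an option of this test-only 'values' connector) only works for an insert-only sink whose schema declares a watermark. A hedged sketch of a DDL that satisfies both conditions (table and column names are illustrative):

TableEnvironment env = TableEnvironment.create(EnvironmentSettings.inStreamingMode());
env.executeSql(
        "CREATE TABLE late_event_sink ("
                + " id INT,"
                + " ts TIMESTAMP(3),"
                + " WATERMARK FOR ts AS ts - INTERVAL '5' SECOND"
                + ") WITH ("
                + " 'connector' = 'values',"
                + " 'sink.drop-late-event' = 'true'"
                + ")");

Without the WATERMARK clause, this fails with the "Please define the watermark ..." message shown above.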

Example 68 with ValidationException

use of org.apache.flink.table.api.ValidationException in project flink by apache.

the class OperationConverterUtils method convertChangeColumn.

public static Operation convertChangeColumn(
        ObjectIdentifier tableIdentifier,
        SqlChangeColumn changeColumn,
        CatalogTable catalogTable,
        SqlValidator sqlValidator) {
    String oldName = changeColumn.getOldName().getSimple();
    if (catalogTable.getPartitionKeys().contains(oldName)) {
        // Changing partition columns is disallowed.
        throw new ValidationException("CHANGE COLUMN cannot be applied to partition columns");
    }
    TableSchema oldSchema = catalogTable.getSchema();
    boolean first = changeColumn.isFirst();
    String after = changeColumn.getAfter() == null ? null : changeColumn.getAfter().getSimple();
    TableColumn newTableColumn = toTableColumn(changeColumn.getNewColumn(), sqlValidator);
    TableSchema newSchema = changeColumn(oldSchema, oldName, newTableColumn, first, after);
    Map<String, String> newProperties = new HashMap<>(catalogTable.getOptions());
    newProperties.putAll(extractProperties(changeColumn.getProperties()));
    return new AlterTableSchemaOperation(
            tableIdentifier,
            new CatalogTableImpl(
                    newSchema,
                    catalogTable.getPartitionKeys(),
                    newProperties,
                    catalogTable.getComment()));
    // TODO: handle watermark and constraints
}
Also used : ValidationException(org.apache.flink.table.api.ValidationException) TableSchema(org.apache.flink.table.api.TableSchema) HashMap(java.util.HashMap) CatalogTableImpl(org.apache.flink.table.catalog.CatalogTableImpl) AlterTableSchemaOperation(org.apache.flink.table.operations.ddl.AlterTableSchemaOperation) TableColumn(org.apache.flink.table.api.TableColumn) SqlTableColumn(org.apache.flink.sql.parser.ddl.SqlTableColumn)
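
To see what reaches this converter: SqlChangeColumn is produced by parsing a Hive-style ALTER TABLE ... CHANGE COLUMN statement, so dialect support is an assumption in this sketch. An illustrative statement and the rejected case:

// Illustrative only: assumes a TableEnvironment `tableEnv` and Hive-style
// CHANGE COLUMN support in the active SQL dialect.
// Rename column b to b2, change its type, and move it after column a:
tableEnv.executeSql("ALTER TABLE t CHANGE COLUMN b b2 BIGINT AFTER a");
// Had b been a partition key, the conversion would throw:
// ValidationException("CHANGE COLUMN cannot be applied to partition columns")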

Example 69 with ValidationException

use of org.apache.flink.table.api.ValidationException in project flink by apache.

the class FileSystemTableFactoryTest method testUnsupportedOptionSink.

@Test
public void testUnsupportedOptionSink() {
    DescriptorProperties descriptor = new DescriptorProperties();
    descriptor.putString(FactoryUtil.CONNECTOR.key(), "filesystem");
    descriptor.putString("path", "/tmp");
    descriptor.putString("format", "csv");
    descriptor.putString("my_option", "my");
    try {
        createTableSink(SCHEMA, descriptor.asMap());
    } catch (ValidationException e) {
        Throwable cause = e.getCause();
        assertTrue(cause.toString(), cause instanceof ValidationException);
        assertTrue(cause.getMessage(), cause.getMessage().contains("Unsupported options:\n\nmy_option"));
        return;
    }
    fail("Should fail by ValidationException.");
}
Also used : ValidationException(org.apache.flink.table.api.ValidationException) DescriptorProperties(org.apache.flink.table.descriptors.DescriptorProperties) Test(org.junit.Test)
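
As a side note, the try/catch/fail pattern can be written more compactly with assertThrows from org.junit.Assert (available since JUnit 4.13). A sketch of the equivalent assertion, not the test's actual form:

ValidationException e =
        assertThrows(
                ValidationException.class,
                () -> createTableSink(SCHEMA, descriptor.asMap()));
assertTrue(e.getCause() instanceof ValidationException);
assertTrue(e.getCause().getMessage().contains("Unsupported options:\n\nmy_option"));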

Example 70 with ValidationException

use of org.apache.flink.table.api.ValidationException in project flink by apache.

the class FileSystemTableFactoryTest method testUnsupportedOptionSource.

@Test
public void testUnsupportedOptionSource() {
    DescriptorProperties descriptor = new DescriptorProperties();
    descriptor.putString(FactoryUtil.CONNECTOR.key(), "filesystem");
    descriptor.putString("path", "/tmp");
    descriptor.putString("format", "csv");
    descriptor.putString("my_option", "my");
    try {
        createTableSource(SCHEMA, descriptor.asMap());
    } catch (ValidationException e) {
        Throwable cause = e.getCause();
        assertTrue(cause.toString(), cause instanceof ValidationException);
        assertTrue(cause.getMessage(), cause.getMessage().contains("Unsupported options:\n\nmy_option"));
        return;
    }
    fail("Should fail by ValidationException.");
}
Also used : ValidationException(org.apache.flink.table.api.ValidationException) DescriptorProperties(org.apache.flink.table.descriptors.DescriptorProperties) Test(org.junit.Test)
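
Both tests hinge on the same contract: FactoryUtil validates the supplied options against the sets a factory declares via requiredOptions() and optionalOptions(), so an undeclared key such as my_option is reported as unsupported. A minimal sketch of that declaration side (a hypothetical factory, not the real filesystem one):

import java.util.HashSet;
import java.util.Set;

import org.apache.flink.configuration.ConfigOption;
import org.apache.flink.configuration.ConfigOptions;
import org.apache.flink.table.factories.Factory;

// Hypothetical factory for illustration.
public class MyOptionAwareFactory implements Factory {

    private static final ConfigOption<String> PATH =
            ConfigOptions.key("path").stringType().noDefaultValue();
    private static final ConfigOption<String> FORMAT =
            ConfigOptions.key("format").stringType().noDefaultValue();

    @Override
    public String factoryIdentifier() {
        return "my-connector";
    }

    @Override
    public Set<ConfigOption<?>> requiredOptions() {
        Set<ConfigOption<?>> options = new HashSet<>();
        options.add(PATH);
        options.add(FORMAT);
        return options;
    }

    @Override
    public Set<ConfigOption<?>> optionalOptions() {
        // Keys absent from both sets trigger the "Unsupported options" error.
        return new HashSet<>();
    }
}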

Aggregations

ValidationException (org.apache.flink.table.api.ValidationException): 143 usages
DataType (org.apache.flink.table.types.DataType): 25 usages
Test (org.junit.Test): 23 usages
HashMap (java.util.HashMap): 21 usages
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 19 usages
LogicalType (org.apache.flink.table.types.logical.LogicalType): 18 usages
TableException (org.apache.flink.table.api.TableException): 17 usages
List (java.util.List): 14 usages
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 14 usages
QueryOperation (org.apache.flink.table.operations.QueryOperation): 14 usages
LinkedHashMap (java.util.LinkedHashMap): 13 usages
DescriptorProperties (org.apache.flink.table.descriptors.DescriptorProperties): 13 usages
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 12 usages
Expression (org.apache.flink.table.expressions.Expression): 12 usages
TableSchema (org.apache.flink.table.api.TableSchema): 11 usages
Catalog (org.apache.flink.table.catalog.Catalog): 11 usages
ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable): 11 usages
ArrayList (java.util.ArrayList): 10 usages
Map (java.util.Map): 10 usages
Internal (org.apache.flink.annotation.Internal): 10 usages