
Example 56 with ValidationException

use of org.apache.flink.table.api.ValidationException in project flink by apache.

the class CommonExecSinkITCase method testNullEnforcer.

@Test
public void testNullEnforcer() throws ExecutionException, InterruptedException {
    final StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
    final List<Row> rows = Arrays.asList(
            Row.of(1, "Apache", 11),
            Row.of(2, null, 22),
            Row.of(null, "Flink", 33),
            Row.of(null, null, 44));
    final SharedReference<List<RowData>> results = sharedObjects.add(new ArrayList<>());
    tableEnv.createTable("T1", TableFactoryHarness.newBuilder()
            .schema(schemaForNotNullEnforcer())
            .source(new TestSource(rows))
            .sink(buildRuntimeSinkProvider(new RecordWriter(results)))
            .build());
    // Default config (ERROR) - writing a null into a NOT NULL column fails the job
    final ExecutionException ee = assertThrows(
            ExecutionException.class,
            () -> tableEnv.executeSql("INSERT INTO T1 SELECT * FROM T1").await());
    assertThat(ExceptionUtils.findThrowableWithMessage(ee,
            "Column 'b' is NOT NULL, however, a null value is being written into it. "
                    + "You can set job configuration 'table.exec.sink.not-null-enforcer'='DROP' "
                    + "to suppress this exception and drop such records silently.")
            .isPresent()).isTrue();
    // Test not including a NOT NULL column
    results.get().clear();
    final ValidationException ve = assertThrows(
            ValidationException.class,
            () -> tableEnv.executeSql("INSERT INTO T1(a, b) SELECT (a, b) FROM T1").await());
    assertThat(ve.getMessage()).isEqualTo(
            "SQL validation failed. At line 0, column 0: Column 'c' has no default "
                    + "value and does not allow NULLs");
    // Change the option to DROP, so rows with null values are dropped instead of failing the job
    try {
        tableEnv.getConfig().set(
                TABLE_EXEC_SINK_NOT_NULL_ENFORCER.key(),
                ExecutionConfigOptions.NotNullEnforcer.DROP.name());
        results.get().clear();
        tableEnv.executeSql("INSERT INTO T1 SELECT * FROM T1").await();
        assertThat(results.get().size()).isEqualTo(2);
        assertThat(results.get().get(0).getInt(0)).isEqualTo(1);
        assertThat(results.get().get(0).getString(1).toString()).isEqualTo("Apache");
        assertThat(results.get().get(0).getInt(2)).isEqualTo(11);
        assertThat(results.get().get(1).isNullAt(0)).isTrue();
        assertThat(results.get().get(1).getString(1).toString()).isEqualTo("Flink");
        assertThat(results.get().get(1).getInt(2)).isEqualTo(33);
    } finally {
        tableEnv.getConfig().set(
                TABLE_EXEC_SINK_NOT_NULL_ENFORCER.key(),
                ExecutionConfigOptions.NotNullEnforcer.ERROR.name());
    }
}
Also used : ValidationException(org.apache.flink.table.api.ValidationException) List(java.util.List) ArrayList(java.util.ArrayList) StreamTableEnvironment(org.apache.flink.table.api.bridge.java.StreamTableEnvironment) Row(org.apache.flink.types.Row) ExecutionException(java.util.concurrent.ExecutionException) Test(org.junit.Test)
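
For reference, the behavior this test toggles hangs off a single option. The following standalone sketch (ours, not part of the Flink test suite) shows the typed way to set it, assuming a Flink version (1.15+) where TableConfig accepts ConfigOption values directly:

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.config.ExecutionConfigOptions;

public class NotNullEnforcerConfig {
    public static void main(String[] args) {
        TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        // ERROR (the default) fails the job when a NULL is written into a
        // NOT NULL column; DROP silently discards the offending rows instead.
        tEnv.getConfig().set(
                ExecutionConfigOptions.TABLE_EXEC_SINK_NOT_NULL_ENFORCER,
                ExecutionConfigOptions.NotNullEnforcer.DROP);
    }
}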

Example 57 with ValidationException

use of org.apache.flink.table.api.ValidationException in project flink by apache.

the class SchemaValidator method validate.

@Override
public void validate(DescriptorProperties properties) {
    Map<String, String> names = properties.getIndexedProperty(SCHEMA, SCHEMA_NAME);
    Map<String, String> legacyTypes = properties.getIndexedProperty(SCHEMA, SCHEMA_TYPE);
    Map<String, String> dataTypes = properties.getIndexedProperty(SCHEMA, SCHEMA_DATA_TYPE);
    if (names.isEmpty() && legacyTypes.isEmpty() && dataTypes.isEmpty()) {
        throw new ValidationException(format("Could not find the required schema in property '%s'.", SCHEMA));
    }
    boolean proctimeFound = false;
    for (int i = 0; i < Math.max(names.size(), legacyTypes.size()); i++) {
        properties.validateString(SCHEMA + "." + i + "." + SCHEMA_NAME, false, 1);
        properties.validateDataType(SCHEMA + "." + i + "." + SCHEMA_DATA_TYPE, SCHEMA + "." + i + "." + SCHEMA_TYPE, false);
        properties.validateString(SCHEMA + "." + i + "." + SCHEMA_FROM, true, 1);
        // either proctime or rowtime
        String proctime = SCHEMA + "." + i + "." + SCHEMA_PROCTIME;
        String rowtime = SCHEMA + "." + i + "." + ROWTIME;
        if (properties.containsKey(proctime)) {
            // check the environment
            if (!isStreamEnvironment) {
                throw new ValidationException(
                        format("Property '%s' is not allowed in a batch environment.", proctime));
            } else if (proctimeFound) {
                // check for only one proctime attribute
                throw new ValidationException("A proctime attribute must only be defined once.");
            }
            // check proctime
            properties.validateBoolean(proctime, false);
            proctimeFound = properties.getBoolean(proctime);
            // no rowtime
            properties.validatePrefixExclusion(rowtime);
        } else if (properties.hasPrefix(rowtime)) {
            // check rowtime
            RowtimeValidator rowtimeValidator = new RowtimeValidator(supportsSourceTimestamps, supportsSourceWatermarks, SCHEMA + "." + i + ".");
            rowtimeValidator.validate(properties);
            // no proctime
            properties.validateExclusion(proctime);
        }
    }
}
Also used : ValidationException(org.apache.flink.table.api.ValidationException)
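
To make the indexed key layout concrete, here is a hedged sketch (key names follow the SCHEMA_* constants used above; the field names are invented) of a property set that this validator accepts, with one physical column and one processing-time attribute:

import org.apache.flink.table.descriptors.DescriptorProperties;

public class SchemaPropertiesSketch {
    static DescriptorProperties sampleSchema() {
        DescriptorProperties properties = new DescriptorProperties();
        // field 0: an ordinary physical column
        properties.putString("schema.0.name", "user_id");
        properties.putString("schema.0.data-type", "INT");
        // field 1: a processing-time attribute; setting 'schema.1.proctime'
        // takes the proctime branch above, so no 'schema.1.rowtime.*' keys
        // may be present for this field
        properties.putString("schema.1.name", "proc");
        properties.putString("schema.1.data-type", "TIMESTAMP(3)");
        properties.putBoolean("schema.1.proctime", true);
        return properties;
    }
}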

Example 58 with ValidationException

use of org.apache.flink.table.api.ValidationException in project flink by apache.

the class SchemaValidator method deriveFieldMapping.

/**
 * Finds a table source field mapping.
 *
 * @param properties The properties describing a schema.
 * @param inputType The input type that a connector and/or format produces. This parameter can
 *     be used to resolve a rowtime field against an input field.
 */
public static Map<String, String> deriveFieldMapping(DescriptorProperties properties, Optional<TypeInformation<?>> inputType) {
    Map<String, String> mapping = new HashMap<>();
    TableSchema schema = properties.getTableSchema(SCHEMA);
    List<String> columnNames = new ArrayList<>();
    inputType.ifPresent(t -> columnNames.addAll(Arrays.asList(((CompositeType) t).getFieldNames())));
    // add all source fields first because rowtime might reference one of them
    columnNames.forEach(name -> mapping.put(name, name));
    // then add all schema fields for implicit mappings
    Arrays.stream(schema.getFieldNames()).forEach(name -> mapping.put(name, name));
    Map<String, String> names = properties.getIndexedProperty(SCHEMA, SCHEMA_NAME);
    for (int i = 0; i < names.size(); i++) {
        String name = properties.getString(SCHEMA + "." + i + "." + SCHEMA_NAME);
        Optional<String> source = properties.getOptionalString(SCHEMA + "." + i + "." + SCHEMA_FROM);
        if (source.isPresent()) {
            // add explicit mapping
            mapping.put(name, source.get());
        } else {
            // implicit mapping or time
            boolean isProctime = properties.getOptionalBoolean(SCHEMA + "." + i + "." + SCHEMA_PROCTIME).orElse(false);
            boolean isRowtime = properties.containsKey(SCHEMA + "." + i + "." + ROWTIME_TIMESTAMPS_TYPE);
            boolean isGeneratedColumn = properties.containsKey(SCHEMA + "." + i + "." + EXPR);
            // remove proctime/rowtime from mapping
            if (isProctime || isRowtime || isGeneratedColumn) {
                mapping.remove(name);
            } else if (!columnNames.contains(name)) {
                // check for invalid fields
                throw new ValidationException(format(
                        "Could not map the schema field '%s' to a field from source. "
                                + "Please specify the source field from which it can be derived.",
                        name));
            }
        }
    }
    return mapping;
}
Also used : ValidationException(org.apache.flink.table.api.ValidationException) TableSchema(org.apache.flink.table.api.TableSchema) HashMap(java.util.HashMap) ArrayList(java.util.ArrayList)
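
A hedged usage sketch of the method above (field and table names invented): one schema field maps implicitly by name, a second is renamed through 'schema.<i>.from', and the input type supplies the source columns to validate against:

import java.util.Map;
import java.util.Optional;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.table.descriptors.DescriptorProperties;
import org.apache.flink.table.descriptors.SchemaValidator;

public class FieldMappingSketch {
    static Map<String, String> sampleMapping() {
        DescriptorProperties properties = new DescriptorProperties();
        properties.putString("schema.0.name", "user_id");
        properties.putString("schema.0.data-type", "INT");
        properties.putString("schema.1.name", "ts");
        properties.putString("schema.1.data-type", "TIMESTAMP(3)");
        properties.putString("schema.1.from", "timestamp");
        // the connector/format produces the fields 'user_id' and 'timestamp'
        TypeInformation<?> inputType = Types.ROW_NAMED(
                new String[] {"user_id", "timestamp"}, Types.INT, Types.SQL_TIMESTAMP);
        // expected: {user_id=user_id, timestamp=timestamp, ts=timestamp}
        return SchemaValidator.deriveFieldMapping(properties, Optional.of(inputType));
    }
}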

Example 59 with ValidationException

use of org.apache.flink.table.api.ValidationException in project flink by apache.

the class ProjectionOperationFactory method validateAndGetUniqueNames.

private String[] validateAndGetUniqueNames(List<ResolvedExpression> namedExpressions) {
    // we need to maintain field names order to match with types
    final Set<String> names = new LinkedHashSet<>();
    extractNames(namedExpressions).stream()
            .map(name -> name.orElseThrow(
                    () -> new TableException("Could not name a field in a projection.")))
            .forEach(name -> {
                if (!names.add(name)) {
                    throw new ValidationException("Ambiguous column name: " + name);
                }
            });
    return names.toArray(new String[0]);
}
Also used : LinkedHashSet(java.util.LinkedHashSet) IntStream(java.util.stream.IntStream) DataType(org.apache.flink.table.types.DataType) QueryOperation(org.apache.flink.table.operations.QueryOperation) CallExpression(org.apache.flink.table.expressions.CallExpression) ResolvedSchema(org.apache.flink.table.catalog.ResolvedSchema) Expression(org.apache.flink.table.expressions.Expression) LocalReferenceExpression(org.apache.flink.table.expressions.LocalReferenceExpression) ResolvedExpressionDefaultVisitor(org.apache.flink.table.expressions.utils.ResolvedExpressionDefaultVisitor) ResolvedExpression(org.apache.flink.table.expressions.ResolvedExpression) ExpressionResolver(org.apache.flink.table.expressions.resolver.ExpressionResolver) TableReferenceExpression(org.apache.flink.table.expressions.TableReferenceExpression) GET(org.apache.flink.table.functions.BuiltInFunctionDefinitions.GET) FieldReferenceExpression(org.apache.flink.table.expressions.FieldReferenceExpression) LinkedHashSet(java.util.LinkedHashSet) CAST(org.apache.flink.table.functions.BuiltInFunctionDefinitions.CAST) AS(org.apache.flink.table.functions.BuiltInFunctionDefinitions.AS) FunctionDefinition(org.apache.flink.table.functions.FunctionDefinition) BuiltInFunctionDefinitions(org.apache.flink.table.functions.BuiltInFunctionDefinitions) ProjectQueryOperation(org.apache.flink.table.operations.ProjectQueryOperation) TableException(org.apache.flink.table.api.TableException) Set(java.util.Set) INTEGER(org.apache.flink.table.types.logical.LogicalTypeRoot.INTEGER) OperationExpressionsUtils.extractName(org.apache.flink.table.operations.utils.OperationExpressionsUtils.extractName) ValueLiteralExpression(org.apache.flink.table.expressions.ValueLiteralExpression) Collectors(java.util.stream.Collectors) List(java.util.List) LogicalType(org.apache.flink.table.types.logical.LogicalType) ValidationException(org.apache.flink.table.api.ValidationException) OperationExpressionsUtils.extractNames(org.apache.flink.table.operations.utils.OperationExpressionsUtils.extractNames) Optional(java.util.Optional) Internal(org.apache.flink.annotation.Internal) TableException(org.apache.flink.table.api.TableException) ValidationException(org.apache.flink.table.api.ValidationException)
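
A hedged sketch of how this check surfaces through the Table API (the table and column names are invented): aliasing two projected expressions to the same name ends in the ValidationException above:

import static org.apache.flink.table.api.Expressions.$;

import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.ValidationException;

public class AmbiguousProjectionSketch {
    static void demo(TableEnvironment tEnv) {
        Table orders = tEnv.from("Orders"); // assumes a registered table "Orders"
        try {
            // both expressions are named "a", so names.add(name) fails the second time
            orders.select($("amount").as("a"), $("price").as("a"));
        } catch (ValidationException e) {
            // message: "Ambiguous column name: a"
        }
    }
}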

Example 60 with ValidationException

use of org.apache.flink.table.api.ValidationException in project flink by apache.

the class OperationTreeBuilder method filter.

public QueryOperation filter(Expression condition, QueryOperation child) {
    ExpressionResolver resolver = getResolver(child);
    ResolvedExpression resolvedExpression = resolveSingleExpression(condition, resolver);
    DataType conditionType = resolvedExpression.getOutputDataType();
    if (!conditionType.getLogicalType().is(BOOLEAN)) {
        throw new ValidationException(
                "Filter operator requires a boolean expression as input, but "
                        + condition + " is of type " + conditionType);
    }
    return new FilterQueryOperation(resolvedExpression, child);
}
Also used : ValidationException(org.apache.flink.table.api.ValidationException) ResolvedExpression(org.apache.flink.table.expressions.ResolvedExpression) DataType(org.apache.flink.table.types.DataType) ExpressionResolver(org.apache.flink.table.expressions.resolver.ExpressionResolver) SqlExpressionResolver(org.apache.flink.table.expressions.resolver.SqlExpressionResolver) FilterQueryOperation(org.apache.flink.table.operations.FilterQueryOperation)
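
And a matching hedged sketch for the filter check (again with invented names): passing a non-boolean column straight to filter takes the exception path above:

import static org.apache.flink.table.api.Expressions.$;

import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableEnvironment;

public class NonBooleanFilterSketch {
    static void demo(TableEnvironment tEnv) {
        // assumes table "Orders" has a STRING column 'customer_name'
        Table orders = tEnv.from("Orders");
        // the condition resolves to STRING, not BOOLEAN -> ValidationException
        orders.filter($("customer_name"));
    }
}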

Aggregations

ValidationException (org.apache.flink.table.api.ValidationException): 143 usages
DataType (org.apache.flink.table.types.DataType): 25 usages
Test (org.junit.Test): 23 usages
HashMap (java.util.HashMap): 21 usages
ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier): 19 usages
LogicalType (org.apache.flink.table.types.logical.LogicalType): 18 usages
TableException (org.apache.flink.table.api.TableException): 17 usages
List (java.util.List): 14 usages
CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable): 14 usages
QueryOperation (org.apache.flink.table.operations.QueryOperation): 14 usages
LinkedHashMap (java.util.LinkedHashMap): 13 usages
DescriptorProperties (org.apache.flink.table.descriptors.DescriptorProperties): 13 usages
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 12 usages
Expression (org.apache.flink.table.expressions.Expression): 12 usages
TableSchema (org.apache.flink.table.api.TableSchema): 11 usages
Catalog (org.apache.flink.table.catalog.Catalog): 11 usages
ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable): 11 usages
ArrayList (java.util.ArrayList): 10 usages
Map (java.util.Map): 10 usages
Internal (org.apache.flink.annotation.Internal): 10 usages