Search in sources :

Example 31 with ValidationException

use of org.apache.flink.table.api.ValidationException in project flink by apache.

From the class DefaultSchemaResolver, the method resolveWatermarkSpecs:

/**
 * Resolves the unresolved watermark specs against the given input columns.
 *
 * <p>Validates the referenced rowtime column, resolves the watermark expression, and checks
 * that the expression's output type root matches the time column's type root. At most one
 * watermark definition is currently supported.
 *
 * @param unresolvedWatermarkSpecs watermark definitions from the unresolved schema
 * @param inputColumns columns available for expression resolution
 * @return an empty list, or a singleton list with the resolved watermark spec
 * @throws ValidationException if more than one watermark is defined, the time column is
 *     invalid, the expression cannot be resolved, or the output type is inconsistent with
 *     the time column's type
 */
private List<WatermarkSpec> resolveWatermarkSpecs(List<UnresolvedWatermarkSpec> unresolvedWatermarkSpecs, List<Column> inputColumns) {
    if (unresolvedWatermarkSpecs.isEmpty()) {
        return Collections.emptyList();
    }
    if (unresolvedWatermarkSpecs.size() > 1) {
        throw new ValidationException("Multiple watermark definitions are not supported yet.");
    }
    final UnresolvedWatermarkSpec watermarkSpec = unresolvedWatermarkSpecs.get(0);
    // validate time attribute
    final String timeColumn = watermarkSpec.getColumnName();
    final Column validatedTimeColumn = validateTimeColumn(timeColumn, inputColumns);
    // resolve watermark expression
    final ResolvedExpression watermarkExpression;
    try {
        watermarkExpression = resolveExpression(inputColumns, watermarkSpec.getWatermarkExpression(), validatedTimeColumn.getDataType());
    } catch (Exception e) {
        // wrap with the failing spec for context; keep the original cause for diagnosis
        throw new ValidationException(String.format("Invalid expression for watermark '%s'.", watermarkSpec), e);
    }
    final LogicalType outputType = watermarkExpression.getOutputDataType().getLogicalType();
    final LogicalType timeColumnType = validatedTimeColumn.getDataType().getLogicalType();
    validateWatermarkExpression(outputType);
    // the watermark expression must produce the same type root as the rowtime column
    if (outputType.getTypeRoot() != timeColumnType.getTypeRoot()) {
        throw new ValidationException(String.format("The watermark declaration's output data type '%s' is different " + "from the time field's data type '%s'.", outputType, timeColumnType));
    }
    return Collections.singletonList(WatermarkSpec.of(watermarkSpec.getColumnName(), watermarkExpression));
}
Also used : ValidationException(org.apache.flink.table.api.ValidationException) PhysicalColumn(org.apache.flink.table.catalog.Column.PhysicalColumn) UnresolvedComputedColumn(org.apache.flink.table.api.Schema.UnresolvedComputedColumn) MetadataColumn(org.apache.flink.table.catalog.Column.MetadataColumn) ComputedColumn(org.apache.flink.table.catalog.Column.ComputedColumn) UnresolvedMetadataColumn(org.apache.flink.table.api.Schema.UnresolvedMetadataColumn) UnresolvedPhysicalColumn(org.apache.flink.table.api.Schema.UnresolvedPhysicalColumn) ResolvedExpression(org.apache.flink.table.expressions.ResolvedExpression) DataTypeUtils.replaceLogicalType(org.apache.flink.table.types.utils.DataTypeUtils.replaceLogicalType) LogicalType(org.apache.flink.table.types.logical.LogicalType) UnresolvedWatermarkSpec(org.apache.flink.table.api.Schema.UnresolvedWatermarkSpec) ValidationException(org.apache.flink.table.api.ValidationException)

Example 32 with ValidationException

use of org.apache.flink.table.api.ValidationException in project flink by apache.

From the class DefaultSchemaResolver, the method validateTimeColumn:

/**
 * Looks up and validates the rowtime column referenced by a watermark declaration.
 *
 * <p>The column must exist, must be of type {@code TIMESTAMP(p)} or {@code TIMESTAMP_LTZ(p)}
 * with precision 0..3, and must not already be a processing-time attribute.
 *
 * @param columnName name of the rowtime column from the watermark declaration
 * @param columns columns to search
 * @return the validated time column
 * @throws ValidationException if the column is missing or has an unsupported type
 */
private Column validateTimeColumn(String columnName, List<Column> columns) {
    // orElseThrow avoids the isPresent()/get() double-unwrap and repeated Optional access
    final Column timeColumn = columns.stream()
            .filter(c -> c.getName().equals(columnName))
            .findFirst()
            .orElseThrow(() -> new ValidationException(String.format("Invalid column name '%s' for rowtime attribute in watermark declaration. Available columns are: %s", columnName, columns.stream().map(Column::getName).collect(Collectors.toList()))));
    final LogicalType timeFieldType = timeColumn.getDataType().getLogicalType();
    if (!canBeTimeAttributeType(timeFieldType) || getPrecision(timeFieldType) > 3) {
        throw new ValidationException(String.format("Invalid data type of time field for watermark definition. " + "The field must be of type TIMESTAMP(p) or TIMESTAMP_LTZ(p)," + " the supported precision 'p' is from 0 to 3, but the time field type is %s", timeFieldType));
    }
    if (isProctimeAttribute(timeFieldType)) {
        throw new ValidationException("A watermark can not be defined for a processing-time attribute.");
    }
    return timeColumn;
}
Also used : PhysicalColumn(org.apache.flink.table.catalog.Column.PhysicalColumn) DataType(org.apache.flink.table.types.DataType) Arrays(java.util.Arrays) Schema(org.apache.flink.table.api.Schema) ApiExpressionUtils.localRef(org.apache.flink.table.expressions.ApiExpressionUtils.localRef) Expression(org.apache.flink.table.expressions.Expression) Function(java.util.function.Function) UnresolvedComputedColumn(org.apache.flink.table.api.Schema.UnresolvedComputedColumn) LocalReferenceExpression(org.apache.flink.table.expressions.LocalReferenceExpression) TimestampType(org.apache.flink.table.types.logical.TimestampType) MetadataColumn(org.apache.flink.table.catalog.Column.MetadataColumn) TimestampKind(org.apache.flink.table.types.logical.TimestampKind) ResolvedExpression(org.apache.flink.table.expressions.ResolvedExpression) Map(java.util.Map) UnresolvedPrimaryKey(org.apache.flink.table.api.Schema.UnresolvedPrimaryKey) DataTypeUtils.replaceLogicalType(org.apache.flink.table.types.utils.DataTypeUtils.replaceLogicalType) Nullable(javax.annotation.Nullable) LocalZonedTimestampType(org.apache.flink.table.types.logical.LocalZonedTimestampType) LogicalTypeChecks.isProctimeAttribute(org.apache.flink.table.types.logical.utils.LogicalTypeChecks.isProctimeAttribute) Set(java.util.Set) Collectors(java.util.stream.Collectors) ComputedColumn(org.apache.flink.table.catalog.Column.ComputedColumn) Objects(java.util.Objects) LogicalTypeChecks.canBeTimeAttributeType(org.apache.flink.table.types.logical.utils.LogicalTypeChecks.canBeTimeAttributeType) List(java.util.List) ExpressionResolverBuilder(org.apache.flink.table.expressions.resolver.ExpressionResolver.ExpressionResolverBuilder) Stream(java.util.stream.Stream) LogicalType(org.apache.flink.table.types.logical.LogicalType) UnresolvedWatermarkSpec(org.apache.flink.table.api.Schema.UnresolvedWatermarkSpec) ValidationException(org.apache.flink.table.api.ValidationException) 
UnresolvedMetadataColumn(org.apache.flink.table.api.Schema.UnresolvedMetadataColumn) Optional(java.util.Optional) Internal(org.apache.flink.annotation.Internal) UnresolvedPhysicalColumn(org.apache.flink.table.api.Schema.UnresolvedPhysicalColumn) LogicalTypeChecks.getPrecision(org.apache.flink.table.types.logical.utils.LogicalTypeChecks.getPrecision) Collections(java.util.Collections) ValidationException(org.apache.flink.table.api.ValidationException) PhysicalColumn(org.apache.flink.table.catalog.Column.PhysicalColumn) UnresolvedComputedColumn(org.apache.flink.table.api.Schema.UnresolvedComputedColumn) MetadataColumn(org.apache.flink.table.catalog.Column.MetadataColumn) ComputedColumn(org.apache.flink.table.catalog.Column.ComputedColumn) UnresolvedMetadataColumn(org.apache.flink.table.api.Schema.UnresolvedMetadataColumn) UnresolvedPhysicalColumn(org.apache.flink.table.api.Schema.UnresolvedPhysicalColumn) DataTypeUtils.replaceLogicalType(org.apache.flink.table.types.utils.DataTypeUtils.replaceLogicalType) LogicalType(org.apache.flink.table.types.logical.LogicalType)

Example 33 with ValidationException

use of org.apache.flink.table.api.ValidationException in project flink by apache.

From the class FunctionCatalog, the method dropTempCatalogFunction:

/**
 * Drop a temporary catalog function.
 *
 * @param identifier identifier of the function
 * @param ignoreIfNotExist Flag to specify behavior when the function does not exist: if set to
 *     false, throw an exception, if set to true, do nothing.
 * @return the removed catalog function, which is null if function doesn't exist and
 *     ignoreIfNotExist is true.
 */
/**
 * Drop a temporary catalog function.
 *
 * @param identifier identifier of the function
 * @param ignoreIfNotExist Flag to specify behavior when the function does not exist: if set to
 *     false, throw an exception, if set to true, do nothing.
 * @return the removed catalog function, which is null if function doesn't exist and
 *     ignoreIfNotExist is true.
 */
public CatalogFunction dropTempCatalogFunction(ObjectIdentifier identifier, boolean ignoreIfNotExist) {
    final ObjectIdentifier normalizedName = FunctionIdentifier.normalizeObjectIdentifier(identifier);
    final CatalogFunction removedFunction = tempCatalogFunctions.get(normalizedName);
    if (removedFunction == null) {
        if (!ignoreIfNotExist) {
            throw new ValidationException(String.format("Temporary catalog function %s doesn't exist", identifier));
        }
        return null;
    }
    // notify any registered listener before the function is actually removed
    catalogManager.getTemporaryOperationListener(normalizedName).ifPresent(l -> l.onDropTemporaryFunction(normalizedName.toObjectPath()));
    tempCatalogFunctions.remove(normalizedName);
    return removedFunction;
}
Also used : ValidationException(org.apache.flink.table.api.ValidationException)

Example 34 with ValidationException

use of org.apache.flink.table.api.ValidationException in project flink by apache.

From the class FunctionCatalog, the method dropCatalogFunction:

/**
 * Drops a catalog function by also considering temporary catalog functions. Returns true if a
 * function was dropped.
 */
/**
 * Drops a catalog function by also considering temporary catalog functions. Returns true if a
 * function was dropped.
 */
public boolean dropCatalogFunction(UnresolvedIdentifier unresolvedIdentifier, boolean ignoreIfNotExist) {
    final ObjectIdentifier qualifiedIdentifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
    final ObjectIdentifier normalizedIdentifier = FunctionIdentifier.normalizeObjectIdentifier(qualifiedIdentifier);
    // a missing catalog at this point indicates an internal inconsistency
    final Catalog targetCatalog = catalogManager.getCatalog(normalizedIdentifier.getCatalogName()).orElseThrow(IllegalStateException::new);
    final ObjectPath functionPath = qualifiedIdentifier.toObjectPath();
    // temporary functions shadow catalog functions, so the user must resolve the conflict first
    if (tempCatalogFunctions.containsKey(normalizedIdentifier)) {
        throw new ValidationException(String.format("Could not drop catalog function. A temporary function '%s' does already exist. " + "Please drop the temporary function first.", qualifiedIdentifier.asSummaryString()));
    }
    if (!targetCatalog.functionExists(functionPath)) {
        if (!ignoreIfNotExist) {
            throw new ValidationException(String.format("Could not drop catalog function. A function '%s' doesn't exist.", qualifiedIdentifier.asSummaryString()));
        }
        return false;
    }
    try {
        targetCatalog.dropFunction(functionPath, ignoreIfNotExist);
    } catch (Throwable t) {
        // catalog implementations may fail in arbitrary ways; surface as a TableException
        throw new TableException(String.format("Could not drop catalog function '%s'.", qualifiedIdentifier.asSummaryString()), t);
    }
    return true;
}
Also used : TableException(org.apache.flink.table.api.TableException) ValidationException(org.apache.flink.table.api.ValidationException)

Example 35 with ValidationException

use of org.apache.flink.table.api.ValidationException in project flink by apache.

From the class FunctionCatalog, the method registerCatalogFunction:

/**
 * Registers a catalog function by also considering temporary catalog functions.
 */
/**
 * Registers a catalog function by also considering temporary catalog functions.
 */
public void registerCatalogFunction(UnresolvedIdentifier unresolvedIdentifier, Class<? extends UserDefinedFunction> functionClass, boolean ignoreIfExists) {
    final ObjectIdentifier qualifiedIdentifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
    final ObjectIdentifier normalizedIdentifier = FunctionIdentifier.normalizeObjectIdentifier(qualifiedIdentifier);
    // reject function classes that cannot be instantiated/used before touching the catalog
    try {
        UserDefinedFunctionHelper.validateClass(functionClass);
    } catch (Throwable t) {
        throw new ValidationException(String.format("Could not register catalog function '%s' due to implementation errors.", qualifiedIdentifier.asSummaryString()), t);
    }
    // a missing catalog at this point indicates an internal inconsistency
    final Catalog targetCatalog = catalogManager.getCatalog(normalizedIdentifier.getCatalogName()).orElseThrow(IllegalStateException::new);
    final ObjectPath functionPath = qualifiedIdentifier.toObjectPath();
    // temporary functions shadow catalog functions, so the user must resolve the conflict first
    if (tempCatalogFunctions.containsKey(normalizedIdentifier)) {
        if (ignoreIfExists) {
            return;
        }
        throw new ValidationException(String.format("Could not register catalog function. A temporary function '%s' does already exist. " + "Please drop the temporary function first.", qualifiedIdentifier.asSummaryString()));
    }
    if (targetCatalog.functionExists(functionPath)) {
        if (ignoreIfExists) {
            return;
        }
        throw new ValidationException(String.format("Could not register catalog function. A function '%s' does already exist.", qualifiedIdentifier.asSummaryString()));
    }
    final CatalogFunction newFunction = new CatalogFunctionImpl(functionClass.getName(), FunctionLanguage.JAVA);
    try {
        targetCatalog.createFunction(functionPath, newFunction, ignoreIfExists);
    } catch (Throwable t) {
        // catalog implementations may fail in arbitrary ways; surface as a TableException
        throw new TableException(String.format("Could not register catalog function '%s'.", qualifiedIdentifier.asSummaryString()), t);
    }
}
Also used : TableException(org.apache.flink.table.api.TableException) ValidationException(org.apache.flink.table.api.ValidationException)

Aggregations

ValidationException (org.apache.flink.table.api.ValidationException)143 DataType (org.apache.flink.table.types.DataType)25 Test (org.junit.Test)23 HashMap (java.util.HashMap)21 ObjectIdentifier (org.apache.flink.table.catalog.ObjectIdentifier)19 LogicalType (org.apache.flink.table.types.logical.LogicalType)18 TableException (org.apache.flink.table.api.TableException)17 List (java.util.List)14 CatalogBaseTable (org.apache.flink.table.catalog.CatalogBaseTable)14 QueryOperation (org.apache.flink.table.operations.QueryOperation)14 LinkedHashMap (java.util.LinkedHashMap)13 DescriptorProperties (org.apache.flink.table.descriptors.DescriptorProperties)13 CatalogTable (org.apache.flink.table.catalog.CatalogTable)12 Expression (org.apache.flink.table.expressions.Expression)12 TableSchema (org.apache.flink.table.api.TableSchema)11 Catalog (org.apache.flink.table.catalog.Catalog)11 ContextResolvedTable (org.apache.flink.table.catalog.ContextResolvedTable)11 ArrayList (java.util.ArrayList)10 Map (java.util.Map)10 Internal (org.apache.flink.annotation.Internal)10