Use of org.apache.flink.table.api.ValidationException in project flink by apache.
From the class FunctionITCase, method testCreateTemporaryCatalogFunction:
@Test
public void testCreateTemporaryCatalogFunction() {
    String ddl1 =
            "create temporary function default_catalog.default_database.f4 as '"
                    + TEST_FUNCTION + "'";
    String ddl2 =
            "create temporary function if not exists default_catalog.default_database.f4 as '"
                    + TEST_FUNCTION + "'";
    String ddl3 = "drop temporary function default_catalog.default_database.f4";
    String ddl4 = "drop temporary function if exists default_catalog.default_database.f4";

    // Creating the function registers it under its simple name.
    tEnv().executeSql(ddl1);
    assertTrue(Arrays.asList(tEnv().listFunctions()).contains("f4"));

    // IF NOT EXISTS is a no-op when the function already exists.
    tEnv().executeSql(ddl2);
    assertTrue(Arrays.asList(tEnv().listFunctions()).contains("f4"));

    tEnv().executeSql(ddl3);
    assertFalse(Arrays.asList(tEnv().listFunctions()).contains("f4"));

    // Creating the same function twice without IF NOT EXISTS must fail.
    tEnv().executeSql(ddl1);
    try {
        tEnv().executeSql(ddl1);
        fail("Expected a ValidationException");
    } catch (Exception e) {
        assertTrue(e instanceof ValidationException);
        assertEquals(
                "Could not register temporary catalog function. A function "
                        + "'default_catalog.default_database.f4' does already exist.",
                e.getMessage());
    }
    tEnv().executeSql(ddl3);

    // DROP ... IF EXISTS tolerates a missing function; a plain DROP must fail.
    tEnv().executeSql(ddl4);
    try {
        tEnv().executeSql(ddl3);
        fail("Expected a ValidationException");
    } catch (Exception e) {
        assertTrue(e instanceof ValidationException);
        assertEquals(
                "Temporary catalog function `default_catalog`.`default_database`.`f4`"
                        + " doesn't exist",
                e.getMessage());
    }
}
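The same negative-path check can be written more compactly with AssertJ's assertThatThrownBy, which also fails the test when no exception is thrown at all. A minimal sketch, assuming AssertJ is on the classpath and reusing the tEnv() and TEST_FUNCTION helpers from the test class above; the method name is hypothetical:

import static org.assertj.core.api.Assertions.assertThatThrownBy;

@Test
public void testDuplicateTemporaryFunctionFails() {
    String ddl =
            "create temporary function default_catalog.default_database.f4 as '"
                    + TEST_FUNCTION + "'";
    tEnv().executeSql(ddl);
    // The second registration without IF NOT EXISTS must raise a ValidationException.
    assertThatThrownBy(() -> tEnv().executeSql(ddl))
            .isInstanceOf(ValidationException.class)
            .hasMessageContaining("does already exist");
}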
Use of org.apache.flink.table.api.ValidationException in project flink by apache.
From the class ContextResolvedTableJsonSerializer, method serialize:
@Override
public void serialize(
        ContextResolvedTable contextResolvedTable,
        JsonGenerator jsonGenerator,
        SerializerProvider serializerProvider)
        throws IOException {
    final CatalogPlanCompilation planCompilationOption =
            SerdeContext.get(serializerProvider)
                    .getConfiguration()
                    .get(TableConfigOptions.PLAN_COMPILE_CATALOG_OBJECTS);
    if (contextResolvedTable.isAnonymous()
            && planCompilationOption == CatalogPlanCompilation.IDENTIFIER) {
        throw cannotSerializeAnonymousTable(contextResolvedTable.getIdentifier());
    }
    jsonGenerator.writeStartObject();
    if (!contextResolvedTable.isAnonymous()) {
        // Serialize object identifier
        jsonGenerator.writeObjectField(
                FIELD_NAME_IDENTIFIER, contextResolvedTable.getIdentifier());
    }
    if ((contextResolvedTable.isPermanent() || contextResolvedTable.isAnonymous())
            && planCompilationOption != CatalogPlanCompilation.IDENTIFIER) {
        jsonGenerator.writeFieldName(FIELD_NAME_CATALOG_TABLE);
        try {
            ResolvedCatalogTableJsonSerializer.serialize(
                    contextResolvedTable.getResolvedTable(),
                    planCompilationOption == CatalogPlanCompilation.ALL,
                    jsonGenerator,
                    serializerProvider);
        } catch (ValidationException e) {
            throw new ValidationException(
                    String.format(
                            "Error when trying to serialize table '%s'.",
                            contextResolvedTable.getIdentifier()),
                    e);
        }
    }
    jsonGenerator.writeEndObject();
}
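The planCompilationOption read at the top of this method is driven by the table.plan.compile.catalog-objects option. A minimal sketch of setting it before compiling a plan, assuming CatalogPlanCompilation is the nested enum in TableConfigOptions and a plain TableEnvironment; with IDENTIFIER, serializing an anonymous table trips the cannotSerializeAnonymousTable guard shown above:

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.config.TableConfigOptions;
import org.apache.flink.table.api.config.TableConfigOptions.CatalogPlanCompilation;

TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inStreamingMode());
// Compile only table identifiers into the plan; an anonymous table has no
// identifier to compile and is rejected by the serializer above.
tEnv.getConfig()
        .set(TableConfigOptions.PLAN_COMPILE_CATALOG_OBJECTS, CatalogPlanCompilation.IDENTIFIER);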
Use of org.apache.flink.table.api.ValidationException in project flink by apache.
From the class ResolvedCatalogTableJsonSerializer, method serialize:
static void serialize(
        ResolvedCatalogTable resolvedCatalogTable,
        boolean serializeOptions,
        JsonGenerator jsonGenerator,
        SerializerProvider serializerProvider)
        throws IOException {
    // This should never happen anyway, but we keep this assertion for sanity check
    assert resolvedCatalogTable.getTableKind() == CatalogBaseTable.TableKind.TABLE;
    jsonGenerator.writeStartObject();
    if (resolvedCatalogTable.getOrigin() instanceof ExternalCatalogTable) {
        throw new ValidationException(
                "Cannot serialize the table as it's an external inline table. "
                        + "This might be caused by a usage of "
                        + "StreamTableEnvironment#fromDataStream or TableResult#collect, "
                        + "which are not supported in compiled plans.");
    }
    serializerProvider.defaultSerializeField(
            RESOLVED_SCHEMA, resolvedCatalogTable.getResolvedSchema(), jsonGenerator);
    jsonGenerator.writeObjectField(PARTITION_KEYS, resolvedCatalogTable.getPartitionKeys());
    if (serializeOptions) {
        if (!resolvedCatalogTable.getComment().isEmpty()) {
            jsonGenerator.writeObjectField(COMMENT, resolvedCatalogTable.getComment());
        }
        try {
            jsonGenerator.writeObjectField(OPTIONS, resolvedCatalogTable.getOptions());
        } catch (Exception e) {
            throw new ValidationException(
                    String.format(
                            "The table is not serializable as %s#getOptions() failed. "
                                    + "It seems the table is not intended to be stored in a "
                                    + "persisted plan. Either declare the table as a temporary "
                                    + "table or use '%s' = '%s' / '%s' to only compile an identifier "
                                    + "into the plan.",
                            resolvedCatalogTable.getOrigin().getClass(),
                            TableConfigOptions.PLAN_COMPILE_CATALOG_OBJECTS.key(),
                            CatalogPlanCompilation.SCHEMA.name(),
                            CatalogPlanCompilation.IDENTIFIER.name()),
                    e);
        }
    }
    jsonGenerator.writeEndObject();
}
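The exception message above names its own remedies. A sketch of the configuration remedy, reusing the tEnv from the previous sketch: compiling with SCHEMA keeps the resolved schema in the plan but passes serializeOptions = false, so getOptions() is never called. The SQL statement and table names are hypothetical:

import org.apache.flink.table.api.CompiledPlan;

tEnv.getConfig()
        .set(TableConfigOptions.PLAN_COMPILE_CATALOG_OBJECTS, CatalogPlanCompilation.SCHEMA);
// serializeOptions is false for SCHEMA, so a table whose getOptions() throws
// can still be compiled into a plan.
CompiledPlan plan = tEnv.compilePlanSql("INSERT INTO sink_table SELECT * FROM source_table");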
Use of org.apache.flink.table.api.ValidationException in project flink by apache.
From the class ColumnJsonDeserializer, method deserialize:
@Override
public Column deserialize(JsonParser jsonParser, DeserializationContext ctx) throws IOException {
    ObjectNode jsonNode = jsonParser.readValueAsTree();
    String columnName = jsonNode.required(NAME).asText();
    // The column kind is optional and defaults to a physical column.
    String columnKind =
            Optional.ofNullable(jsonNode.get(KIND))
                    .map(JsonNode::asText)
                    .orElse(KIND_PHYSICAL);
    Column column;
    switch (columnKind) {
        case KIND_PHYSICAL:
            column = deserializePhysicalColumn(columnName, jsonNode, jsonParser.getCodec(), ctx);
            break;
        case KIND_COMPUTED:
            column = deserializeComputedColumn(columnName, jsonNode, jsonParser.getCodec(), ctx);
            break;
        case KIND_METADATA:
            column = deserializeMetadataColumn(columnName, jsonNode, jsonParser.getCodec(), ctx);
            break;
        default:
            throw new ValidationException(
                    String.format(
                            "Cannot recognize column type '%s'. Allowed types: %s.",
                            columnKind, SUPPORTED_KINDS));
    }
    return column.withComment(
            deserializeOptionalField(jsonNode, COMMENT, String.class, jsonParser.getCodec(), ctx)
                    .orElse(null));
}
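For illustration, here is one hypothetical input per branch of the switch above. This is a sketch only: it assumes the NAME and KIND constants resolve to "name" and "kind", and the "dataType" payload field is an assumption, not taken from this class:

// Hypothetical JSON inputs, one per branch of the switch above.
String physical = "{\"name\": \"id\", \"dataType\": \"INT\"}";     // no "kind" -> physical column
String computed = "{\"name\": \"total\", \"kind\": \"COMPUTED\"}"; // -> deserializeComputedColumn
String metadata = "{\"name\": \"ts\", \"kind\": \"METADATA\"}";    // -> deserializeMetadataColumn
String unknown  = "{\"name\": \"x\", \"kind\": \"VIRTUAL\"}";      // -> ValidationException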
Use of org.apache.flink.table.api.ValidationException in project flink by apache.
From the class ContextResolvedTableJsonDeserializer, method deserialize:
@Override
public ContextResolvedTable deserialize(JsonParser jsonParser, DeserializationContext ctx)
        throws IOException {
    final CatalogPlanRestore planRestoreOption =
            SerdeContext.get(ctx).getConfiguration().get(PLAN_RESTORE_CATALOG_OBJECTS);
    final CatalogManager catalogManager =
            SerdeContext.get(ctx).getFlinkContext().getCatalogManager();
    final ObjectNode objectNode = jsonParser.readValueAsTree();

    // Deserialize the two fields, if available
    final ObjectIdentifier identifier =
            JsonSerdeUtil.deserializeOptionalField(
                            objectNode, FIELD_NAME_IDENTIFIER, ObjectIdentifier.class,
                            jsonParser.getCodec(), ctx)
                    .orElse(null);
    ResolvedCatalogTable resolvedCatalogTable =
            JsonSerdeUtil.deserializeOptionalField(
                            objectNode, FIELD_NAME_CATALOG_TABLE, ResolvedCatalogTable.class,
                            jsonParser.getCodec(), ctx)
                    .orElse(null);

    if (identifier == null && resolvedCatalogTable == null) {
        throw new ValidationException(
                String.format(
                        "The input JSON is invalid because it doesn't contain '%s', nor the '%s'.",
                        FIELD_NAME_IDENTIFIER, FIELD_NAME_CATALOG_TABLE));
    }
    if (identifier == null) {
        // No identifier in the plan: only acceptable when catalog lookup is not forced.
        if (isLookupForced(planRestoreOption)) {
            throw missingIdentifier();
        }
        return ContextResolvedTable.anonymous(resolvedCatalogTable);
    }
    Optional<ContextResolvedTable> contextResolvedTableFromCatalog =
            isLookupEnabled(planRestoreOption)
                    ? catalogManager.getTable(identifier)
                    : Optional.empty();
    // If we have a schema from the plan and from the catalog, we need to check they match.
    if (contextResolvedTableFromCatalog.isPresent() && resolvedCatalogTable != null) {
        ResolvedSchema schemaFromPlan = resolvedCatalogTable.getResolvedSchema();
        ResolvedSchema schemaFromCatalog =
                contextResolvedTableFromCatalog.get().getResolvedSchema();
        if (!areResolvedSchemasEqual(schemaFromPlan, schemaFromCatalog)) {
            throw schemaNotMatching(identifier, schemaFromPlan, schemaFromCatalog);
        }
    }
    if (resolvedCatalogTable == null || isLookupForced(planRestoreOption)) {
        if (!isLookupEnabled(planRestoreOption)) {
            throw lookupDisabled(identifier);
        }
        // We use what is stored inside the catalog
        return contextResolvedTableFromCatalog.orElseThrow(
                () -> missingTableFromCatalog(identifier, isLookupForced(planRestoreOption)));
    }
    if (contextResolvedTableFromCatalog.isPresent()) {
        // The plan carries no options (it was compiled with SCHEMA), so the
        // catalog query result can be returned as-is.
        if (objectNode.at("/" + FIELD_NAME_CATALOG_TABLE + "/" + OPTIONS).isMissingNode()) {
            return contextResolvedTableFromCatalog.get();
        }
        return contextResolvedTableFromCatalog
                .flatMap(ContextResolvedTable::getCatalog)
                .map(c -> ContextResolvedTable.permanent(identifier, c, resolvedCatalogTable))
                .orElseGet(() -> ContextResolvedTable.temporary(identifier, resolvedCatalogTable));
    }
    return ContextResolvedTable.temporary(identifier, resolvedCatalogTable);
}
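The branching in this method is controlled by the table.plan.restore.catalog-objects option. A minimal sketch of the strictest setting, assuming CatalogPlanRestore is the nested enum in TableConfigOptions and that a compiled plan exists on disk (the path is hypothetical); with ALL_ENFORCED, isLookupEnabled(...) is false, so the deserializer above throws instead of consulting the catalog whenever the plan alone is insufficient:

import org.apache.flink.table.api.PlanReference;
import org.apache.flink.table.api.config.TableConfigOptions;
import org.apache.flink.table.api.config.TableConfigOptions.CatalogPlanRestore;

// Restore table metadata exclusively from the compiled plan; no catalog lookup.
tEnv.getConfig()
        .set(TableConfigOptions.PLAN_RESTORE_CATALOG_OBJECTS, CatalogPlanRestore.ALL_ENFORCED);
tEnv.loadPlan(PlanReference.fromFile("/path/to/plan.json")).execute();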