Usage of org.apache.flink.table.api.ValidationException in the Apache Flink project: the method registerTableSourceInternal of the class TableEnvironmentImpl.
@Override
public void registerTableSourceInternal(String name, TableSource<?> tableSource) {
    validateTableSource(tableSource);
    ObjectIdentifier objectIdentifier =
            catalogManager.qualifyIdentifier(UnresolvedIdentifier.of(name));
    Optional<CatalogBaseTable> existing = getTemporaryTable(objectIdentifier);

    if (!existing.isPresent()) {
        // Nothing registered under this identifier yet: create a source-only wrapper.
        ConnectorCatalogTable source = ConnectorCatalogTable.source(tableSource, !IS_STREAM_TABLE);
        catalogManager.createTemporaryTable(source, objectIdentifier, false);
        return;
    }

    CatalogBaseTable existingTable = existing.get();
    if (!(existingTable instanceof ConnectorCatalogTable<?, ?>)
            || ((ConnectorCatalogTable<?, ?>) existingTable).getTableSource().isPresent()) {
        // Either a non-connector table occupies the name, or a source was already registered.
        throw new ValidationException(
                String.format("Table '%s' already exists. Please choose a different name.", name));
    }

    // The existing wrapper carries only a sink: replace it with a combined source-and-sink wrapper.
    ConnectorCatalogTable<?, ?> sinkOnlyTable = (ConnectorCatalogTable<?, ?>) existingTable;
    ConnectorCatalogTable sourceAndSink =
            ConnectorCatalogTable.sourceAndSink(
                    tableSource, sinkOnlyTable.getTableSink().get(), !IS_STREAM_TABLE);
    catalogManager.dropTemporaryTable(objectIdentifier, false);
    catalogManager.createTemporaryTable(sourceAndSink, objectIdentifier, false);
}
Usage of org.apache.flink.table.api.ValidationException in the Apache Flink project: the method alterCatalogFunction of the class TableEnvironmentImpl.
/**
 * Executes an ALTER FUNCTION operation against the target catalog.
 *
 * <p>Temporary catalog functions cannot be altered and trigger a
 * {@link ValidationException}. A missing function is reported as a
 * {@link ValidationException} as well; any other failure is wrapped in a
 * {@link TableException} carrying the DDL execution error message.
 */
private TableResultInternal alterCatalogFunction(AlterCatalogFunctionOperation alterCatalogFunctionOperation) {
    String exMsg = getDDLOpExecuteErrorMsg(alterCatalogFunctionOperation.asSummaryString());
    try {
        CatalogFunction function = alterCatalogFunctionOperation.getCatalogFunction();
        if (alterCatalogFunctionOperation.isTemporary()) {
            throw new ValidationException("Alter temporary catalog function is not supported");
        }
        Catalog catalog =
                getCatalogOrThrowException(
                        alterCatalogFunctionOperation.getFunctionIdentifier().getCatalogName());
        catalog.alterFunction(
                alterCatalogFunctionOperation.getFunctionIdentifier().toObjectPath(),
                function,
                alterCatalogFunctionOperation.isIfExists());
        return TableResultImpl.TABLE_RESULT_OK;
    } catch (ValidationException e) {
        // Validation errors are surfaced to the caller unchanged.
        throw e;
    } catch (FunctionNotExistException e) {
        throw new ValidationException(e.getMessage(), e);
    } catch (Exception e) {
        throw new TableException(exMsg, e);
    }
}
Usage of org.apache.flink.table.api.ValidationException in the Apache Flink project: the method dropTemporaryTable of the class TableEnvironmentImpl.
@Override
public boolean dropTemporaryTable(String path) {
    // Resolve the user-supplied path to a fully qualified identifier.
    final ObjectIdentifier identifier =
            catalogManager.qualifyIdentifier(getParser().parseIdentifier(path));
    try {
        catalogManager.dropTemporaryTable(identifier, false);
        return true;
    } catch (ValidationException tableNotFound) {
        // The temporary table did not exist; report failure instead of propagating.
        return false;
    }
}
Usage of org.apache.flink.table.api.ValidationException in the Apache Flink project: the method dropTemporaryTableInternal of the class CatalogManager.
/**
 * Drops the temporary table registered under the given identifier if it passes the
 * supplied filter; otherwise either returns silently (ignoreIfNotExists) or throws
 * a {@link ValidationException}.
 */
private void dropTemporaryTableInternal(ObjectIdentifier objectIdentifier, Predicate<CatalogBaseTable> filter, boolean ignoreIfNotExists) {
    final CatalogBaseTable tableToDrop = temporaryTables.get(objectIdentifier);
    // NOTE(review): the filter is expected to tolerate a null argument when no
    // temporary table is registered — confirm against the predicates passed by callers.
    if (!filter.test(tableToDrop)) {
        if (ignoreIfNotExists) {
            return;
        }
        throw new ValidationException(String.format("Temporary table or view with identifier '%s' does not exist.", objectIdentifier.asSummaryString()));
    }
    // Let any registered listener observe the drop before the table disappears.
    getTemporaryOperationListener(objectIdentifier)
            .ifPresent(listener -> listener.onDropTemporaryTable(objectIdentifier.toObjectPath()));
    final Catalog catalog = catalogs.get(objectIdentifier.getCatalogName());
    final ResolvedCatalogBaseTable<?> resolvedTable = resolveCatalogBaseTable(tableToDrop);
    managedTableListener.notifyTableDrop(catalog, objectIdentifier, resolvedTable, true, ignoreIfNotExists);
    temporaryTables.remove(objectIdentifier);
}
Usage of org.apache.flink.table.api.ValidationException in the Apache Flink project: the method adjustRowtimeAttribute of the class DefaultSchemaResolver.
/**
 * In streaming mode, rewrites a column referenced by a watermark spec so that its
 * timestamp type carries {@link TimestampKind#ROWTIME}; all other columns are
 * returned unchanged. Columns of any type other than TIMESTAMP(p) or
 * TIMESTAMP_LTZ(p) are rejected with a {@link ValidationException}.
 */
private Column adjustRowtimeAttribute(List<WatermarkSpec> watermarkSpecs, Column column) {
    final String name = column.getName();
    final DataType dataType = column.getDataType();
    final boolean isRowtimeAttribute =
            watermarkSpecs.stream().anyMatch(spec -> spec.getRowtimeAttribute().equals(name));
    if (!isRowtimeAttribute || !isStreamingMode) {
        // Not a rowtime attribute (or batch mode): keep the column as declared.
        return column;
    }
    final LogicalType columnType = dataType.getLogicalType();
    switch (columnType.getTypeRoot()) {
        case TIMESTAMP_WITHOUT_TIME_ZONE: {
            final TimestampType ts = (TimestampType) columnType;
            final LogicalType rowtimeType =
                    new TimestampType(ts.isNullable(), TimestampKind.ROWTIME, ts.getPrecision());
            return column.copy(replaceLogicalType(dataType, rowtimeType));
        }
        case TIMESTAMP_WITH_LOCAL_TIME_ZONE: {
            final LocalZonedTimestampType ltz = (LocalZonedTimestampType) columnType;
            final LogicalType rowtimeLtzType =
                    new LocalZonedTimestampType(ltz.isNullable(), TimestampKind.ROWTIME, ltz.getPrecision());
            return column.copy(replaceLogicalType(dataType, rowtimeLtzType));
        }
        default:
            throw new ValidationException("Invalid data type of expression for rowtime definition. " + "The field must be of type TIMESTAMP(p) or TIMESTAMP_LTZ(p)," + " the supported precision 'p' is from 0 to 3.");
    }
}
Aggregations