Use of org.apache.flink.table.api.ValidationException in the Apache Flink project.
From the class SqlToOperationConverter, method convertAlterView:
/**
 * Converts an ALTER VIEW statement into the corresponding {@link Operation}.
 *
 * <p>Supported variants are RENAME, SET properties, and AS (redefining the view query).
 *
 * @param alterView the parsed ALTER VIEW statement
 * @return the operation describing the requested view alteration
 * @throws ValidationException if the view does not exist or is temporary, if the identifier
 *     refers to a table rather than a view, or if the ALTER VIEW variant is not supported
 */
private Operation convertAlterView(SqlAlterView alterView) {
    UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(alterView.fullViewName());
    ObjectIdentifier viewIdentifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
    Optional<ContextResolvedTable> optionalCatalogTable = catalogManager.getTable(viewIdentifier);
    // Only permanent, existing views can be altered here.
    if (!optionalCatalogTable.isPresent() || optionalCatalogTable.get().isTemporary()) {
        throw new ValidationException(
                String.format(
                        "View %s doesn't exist or is a temporary view.", viewIdentifier));
    }
    CatalogBaseTable baseTable = optionalCatalogTable.get().getTable();
    if (baseTable instanceof CatalogTable) {
        throw new ValidationException("ALTER VIEW for a table is not allowed");
    }
    if (alterView instanceof SqlAlterViewRename) {
        UnresolvedIdentifier newUnresolvedIdentifier =
                UnresolvedIdentifier.of(((SqlAlterViewRename) alterView).fullNewViewName());
        ObjectIdentifier newTableIdentifier =
                catalogManager.qualifyIdentifier(newUnresolvedIdentifier);
        return new AlterViewRenameOperation(viewIdentifier, newTableIdentifier);
    } else if (alterView instanceof SqlAlterViewProperties) {
        SqlAlterViewProperties alterViewProperties = (SqlAlterViewProperties) alterView;
        CatalogView oldView = (CatalogView) baseTable;
        // Merge the new properties on top of the existing ones; statement values win.
        Map<String, String> newProperties = new HashMap<>(oldView.getOptions());
        newProperties.putAll(
                OperationConverterUtils.extractProperties(alterViewProperties.getPropertyList()));
        CatalogView newView =
                new CatalogViewImpl(
                        oldView.getOriginalQuery(),
                        oldView.getExpandedQuery(),
                        oldView.getSchema(),
                        newProperties,
                        oldView.getComment());
        return new AlterViewPropertiesOperation(viewIdentifier, newView);
    } else if (alterView instanceof SqlAlterViewAs) {
        SqlAlterViewAs alterViewAs = (SqlAlterViewAs) alterView;
        final SqlNode newQuery = alterViewAs.getNewQuery();
        CatalogView oldView = (CatalogView) baseTable;
        // Re-derive the view from the new query, keeping the old options and comment.
        CatalogView newView =
                convertViewQuery(
                        newQuery, Collections.emptyList(), oldView.getOptions(), oldView.getComment());
        return new AlterViewAsOperation(viewIdentifier, newView);
    } else {
        // Fixed message: the original "[%s] needs to implement" was ungrammatical and unclear.
        throw new ValidationException(
                String.format(
                        "ALTER VIEW statement is not supported: %s",
                        alterView.toSqlString(CalciteSqlDialect.DEFAULT)));
    }
}
Use of org.apache.flink.table.api.ValidationException in the Apache Flink project.
From the class SqlToOperationConverter, method convertRichExplain:
/**
 * Converts a RICH EXPLAIN statement into an {@link ExplainOperation} wrapping the
 * operation produced from the explained statement.
 *
 * @param sqlExplain the parsed EXPLAIN statement
 * @return an explain operation over the converted target statement
 * @throws ValidationException if the explained statement kind is not supported
 */
private Operation convertRichExplain(SqlRichExplain sqlExplain) {
    final SqlNode statement = sqlExplain.getStatement();
    final Operation target;
    if (statement instanceof RichSqlInsert) {
        target = convertSqlInsert((RichSqlInsert) statement);
    } else if (statement instanceof SqlStatementSet) {
        target = convertSqlStatementSet((SqlStatementSet) statement);
    } else if (statement.getKind().belongsTo(SqlKind.QUERY)) {
        target = convertSqlQuery(statement);
    } else {
        throw new ValidationException(
                String.format("EXPLAIN statement doesn't support %s", statement.getKind()));
    }
    return new ExplainOperation(target, sqlExplain.getExplainDetails());
}
Use of org.apache.flink.table.api.ValidationException in the Apache Flink project.
From the class SqlToOperationConverter, method convertAlterDatabase:
/**
 * Converts an ALTER DATABASE statement into an {@link AlterDatabaseOperation}.
 *
 * <p>The resulting database keeps the original comment; the properties from the statement
 * are merged on top of the existing ones (statement values win on key collisions).
 *
 * @param sqlAlterDatabase the parsed ALTER DATABASE statement
 * @return the alter-database operation
 * @throws ValidationException if the database identifier has more than two parts, or if the
 *     referenced catalog or database does not exist
 */
private Operation convertAlterDatabase(SqlAlterDatabase sqlAlterDatabase) {
    String[] fullDatabaseName = sqlAlterDatabase.fullDatabaseName();
    if (fullDatabaseName.length > 2) {
        throw new ValidationException("alter database identifier format error");
    }
    // A single-part name refers to a database in the current catalog.
    String catalogName =
            (fullDatabaseName.length == 1)
                    ? catalogManager.getCurrentCatalog()
                    : fullDatabaseName[0];
    String databaseName =
            (fullDatabaseName.length == 1) ? fullDatabaseName[0] : fullDatabaseName[1];
    Catalog catalog =
            catalogManager
                    .getCatalog(catalogName)
                    .orElseThrow(
                            () ->
                                    new ValidationException(
                                            String.format(
                                                    "Catalog %s does not exist.", catalogName)));
    final CatalogDatabase originCatalogDatabase;
    final Map<String, String> properties;
    try {
        originCatalogDatabase = catalog.getDatabase(databaseName);
        properties = new HashMap<>(originCatalogDatabase.getProperties());
    } catch (DatabaseNotExistException e) {
        // Preserve the cause so the underlying missing-database details are not lost.
        throw new ValidationException(
                String.format("Database %s does not exist.", databaseName), e);
    }
    // Overlay the properties from the statement onto the existing ones.
    sqlAlterDatabase
            .getPropertyList()
            .getList()
            .forEach(
                    p ->
                            properties.put(
                                    ((SqlTableOption) p).getKeyString(),
                                    ((SqlTableOption) p).getValueString()));
    CatalogDatabase catalogDatabase =
            new CatalogDatabaseImpl(properties, originCatalogDatabase.getComment());
    return new AlterDatabaseOperation(catalogName, databaseName, catalogDatabase);
}
Use of org.apache.flink.table.api.ValidationException in the Apache Flink project.
From the class SqlToOperationConverter, method convertViewQuery:
/**
 * Convert the query part of a VIEW statement into a {@link CatalogView}.
 *
 * <p>NOTE(review): the statement order in this method is load-bearing — the query must be
 * unparsed BEFORE validation/conversion (see the Calcite notes below). Do not reorder.
 *
 * @param query the parsed view query
 * @param fieldNames optional explicit column names; when non-empty they alias the query's
 *     output columns one-to-one
 * @param props view options to attach to the resulting catalog view
 * @param comment the view comment
 * @return a catalog view carrying the original and expanded query text plus the schema
 * @throws ValidationException if the number of explicit field names does not match the
 *     number of columns produced by the query
 */
private CatalogView convertViewQuery(SqlNode query, List<SqlNode> fieldNames, Map<String, String> props, String comment) {
// Put the sql string unparse (getQuotedSqlString()) in front of
// the node conversion (toQueryOperation()),
// because before Calcite 1.22.0, during sql-to-rel conversion, the SqlWindow
// bounds state would be mutated as default when they are null (not specified).
// This bug is fixed in CALCITE-3877 of Calcite 1.23.0.
String originalQuery = getQuotedSqlString(query);
SqlNode validateQuery = flinkPlanner.validate(query);
// The LATERAL operator was eliminated during sql validation, thus the unparsed SQL
// does not contain LATERAL which is problematic,
// the issue was resolved in CALCITE-4077
// (always treat the table function as implicitly LATERAL).
String expandedQuery = Expander.create(flinkPlanner).expanded(originalQuery).substitute(this::getQuotedSqlString);
PlannerQueryOperation operation = toQueryOperation(flinkPlanner, validateQuery);
ResolvedSchema schema = operation.getResolvedSchema();
// If an explicit column list was given, alias the query's output columns with it.
if (!fieldNames.isEmpty()) {
// alias column names: the list length must match the query's column count exactly.
List<String> inputFieldNames = schema.getColumnNames();
List<String> aliasFieldNames = fieldNames.stream().map(SqlNode::toString).collect(Collectors.toList());
if (inputFieldNames.size() != aliasFieldNames.size()) {
throw new ValidationException(String.format("VIEW definition and input fields not match:\n\tDef fields: %s.\n\tInput fields: %s.", aliasFieldNames, inputFieldNames));
}
// Rebuild the schema with the alias names but the original column data types.
schema = ResolvedSchema.physical(aliasFieldNames, schema.getColumnDataTypes());
}
return CatalogView.of(Schema.newBuilder().fromResolvedSchema(schema).build(), comment, originalQuery, expandedQuery, props);
}
Use of org.apache.flink.table.api.ValidationException in the Apache Flink project.
From the class MatchRowTimeFunction, method checkOperandTypes:
/**
 * Validates the operands of the function: either no argument at all, or exactly one
 * argument that is a simple field reference carrying a row time attribute type.
 *
 * @param callBinding the binding of the call being checked
 * @param throwOnFailure whether to raise a {@link ValidationException} instead of
 *     returning {@code false} on an invalid operand
 * @return {@code true} if the operands are valid, {@code false} otherwise (when
 *     {@code throwOnFailure} is {@code false})
 */
@Override
public boolean checkOperandTypes(SqlCallBinding callBinding, boolean throwOnFailure) {
    List<SqlNode> operands = callBinding.operands();
    int operandCount = operands.size();
    assert operandCount == 0 || operandCount == 1;
    // Zero arguments is always valid.
    if (operandCount == 0) {
        return true;
    }
    // The single argument must be a plain field reference (an identifier)...
    SqlNode argument = callBinding.operand(0);
    if (argument.getKind() != SqlKind.IDENTIFIER) {
        if (throwOnFailure) {
            throw new ValidationException(
                    String.format(
                            "The function %s requires a field reference as argument, but actual argument is not a simple field reference.",
                            callBinding.getOperator().getName()));
        }
        return false;
    }
    // ...and its type must be a rowtime indicator.
    RelDataType argumentType = callBinding.getOperandType(0);
    if (FlinkTypeFactory.isRowtimeIndicatorType(argumentType)) {
        return true;
    }
    if (throwOnFailure) {
        throw new ValidationException(
                String.format(
                        "The function %s requires argument to be a row time attribute type, but is '%s'.",
                        callBinding.getOperator().getName(), argumentType));
    }
    return false;
}
Aggregations