Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlNode in project calcite by apache.
The class SqlValidatorImpl, method validateValues.
/**
* Validates a VALUES clause.
*
* @param node Values clause
* @param targetRowType Row type which expression must conform to
* @param scope Scope within which clause occurs
*/
protected void validateValues(SqlCall node, RelDataType targetRowType,
    final SqlValidatorScope scope) {
  assert node.getKind() == SqlKind.VALUES;
  final List<SqlNode> operands = node.getOperandList();
  for (SqlNode operand : operands) {
    if (operand.getKind() != SqlKind.ROW) {
      throw Util.needToImplement("Values function where operands are scalars");
    }
    SqlCall rowConstructor = (SqlCall) operand;
    if (conformance.isInsertSubsetColumnsAllowed()
        && targetRowType.isStruct()
        && rowConstructor.operandCount() < targetRowType.getFieldCount()) {
      targetRowType = typeFactory.createStructType(
          targetRowType.getFieldList().subList(0, rowConstructor.operandCount()));
    } else if (targetRowType.isStruct()
        && rowConstructor.operandCount() != targetRowType.getFieldCount()) {
      return;
    }
    inferUnknownTypes(targetRowType, scope, rowConstructor);
    if (targetRowType.isStruct()) {
      for (Pair<SqlNode, RelDataTypeField> pair
          : Pair.zip(rowConstructor.getOperandList(), targetRowType.getFieldList())) {
        if (!pair.right.getType().isNullable()
            && SqlUtil.isNullLiteral(pair.left, false)) {
          throw newValidationError(node, RESOURCE.columnNotNullable(pair.right.getName()));
        }
      }
    }
  }
  for (SqlNode operand : operands) {
    operand.validate(this, scope);
  }
  // Validate that all row types have the same number of columns
  // and that expressions in each column are compatible.
  // A values expression is turned into something that looks like
  // ROW(type00, type01,...), ROW(type11,...),...
  final int rowCount = operands.size();
  if (rowCount >= 2) {
    SqlCall firstRow = (SqlCall) operands.get(0);
    final int columnCount = firstRow.operandCount();
    // 1. Check that all rows have the same number of columns.
    for (SqlNode operand : operands) {
      SqlCall thisRow = (SqlCall) operand;
      if (columnCount != thisRow.operandCount()) {
        throw newValidationError(node,
            RESOURCE.incompatibleValueType(SqlStdOperatorTable.VALUES.getName()));
      }
    }
    // 2. Check that the types at the i-th position in each row are compatible.
    for (int col = 0; col < columnCount; col++) {
      final int c = col;
      final RelDataType type =
          typeFactory.leastRestrictive(new AbstractList<RelDataType>() {
            public RelDataType get(int row) {
              SqlCall thisRow = (SqlCall) operands.get(row);
              return deriveType(scope, thisRow.operand(c));
            }

            public int size() {
              return rowCount;
            }
          });
      if (type == null) {
        throw newValidationError(node,
            RESOURCE.incompatibleValueType(SqlStdOperatorTable.VALUES.getName()));
      }
    }
  }
}
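For context, a minimal sketch of how these checks surface through Calcite's public Planner API, assuming plain org.apache.calcite classes rather than the Beam-vendored package; the class name is hypothetical. The first VALUES statement validates, while the ragged second one is rejected by the column-count check above.

import org.apache.calcite.schema.SchemaPlus;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.SqlParseException;
import org.apache.calcite.tools.FrameworkConfig;
import org.apache.calcite.tools.Frameworks;
import org.apache.calcite.tools.Planner;
import org.apache.calcite.tools.ValidationException;

public final class ValuesValidationSketch {
  public static void main(String[] args) {
    SchemaPlus rootSchema = Frameworks.createRootSchema(true);
    FrameworkConfig config = Frameworks.newConfigBuilder()
        .defaultSchema(rootSchema)
        .build();
    // Rows of equal width and compatible column types validate fine.
    check(config, "VALUES (1, 'a'), (2, 'b')");
    // A ragged VALUES list (two columns vs. three) is rejected by the
    // column-count check in validateValues.
    check(config, "VALUES (1, 'a'), (2, 'b', 3)");
  }

  // Parse and validate one statement with a fresh Planner each time, so the
  // Planner state machine never has to be reset.
  private static void check(FrameworkConfig config, String sql) {
    Planner planner = Frameworks.getPlanner(config);
    try {
      SqlNode parsed = planner.parse(sql);
      planner.validate(parsed);
      System.out.println("valid:    " + sql);
    } catch (SqlParseException | ValidationException e) {
      System.out.println("rejected: " + sql + " -> " + e.getMessage());
    } finally {
      planner.close();
    }
  }
}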
Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlNode in project calcite by apache.
The class SqlValidatorImpl, method registerSubQueries.
private void registerSubQueries(SqlValidatorScope parentScope, SqlNode node) {
  if (node == null) {
    return;
  }
  if (node.getKind().belongsTo(SqlKind.QUERY)
      || node.getKind() == SqlKind.MULTISET_QUERY_CONSTRUCTOR
      || node.getKind() == SqlKind.MULTISET_VALUE_CONSTRUCTOR) {
    registerQuery(parentScope, null, node, node, null, false);
  } else if (node instanceof SqlCall) {
    validateNodeFeature(node);
    SqlCall call = (SqlCall) node;
    for (int i = 0; i < call.operandCount(); i++) {
      registerOperandSubQueries(parentScope, call, i);
    }
  } else if (node instanceof SqlNodeList) {
    SqlNodeList list = (SqlNodeList) node;
    for (int i = 0, count = list.size(); i < count; i++) {
      SqlNode listNode = list.get(i);
      if (listNode.getKind().belongsTo(SqlKind.QUERY)) {
        listNode = SqlStdOperatorTable.SCALAR_QUERY.createCall(
            listNode.getParserPosition(), listNode);
        list.set(i, listNode);
      }
      registerSubQueries(parentScope, listNode);
    }
  } else {
    // atomic node -- can be ignored
  }
}
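As an illustration, a hypothetical standalone walker (class and method names are made up) that mirrors the kind-based dispatch above: recurse into calls and node lists, and treat any node whose kind belongs to SqlKind.QUERY as a sub-query. Unlike registerSubQueries, it only collects the sub-queries rather than registering scopes or wrapping list elements in SCALAR_QUERY.

import java.util.ArrayList;
import java.util.List;
import org.apache.calcite.sql.SqlCall;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlNodeList;

final class SubQueryCollector {

  static List<SqlNode> collect(SqlNode root) {
    List<SqlNode> out = new ArrayList<>();
    walk(root, out);
    return out;
  }

  private static void walk(SqlNode node, List<SqlNode> out) {
    if (node == null) {
      return;                 // SqlCall operands may be null
    }
    if (node.getKind().belongsTo(SqlKind.QUERY)) {
      out.add(node);          // a sub-query; do not descend further here
    } else if (node instanceof SqlCall) {
      for (SqlNode operand : ((SqlCall) node).getOperandList()) {
        walk(operand, out);
      }
    } else if (node instanceof SqlNodeList) {
      for (SqlNode listNode : (SqlNodeList) node) {
        walk(listNode, out);
      }
    }
    // Atomic nodes (identifiers, literals) are ignored, as above.
  }
}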
Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlNode in project calcite by apache.
The class SqlValidatorImpl, method validateModality.
public boolean validateModality(SqlSelect select, SqlModality modality, boolean fail) {
  final SelectScope scope = getRawSelectScope(select);
  switch (modality) {
  case STREAM:
    if (scope.children.size() == 1) {
      for (ScopeChild child : scope.children) {
        if (!child.namespace.supportsModality(modality)) {
          if (fail) {
            throw newValidationError(child.namespace.getNode(),
                Static.RESOURCE.cannotConvertToStream(child.name));
          } else {
            return false;
          }
        }
      }
    } else {
      int supportsModalityCount = 0;
      for (ScopeChild child : scope.children) {
        if (child.namespace.supportsModality(modality)) {
          ++supportsModalityCount;
        }
      }
      if (supportsModalityCount == 0) {
        if (fail) {
          String inputs = Joiner.on(", ").join(scope.getChildNames());
          throw newValidationError(select,
              Static.RESOURCE.cannotStreamResultsForNonStreamingInputs(inputs));
        } else {
          return false;
        }
      }
    }
    break;
  default:
    for (ScopeChild child : scope.children) {
      if (!child.namespace.supportsModality(modality)) {
        if (fail) {
          throw newValidationError(child.namespace.getNode(),
              Static.RESOURCE.cannotConvertToRelation(child.name));
        } else {
          return false;
        }
      }
    }
  }
  // Make sure that aggregation is possible.
  final SqlNode aggregateNode = getAggregate(select);
  if (aggregateNode != null) {
    switch (modality) {
    case STREAM:
      SqlNodeList groupList = select.getGroup();
      if (groupList == null
          || !SqlValidatorUtil.containsMonotonic(scope, groupList)) {
        if (fail) {
          throw newValidationError(aggregateNode,
              Static.RESOURCE.streamMustGroupByMonotonic());
        } else {
          return false;
        }
      }
    }
  }
  // Make sure that ORDER BY is possible.
  final SqlNodeList orderList = select.getOrderList();
  if (orderList != null && orderList.size() > 0) {
    switch (modality) {
    case STREAM:
      if (!hasSortedPrefix(scope, orderList)) {
        if (fail) {
          throw newValidationError(orderList.get(0),
              Static.RESOURCE.streamMustOrderByMonotonic());
        } else {
          return false;
        }
      }
    }
  }
  return true;
}
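The STREAM checks above correspond to query shapes like the following sketch. The streaming table Orders and its monotonic rowtime column are assumptions for illustration only; which statements are accepted also depends on the monotonicity metadata the catalog reports.

// Hypothetical streaming table "Orders" with a monotonic rowtime column.
// These strings only illustrate which shapes the STREAM checks above accept.
final class StreamModalityExamples {
  // OK: every input of the SELECT STREAM is itself a stream.
  static final String STREAM_SCAN =
      "SELECT STREAM rowtime, productId FROM Orders";

  // Rejected by the aggregation check: GROUP BY has no monotonic expression,
  // so streamMustGroupByMonotonic is raised.
  static final String BAD_STREAM_AGG =
      "SELECT STREAM productId, COUNT(*) FROM Orders GROUP BY productId";

  // OK: grouping on FLOOR(rowtime TO HOUR) is monotonic, so a streaming
  // aggregation can emit a result as each hour closes.
  static final String GOOD_STREAM_AGG =
      "SELECT STREAM FLOOR(rowtime TO HOUR) AS rowtime, COUNT(*) "
          + "FROM Orders GROUP BY FLOOR(rowtime TO HOUR)";

  // Rejected by the ORDER BY check unless the sort key starts with a
  // monotonic prefix (streamMustOrderByMonotonic).
  static final String BAD_STREAM_ORDER =
      "SELECT STREAM productId FROM Orders ORDER BY productId";

  private StreamModalityExamples() {}
}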
Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlNode in project calcite by apache.
The class SqlValidatorImpl, method getLogicalTargetRowType.
protected RelDataType getLogicalTargetRowType(RelDataType targetRowType, SqlInsert insert) {
  if (insert.getTargetColumnList() == null
      && conformance.isInsertSubsetColumnsAllowed()) {
    // Target an implicit subset of columns.
    final SqlNode source = insert.getSource();
    final RelDataType sourceRowType = getNamespace(source).getRowType();
    final RelDataType logicalSourceRowType =
        getLogicalSourceRowType(sourceRowType, insert);
    final RelDataType implicitTargetRowType =
        typeFactory.createStructType(
            targetRowType.getFieldList()
                .subList(0, logicalSourceRowType.getFieldCount()));
    final SqlValidatorNamespace targetNamespace = getNamespace(insert);
    validateNamespace(targetNamespace, implicitTargetRowType);
    return implicitTargetRowType;
  } else {
    // Either the columns are explicitly targeted, or target the full
    // set of columns.
    return targetRowType;
  }
}
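A small self-contained sketch of the implicit-subset rule (the class, table, and column names are made up): with no target column list and a two-column source, the logical target row type is the prefix of the declared columns, built with the same subList and createStructType calls used above.

import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
import org.apache.calcite.sql.type.SqlTypeName;

public final class ImplicitTargetRowTypeSketch {
  public static void main(String[] args) {
    RelDataTypeFactory typeFactory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);

    // Declared row type of the target table: (id INT, name VARCHAR(20), note VARCHAR(20)).
    RelDataType targetRowType = typeFactory.builder()
        .add("id", SqlTypeName.INTEGER)
        .add("name", SqlTypeName.VARCHAR, 20)
        .add("note", SqlTypeName.VARCHAR, 20)
        .build();

    // The INSERT source supplies only two expressions, e.g. VALUES (1, 'a').
    int sourceFieldCount = 2;

    // Same subList + createStructType step as getLogicalTargetRowType uses.
    RelDataType implicitTargetRowType = typeFactory.createStructType(
        targetRowType.getFieldList().subList(0, sourceFieldCount));

    // Prints a two-column struct type containing only id and name.
    System.out.println(implicitTargetRowType);
  }
}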
Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlNode in project calcite by apache.
The class SqlValidatorImpl, method validateJoin.
protected void validateJoin(SqlJoin join, SqlValidatorScope scope) {
  SqlNode left = join.getLeft();
  SqlNode right = join.getRight();
  SqlNode condition = join.getCondition();
  boolean natural = join.isNatural();
  final JoinType joinType = join.getJoinType();
  final JoinConditionType conditionType = join.getConditionType();
  final SqlValidatorScope joinScope = scopes.get(join);
  validateFrom(left, unknownType, joinScope);
  validateFrom(right, unknownType, joinScope);
  // Validate condition.
  switch (conditionType) {
  case NONE:
    Preconditions.checkArgument(condition == null);
    break;
  case ON:
    Preconditions.checkArgument(condition != null);
    SqlNode expandedCondition = expand(condition, joinScope);
    join.setOperand(5, expandedCondition);
    condition = join.getCondition();
    validateWhereOrOn(joinScope, condition, "ON");
    checkRollUp(null, join, condition, joinScope, "ON");
    break;
  case USING:
    SqlNodeList list = (SqlNodeList) condition;
    // Parser ensures that the USING clause is not empty.
    Preconditions.checkArgument(list.size() > 0, "Empty USING clause");
    for (int i = 0; i < list.size(); i++) {
      SqlIdentifier id = (SqlIdentifier) list.get(i);
      final RelDataType leftColType = validateUsingCol(id, left);
      final RelDataType rightColType = validateUsingCol(id, right);
      if (!SqlTypeUtil.isComparable(leftColType, rightColType)) {
        throw newValidationError(id,
            RESOURCE.naturalOrUsingColumnNotCompatible(id.getSimple(),
                leftColType.toString(), rightColType.toString()));
      }
      checkRollUpInUsing(id, left);
      checkRollUpInUsing(id, right);
    }
    break;
  default:
    throw Util.unexpected(conditionType);
  }
  // Validate NATURAL.
  if (natural) {
    if (condition != null) {
      throw newValidationError(condition, RESOURCE.naturalDisallowsOnOrUsing());
    }
    // Join on fields that occur exactly once on each side. Ignore
    // fields that occur more than once on either side.
    final RelDataType leftRowType = getNamespace(left).getRowType();
    final RelDataType rightRowType = getNamespace(right).getRowType();
    List<String> naturalColumnNames =
        SqlValidatorUtil.deriveNaturalJoinColumnList(leftRowType, rightRowType);
    // Check compatibility of the chosen columns.
    final SqlNameMatcher nameMatcher = catalogReader.nameMatcher();
    for (String name : naturalColumnNames) {
      final RelDataType leftColType = nameMatcher.field(leftRowType, name).getType();
      final RelDataType rightColType = nameMatcher.field(rightRowType, name).getType();
      if (!SqlTypeUtil.isComparable(leftColType, rightColType)) {
        throw newValidationError(join,
            RESOURCE.naturalOrUsingColumnNotCompatible(name,
                leftColType.toString(), rightColType.toString()));
      }
    }
  }
  // Which join types require/allow a join condition, or allow
  // a NATURAL keyword?
  switch (joinType) {
  case INNER:
  case LEFT:
  case RIGHT:
  case FULL:
    if ((condition == null) && !natural) {
      throw newValidationError(join, RESOURCE.joinRequiresCondition());
    }
    break;
  case COMMA:
  case CROSS:
    if (condition != null) {
      throw newValidationError(join.getConditionTypeNode(),
          RESOURCE.crossJoinDisallowsCondition());
    }
    if (natural) {
      throw newValidationError(join.getConditionTypeNode(),
          RESOURCE.crossJoinDisallowsCondition());
    }
    break;
  default:
    throw Util.unexpected(joinType);
  }
}
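A minimal sketch of the join-condition rules via the Planner API, using VALUES sub-queries as join inputs so no schema is needed; the class and helper names are illustrative assumptions. The ON join validates, while a bare JOIN with neither NATURAL, ON, nor USING is rejected by the joinRequiresCondition check above.

import org.apache.calcite.schema.SchemaPlus;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.SqlParseException;
import org.apache.calcite.tools.FrameworkConfig;
import org.apache.calcite.tools.Frameworks;
import org.apache.calcite.tools.Planner;
import org.apache.calcite.tools.ValidationException;

public final class JoinValidationSketch {
  public static void main(String[] args) {
    SchemaPlus rootSchema = Frameworks.createRootSchema(true);
    FrameworkConfig config = Frameworks.newConfigBuilder()
        .defaultSchema(rootSchema)
        .build();

    // An inner join with an ON condition passes validateJoin.
    validate(config,
        "SELECT * FROM (VALUES (1)) AS t (a) "
            + "JOIN (VALUES (1)) AS s (b) ON t.a = s.b");

    // An INNER/LEFT/RIGHT/FULL join with neither NATURAL, ON nor USING is
    // rejected by the joinRequiresCondition check.
    validate(config,
        "SELECT * FROM (VALUES (1)) AS t (a) JOIN (VALUES (1)) AS s (b)");
  }

  private static void validate(FrameworkConfig config, String sql) {
    Planner planner = Frameworks.getPlanner(config);
    try {
      planner.validate(planner.parse(sql));
      System.out.println("valid:    " + sql);
    } catch (SqlParseException | ValidationException e) {
      System.out.println("rejected: " + sql);
    } finally {
      planner.close();
    }
  }
}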