Example usage of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.type.SqlTypeFamily in the Apache Calcite project: class StandardConvertletTable, method consistentType.
/**
 * Derives a single type that all operands can be cast to, given the operator's
 * consistency requirement, or returns {@code null} when no explicit casts are
 * needed (or no policy applies).
 *
 * @param cx          conversion context supplying the type factory
 * @param consistency operand-consistency policy declared by the operator
 * @param types       derived types of the operands
 * @return the common target type, or null if operands may keep their types
 */
private static RelDataType consistentType(SqlRexContext cx, SqlOperandTypeChecker.Consistency consistency, List<RelDataType> types) {
switch(consistency) {
case COMPARE:
final Set<RelDataTypeFamily> distinctFamilies = Sets.newHashSet(RexUtil.families(types));
if (distinctFamilies.size() < 2) {
// Every operand already belongs to the same family; no casts required.
return null;
}
// Collect the operands that are not character-typed; comparisons prefer
// to coerce character operands toward the non-character family.
final List<RelDataType> nonCharacter = Lists.newArrayList();
for (RelDataType operandType : types) {
if (operandType.getFamily() != SqlTypeFamily.CHARACTER) {
nonCharacter.add(operandType);
}
}
if (!nonCharacter.isEmpty()) {
final boolean droppedCharacter = nonCharacter.size() < types.size();
types = nonCharacter;
if (droppedCharacter) {
final RelDataTypeFamily leadFamily = nonCharacter.get(0).getFamily();
if (leadFamily instanceof SqlTypeFamily) {
switch((SqlTypeFamily) leadFamily) {
case INTEGER:
case NUMERIC:
// Character operands were dropped in favor of a numeric family;
// widen to BIGINT to give the parsed values some headroom.
nonCharacter.add(cx.getTypeFactory().createSqlType(SqlTypeName.BIGINT));
}
}
}
}
// Same computation as LEAST_RESTRICTIVE, applied to the filtered types.
return cx.getTypeFactory().leastRestrictive(types);
case LEAST_RESTRICTIVE:
return cx.getTypeFactory().leastRestrictive(types);
default:
return null;
}
}
Example usage of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.type.SqlTypeFamily in the Apache Calcite project: class SqlWindow, method validate.
/**
 * Validates this window specification.
 *
 * <p>If this window references a named window ({@code refName != null}), the
 * referenced window's clauses are resolved and validated in place of the
 * local ones. PARTITION BY and ORDER BY items must not contain OVER clauses,
 * and the SQL:2003 framing rules (6.10 / 7.10) are enforced for RANK-style
 * functions and frame boundaries.
 *
 * @param validator validator performing the check and producing errors
 * @param scope scope in which this window occurs
 */
@Override
public void validate(SqlValidator validator, SqlValidatorScope scope) {
// REVIEW
SqlValidatorScope operandScope = scope;
// Fix: removed unused local `declName` (was copied from the field but never read).
SqlIdentifier refName = this.refName;
SqlNodeList partitionList = this.partitionList;
SqlNodeList orderList = this.orderList;
SqlLiteral isRows = this.isRows;
SqlNode lowerBound = this.lowerBound;
SqlNode upperBound = this.upperBound;
SqlLiteral allowPartial = this.allowPartial;
if (refName != null) {
// This window references a named window; inherit all of its clauses.
SqlWindow win = validator.resolveWindow(this, operandScope, false);
partitionList = win.partitionList;
orderList = win.orderList;
isRows = win.isRows;
lowerBound = win.lowerBound;
upperBound = win.upperBound;
allowPartial = win.allowPartial;
}
for (SqlNode partitionItem : partitionList) {
try {
// OverFinder throws ControlFlowException when it meets a nested OVER.
partitionItem.accept(Util.OverFinder.INSTANCE);
} catch (ControlFlowException e) {
throw validator.newValidationError(this, RESOURCE.partitionbyShouldNotContainOver());
}
partitionItem.validateExpr(validator, operandScope);
}
for (SqlNode orderItem : orderList) {
// Disable column-reference expansion while validating sort keys, then
// restore the previous setting even if validation throws.
boolean savedColumnReferenceExpansion = validator.getColumnReferenceExpansion();
validator.setColumnReferenceExpansion(false);
try {
orderItem.accept(Util.OverFinder.INSTANCE);
} catch (ControlFlowException e) {
throw validator.newValidationError(this, RESOURCE.orderbyShouldNotContainOver());
}
try {
orderItem.validateExpr(validator, scope);
} finally {
validator.setColumnReferenceExpansion(savedColumnReferenceExpansion);
}
}
// 6.10 rule 6a Function RANK & DENSE_RANK require ORDER BY clause
if (orderList.size() == 0 && !SqlValidatorUtil.containsMonotonic(scope) && windowCall != null && windowCall.getOperator().requiresOrder()) {
throw validator.newValidationError(this, RESOURCE.funcNeedsOrderBy());
}
// Run framing checks if there are any
if (upperBound != null || lowerBound != null) {
// 6.10 Rule 6a RANK & DENSE_RANK do not allow ROWS or RANGE
if (windowCall != null && !windowCall.getOperator().allowsFraming()) {
throw validator.newValidationError(isRows, RESOURCE.rankWithFrame());
}
SqlTypeFamily orderTypeFam = null;
// SQL03 7.10 Rule 11a
if (orderList.size() > 0) {
// if order by is a compound list then range not allowed
if (orderList.size() > 1 && !isRows()) {
throw validator.newValidationError(isRows, RESOURCE.compoundOrderByProhibitsRange());
}
// get the type family for the sort key for Frame Boundary Val.
RelDataType orderType = validator.deriveType(operandScope, orderList.get(0));
orderTypeFam = orderType.getSqlTypeName().getFamily();
} else {
// No ORDER BY: RANGE framing is only legal over input that is
// sorted already
if (!isRows() && !SqlValidatorUtil.containsMonotonic(scope)) {
throw validator.newValidationError(this, RESOURCE.overMissingOrderBy());
}
}
// Let the bounds validate themselves
validateFrameBoundary(lowerBound, isRows(), orderTypeFam, validator, operandScope);
validateFrameBoundary(upperBound, isRows(), orderTypeFam, validator, operandScope);
// Validate across boundaries. 7.10 Rule 8 a-d
checkSpecialLiterals(this, validator);
} else if (orderList.size() == 0 && !SqlValidatorUtil.containsMonotonic(scope) && windowCall != null && windowCall.getOperator().requiresOrder()) {
throw validator.newValidationError(this, RESOURCE.overMissingOrderBy());
}
if (!isRows() && !isAllowPartial()) {
throw validator.newValidationError(allowPartial, RESOURCE.cannotUseDisallowPartialWithRange());
}
}
Example usage of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.type.SqlTypeFamily in the Apache Beam project: class SqlOperators, method createUdfOperator.
/**
 * Builds a Calcite user-defined-function operator wrapping a Java method.
 *
 * @param name SQL-visible function name
 * @param method Java implementation method
 * @param syntax call syntax the resulting operator should report
 * @param funGroup function group the UDF belongs to
 * @param jarPath path of the jar providing the implementation
 * @return an operator whose {@code getSyntax()} returns the given syntax
 */
private static SqlUserDefinedFunction createUdfOperator(String name, Method method, final SqlSyntax syntax, String funGroup, String jarPath) {
final Function fn = ZetaSqlScalarFunctionImpl.create(method, funGroup, jarPath);
final RelDataTypeFactory typeFactory = createTypeFactory();
// Derive, in parallel, each parameter's type and its type family
// (falling back to ANY when a family is not defined).
final List<RelDataType> argTypes = new ArrayList<>();
final List<SqlTypeFamily> families = new ArrayList<>();
for (FunctionParameter parameter : fn.getParameters()) {
final RelDataType parameterType = parameter.getType(typeFactory);
argTypes.add(parameterType);
families.add(Util.first(parameterType.getSqlTypeName().getFamily(), SqlTypeFamily.ANY));
}
// Optional parameters are permitted to be absent at the call site.
final FamilyOperandTypeChecker operandChecker = OperandTypes.family(families, i -> fn.getParameters().get(i).isOptional());
final List<RelDataType> sqlParamTypes = toSql(typeFactory, argTypes);
return new SqlUserDefinedFunction(new SqlIdentifier(name, SqlParserPos.ZERO), infer((ScalarFunction) fn), InferTypes.explicit(argTypes), operandChecker, sqlParamTypes, fn) {
@Override
public SqlSyntax getSyntax() {
// Report the caller-supplied syntax instead of the default.
return syntax;
}
};
}
Example usage of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.type.SqlTypeFamily in the Hazelcast project: class HazelcastTypeUtils, method toHazelcastTypeFromSqlTypeName.
/**
 * Maps a Calcite {@link SqlTypeName} to the corresponding Hazelcast
 * {@code QueryDataType}.
 *
 * @param sqlTypeName Calcite SQL type name to convert
 * @return the Hazelcast query data type
 * @throws IllegalArgumentException if the type has no Hazelcast mapping
 */
public static QueryDataType toHazelcastTypeFromSqlTypeName(SqlTypeName sqlTypeName) {
final SqlTypeFamily family = sqlTypeName.getFamily();
// Interval families collapse to fixed Hazelcast interval types,
// regardless of the specific interval type name.
if (family == SqlTypeFamily.INTERVAL_YEAR_MONTH) {
return QueryDataType.INTERVAL_YEAR_MONTH;
}
if (family == SqlTypeFamily.INTERVAL_DAY_TIME) {
return QueryDataType.INTERVAL_DAY_SECOND;
}
// All remaining types go through the static lookup table.
final QueryDataType mapped = CALCITE_TO_HZ.get(sqlTypeName);
if (mapped == null) {
throw new IllegalArgumentException("Unexpected SQL type: " + sqlTypeName);
}
return mapped;
}
Example usage of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.type.SqlTypeFamily in the Apache Hive project: class SqlFunctionConverter, method getUDFInfo.
/**
 * Assembles the Calcite operator metadata (return/operand inference and
 * operand type checker) for a Hive UDF.
 *
 * @param hiveUdfName name of the Hive UDF
 * @param calciteArgTypes argument types already translated to Calcite
 * @param calciteRetType return type already translated to Calcite
 * @return a populated {@code CalciteUDFInfo}
 */
private static CalciteUDFInfo getUDFInfo(String hiveUdfName, ImmutableList<RelDataType> calciteArgTypes, RelDataType calciteRetType) {
// Each argument contributes its type family; ANY is the fallback when a
// type has no family defined.
final ImmutableList.Builder<SqlTypeFamily> families = new ImmutableList.Builder<SqlTypeFamily>();
for (RelDataType argType : calciteArgTypes) {
families.add(Util.first(argType.getSqlTypeName().getFamily(), SqlTypeFamily.ANY));
}
final CalciteUDFInfo udfInfo = new CalciteUDFInfo();
udfInfo.udfName = hiveUdfName;
udfInfo.returnTypeInference = ReturnTypes.explicit(calciteRetType);
udfInfo.operandTypeInference = InferTypes.explicit(calciteArgTypes);
udfInfo.operandTypeChecker = OperandTypes.family(families.build());
return udfInfo;
}
Aggregations