use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.type.SqlTypeFamily in project hive by apache.
the class SqlFunctionConverter method getUDFInfo.
private static CalciteUDFInfo getUDFInfo(String hiveUdfName,
    List<RelDataType> calciteArgTypes, RelDataType calciteRetType) {
  CalciteUDFInfo udfInfo = new CalciteUDFInfo();
  udfInfo.udfName = hiveUdfName;
  udfInfo.returnTypeInference = ReturnTypes.explicit(calciteRetType);
  udfInfo.operandTypeInference = InferTypes.explicit(calciteArgTypes);
  ImmutableList.Builder<SqlTypeFamily> typeFamilyBuilder = new ImmutableList.Builder<SqlTypeFamily>();
  for (RelDataType at : calciteArgTypes) {
    typeFamilyBuilder.add(Util.first(at.getSqlTypeName().getFamily(), SqlTypeFamily.ANY));
  }
  udfInfo.operandTypeChecker = OperandTypes.family(typeFamilyBuilder.build());
  return udfInfo;
}
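The loop maps each argument's SqlTypeName to its SqlTypeFamily (falling back to ANY) and hands the list to OperandTypes.family, which produces the UDF's operand type checker. A rough standalone sketch of that last step, assuming the plain org.apache.calcite packages rather than Beam's vendored ones and a hypothetical class name:

import org.apache.calcite.sql.type.OperandTypes;
import org.apache.calcite.sql.type.SqlOperandTypeChecker;
import org.apache.calcite.sql.type.SqlTypeFamily;

import com.google.common.collect.ImmutableList;

// Hypothetical demo class, not part of the Hive source above.
public class FamilyCheckerSketch {
  public static void main(String[] args) {
    // A checker for a two-argument UDF: first operand in the CHARACTER family,
    // second in the NUMERIC family, mirroring what getUDFInfo builds per argument.
    SqlOperandTypeChecker checker =
        OperandTypes.family(ImmutableList.of(SqlTypeFamily.CHARACTER, SqlTypeFamily.NUMERIC));
    // The resulting checker fixes the operand count at two.
    System.out.println(checker.getOperandCountRange().getMin()); // 2
    System.out.println(checker.getOperandCountRange().getMax()); // 2
  }
}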
use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.type.SqlTypeFamily in project hazelcast by hazelcast.
the class HazelcastSqlToRelConverter method convertLiteral.
/**
 * Convert a literal taking into account the type that we assigned to it during validation.
 * Otherwise Apache Calcite will try to deduce the literal type again, leading to incorrect exposed types.
 * <p>
 * For example, {@code [x:BIGINT > 1]} is interpreted as {@code [x:BIGINT > 1:BIGINT]} during validation.
 * If this method is not invoked, Apache Calcite will convert it to {@code [x:BIGINT > 1:TINYINT]} instead.
 */
private RexNode convertLiteral(SqlLiteral literal, RelDataTypeFactory typeFactory) {
  RelDataType type = validator.getValidatedNodeType(literal);
  Object value;
  if (HazelcastTypeUtils.isIntervalType(type) && !SqlUtil.isNullLiteral(literal, false)) {
    // Normalize interval literals to YEAR-MONTH or DAY-SECOND literals.
    value = literal.getValueAs(BigDecimal.class);
    SqlTypeFamily family = type.getSqlTypeName().getFamily();
    if (family == SqlTypeFamily.INTERVAL_YEAR_MONTH) {
      type = typeFactory.createSqlIntervalType(INTERVAL_YEAR_MONTH);
    } else {
      assert family == SqlTypeFamily.INTERVAL_DAY_TIME;
      type = typeFactory.createSqlIntervalType(INTERVAL_DAY_SECOND);
    }
  } else {
    value = literal.getValue();
  }
  return getRexBuilder().makeLiteral(value, type, true);
}
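The branch above relies on SqlTypeName.getFamily() grouping every month-granularity interval type under INTERVAL_YEAR_MONTH and every day/time-granularity one under INTERVAL_DAY_TIME. A minimal standalone check of that classification, assuming plain org.apache.calcite packages and a hypothetical class name:

import org.apache.calcite.sql.type.SqlTypeFamily;
import org.apache.calcite.sql.type.SqlTypeName;

// Hypothetical demo class, not part of the Hazelcast source above.
public class IntervalFamilyDemo {
  public static void main(String[] args) {
    // Month-granularity interval type names fall into the YEAR-MONTH family ...
    System.out.println(SqlTypeName.INTERVAL_YEAR.getFamily() == SqlTypeFamily.INTERVAL_YEAR_MONTH);  // true
    System.out.println(SqlTypeName.INTERVAL_MONTH.getFamily() == SqlTypeFamily.INTERVAL_YEAR_MONTH); // true
    // ... while day/time granularity falls into the DAY-TIME family.
    System.out.println(SqlTypeName.INTERVAL_MINUTE.getFamily() == SqlTypeFamily.INTERVAL_DAY_TIME);  // true
  }
}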
use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.type.SqlTypeFamily in project beam by apache.
the class BeamSqlUnparseContext method toSql.
@Override
public SqlNode toSql(RexProgram program, RexNode rex) {
  if (rex.getKind().equals(SqlKind.LITERAL)) {
    final RexLiteral literal = (RexLiteral) rex;
    SqlTypeName name = literal.getTypeName();
    SqlTypeFamily family = name.getFamily();
    if (SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE.equals(name)) {
      TimestampString timestampString = literal.getValueAs(TimestampString.class);
      return new SqlDateTimeLiteral(timestampString, POS);
    } else if (SqlTypeFamily.BINARY.equals(family)) {
      ByteString byteString = literal.getValueAs(ByteString.class);
      BitString bitString = BitString.createFromHexString(byteString.toString(16));
      return new SqlByteStringLiteral(bitString, POS);
    } else if (SqlTypeFamily.CHARACTER.equals(family)) {
      String escaped = ESCAPE_FOR_ZETA_SQL.translate(literal.getValueAs(String.class));
      return SqlLiteral.createCharString(escaped, POS);
    } else if (SqlTypeName.SYMBOL.equals(literal.getTypeName())) {
      Enum symbol = literal.getValueAs(Enum.class);
      if (TimeUnitRange.DOW.equals(symbol)) {
        return new ReplaceLiteral(literal, POS, "DAYOFWEEK");
      } else if (TimeUnitRange.DOY.equals(symbol)) {
        return new ReplaceLiteral(literal, POS, "DAYOFYEAR");
      } else if (TimeUnitRange.WEEK.equals(symbol)) {
        return new ReplaceLiteral(literal, POS, "ISOWEEK");
      }
    }
  } else if (rex.getKind().equals(SqlKind.DYNAMIC_PARAM)) {
    final RexDynamicParam param = (RexDynamicParam) rex;
    final int index = param.getIndex();
    final String name = "null_param_" + index;
    nullParams.put(name, param.getType());
    return new NamedDynamicParam(index, POS, name);
  } else if (SqlKind.SEARCH.equals(rex.getKind())) {
    // Workaround CALCITE-4716
    RexCall search = (RexCall) rex;
    RexLocalRef ref = (RexLocalRef) search.operands.get(1);
    RexLiteral literal = (RexLiteral) program.getExprList().get(ref.getIndex());
    rex = search.clone(search.getType(), ImmutableList.of(search.operands.get(0), literal));
  }
  return super.toSql(program, rex);
}
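The BINARY and CHARACTER branches key off the literal's type family rather than its exact SqlTypeName, so fixed- and variable-length variants are handled uniformly. A small standalone check of those family mappings, assuming plain org.apache.calcite packages and a hypothetical class name:

import org.apache.calcite.sql.type.SqlTypeFamily;
import org.apache.calcite.sql.type.SqlTypeName;

// Hypothetical demo class, not part of the Beam source above.
public class LiteralFamilyDemo {
  public static void main(String[] args) {
    // BINARY and VARBINARY share the BINARY family.
    System.out.println(SqlTypeFamily.BINARY.equals(SqlTypeName.BINARY.getFamily()));     // true
    System.out.println(SqlTypeFamily.BINARY.equals(SqlTypeName.VARBINARY.getFamily()));  // true
    // CHAR and VARCHAR share the CHARACTER family.
    System.out.println(SqlTypeFamily.CHARACTER.equals(SqlTypeName.CHAR.getFamily()));    // true
    System.out.println(SqlTypeFamily.CHARACTER.equals(SqlTypeName.VARCHAR.getFamily())); // true
  }
}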
use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.type.SqlTypeFamily in project flink by apache.
the class HiveParserSqlFunctionConverter method getUDFInfo.
private static CalciteUDFInfo getUDFInfo(String hiveUdfName,
    List<RelDataType> calciteArgTypes, RelDataType calciteRetType) {
  CalciteUDFInfo udfInfo = new CalciteUDFInfo();
  udfInfo.udfName = hiveUdfName;
  String[] nameParts = hiveUdfName.split("\\.");
  if (nameParts.length > 1) {
    udfInfo.identifier = new SqlIdentifier(
        Arrays.stream(nameParts).collect(Collectors.toList()), new SqlParserPos(0, 0));
  }
  udfInfo.returnTypeInference = ReturnTypes.explicit(calciteRetType);
  udfInfo.operandTypeInference = InferTypes.explicit(calciteArgTypes);
  List<SqlTypeFamily> typeFamily = new ArrayList<>();
  for (RelDataType argType : calciteArgTypes) {
    typeFamily.add(Util.first(argType.getSqlTypeName().getFamily(), SqlTypeFamily.ANY));
  }
  udfInfo.operandTypeChecker = OperandTypes.family(Collections.unmodifiableList(typeFamily));
  return udfInfo;
}
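As in the Hive variant, Util.first supplies SqlTypeFamily.ANY whenever getSqlTypeName().getFamily() yields null for an argument type. A tiny standalone illustration of that fallback, assuming plain org.apache.calcite packages and a hypothetical class name:

import org.apache.calcite.sql.type.SqlTypeFamily;
import org.apache.calcite.util.Util;

// Hypothetical demo class, not part of the Flink source above.
public class FamilyFallbackDemo {
  public static void main(String[] args) {
    SqlTypeFamily missing = null; // stands in for a getFamily() call that returned null
    // Util.first returns its first argument unless it is null, otherwise the second.
    System.out.println(Util.first(missing, SqlTypeFamily.ANY));                // ANY
    System.out.println(Util.first(SqlTypeFamily.NUMERIC, SqlTypeFamily.ANY));  // NUMERIC
  }
}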
use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.type.SqlTypeFamily in project calcite by apache.
the class SqlWindow method validateFrameBoundary.
private void validateFrameBoundary(SqlNode bound, boolean isRows,
    SqlTypeFamily orderTypeFam, SqlValidator validator, SqlValidatorScope scope) {
  if (null == bound) {
    return;
  }
  bound.validate(validator, scope);
  switch (bound.getKind()) {
    case LITERAL:
      // "CURRENT ROW", "UNBOUNDED PRECEDING" and "UNBOUNDED FOLLOWING"
      break;
    case OTHER:
    case FOLLOWING:
    case PRECEDING:
      assert bound instanceof SqlCall;
      final SqlNode boundVal = ((SqlCall) bound).operand(0);
      // A ROWS bound must be a non-negative exact integral constant (zero is allowed).
      if (isRows) {
        if (boundVal instanceof SqlNumericLiteral) {
          final SqlNumericLiteral boundLiteral = (SqlNumericLiteral) boundVal;
          if ((!boundLiteral.isExact()) || (boundLiteral.getScale() != 0)
              || (0 > boundLiteral.longValue(true))) {
            // true == throw if not exact (we just tested that - right?)
            throw validator.newValidationError(boundVal, RESOURCE.rowMustBeNonNegativeIntegral());
          }
        } else {
          // Allow expressions in ROWS clause
        }
      }
      // For a RANGE spec, check that the boundary type and the ORDER BY type are compatible.
      if (orderTypeFam != null && !isRows) {
        RelDataType bndType = validator.deriveType(scope, boundVal);
        SqlTypeFamily bndTypeFam = bndType.getSqlTypeName().getFamily();
        switch (orderTypeFam) {
          case NUMERIC:
            if (SqlTypeFamily.NUMERIC != bndTypeFam) {
              throw validator.newValidationError(boundVal, RESOURCE.orderByRangeMismatch());
            }
            break;
          case DATE:
          case TIME:
          case TIMESTAMP:
            if (SqlTypeFamily.INTERVAL_DAY_TIME != bndTypeFam
                && SqlTypeFamily.INTERVAL_YEAR_MONTH != bndTypeFam) {
              throw validator.newValidationError(boundVal, RESOURCE.orderByRangeMismatch());
            }
            break;
          default:
            throw validator.newValidationError(boundVal, RESOURCE.orderByDataTypeProhibitsRange());
        }
      }
      break;
    default:
      throw new AssertionError("Unexpected node type");
  }
}
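For a RANGE frame ordered by a DATE, TIME or TIMESTAMP column, the check above accepts a bound only if its type belongs to one of the two interval families. A compact standalone restatement of that condition, assuming plain org.apache.calcite packages and a hypothetical class name:

import org.apache.calcite.sql.type.SqlTypeFamily;
import org.apache.calcite.sql.type.SqlTypeName;

// Hypothetical demo class, not part of the Calcite source above.
public class RangeBoundFamilyDemo {
  public static void main(String[] args) {
    // e.g. "RANGE INTERVAL '1' DAY PRECEDING" over an ORDER BY on a DATE column.
    SqlTypeFamily bndTypeFam = SqlTypeName.INTERVAL_DAY.getFamily();
    boolean compatible = bndTypeFam == SqlTypeFamily.INTERVAL_DAY_TIME
        || bndTypeFam == SqlTypeFamily.INTERVAL_YEAR_MONTH;
    System.out.println(compatible); // true

    // A plain numeric bound would instead be rejected with orderByRangeMismatch.
    SqlTypeFamily numericFam = SqlTypeName.INTEGER.getFamily();
    System.out.println(numericFam == SqlTypeFamily.INTERVAL_DAY_TIME
        || numericFam == SqlTypeFamily.INTERVAL_YEAR_MONTH);    // false
  }
}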