Usage of com.hazelcast.sql.impl.type.QueryDataType in project hazelcast, class HazelcastSqlToRelConverter, method convertCast:
/**
 * Converts a CAST expression, fixing several Apache Calcite problems with literals along the way
 * (see the inline comments for each workaround).
 */
private RexNode convertCast(SqlCall call, Blackboard blackboard) {
    SqlNode operand = call.operand(0);
    RexNode convertedOperand = blackboard.convertExpression(operand);

    // Validated Calcite types of the cast operand and of the CAST call itself.
    RelDataType from = validator.getValidatedNodeType(operand);
    RelDataType to = validator.getValidatedNodeType(call);

    // Hazelcast counterparts of the Calcite types; their converters do the actual value conversion.
    QueryDataType fromType = HazelcastTypeUtils.toHazelcastType(from);
    QueryDataType toType = HazelcastTypeUtils.toHazelcastType(to);

    // Non-null only when the converted operand is (or simplifies to) a literal.
    Literal literal = LiteralUtils.literal(convertedOperand);

    if (literal != null && ((RexLiteral) convertedOperand).getTypeName() != SqlTypeName.NULL) {
        // Eagerly convert the literal value between the Hazelcast types to ensure that we throw
        // consistent error messages for all literal-related conversion errors.
        try {
            // The literal's type might be different from the operand type, for example here:
            //   CAST(CAST(42 AS SMALLINT) AS TINYINT)
            // The operand of the outer cast is validated as a SMALLINT, however the operand, thanks to the
            // simplification in RexBuilder.makeCast(), is converted to a literal [42:SMALLINT]. And
            // LiteralUtils converts this operand to [42:TINYINT] - we have to use the literal's type
            // instead of the validated operand type.
            QueryDataType actualFromType = HazelcastTypeUtils.toHazelcastTypeFromSqlTypeName(literal.getTypeName());
            toType.getConverter().convertToSelf(actualFromType.getConverter(), literal.getValue());
        } catch (Exception e) {
            throw literalConversionException(validator, call, literal, toType, e);
        }

        // NOTE(review): DOUBLE literals are converted to a string with scientific conventions
        // (e.g., 1.1E1 instead of 11.0) - presumably by Calcite's default cast-to-CHAR handling; to get
        // the expected textual form we build the CHAR literal from the literal's own string value
        // instead of delegating to makeCast().
        if (SqlTypeName.CHAR_TYPES.contains(to.getSqlTypeName())) {
            return getRexBuilder().makeLiteral(literal.getStringValue(), to, true);
        }

        // Casting a string literal to TIME: to work around a Calcite problem (assumed to be a loss of
        // detail in its own string-to-TIME conversion - TODO confirm), we perform the conversion
        // manually with the Hazelcast converter and build the TimeString ourselves.
        if (SqlTypeName.CHAR_TYPES.contains(from.getSqlTypeName()) && to.getSqlTypeName() == SqlTypeName.TIME) {
            LocalTime time = fromType.getConverter().asTime(literal.getStringValue());
            TimeString timeString = new TimeString(time.getHour(), time.getMinute(), time.getSecond());
            return getRexBuilder().makeLiteral(timeString, to, true);
        }

        // Numeric-to-approximate-numeric casts: compute the converted value here so that comparisons on
        // the simplified literal evaluate as expected - e.g. a comparison that should be "true". See
        // CastFunctionIntegrationTest.testApproximateTypeSimplification - it will fail without this fix.
        if (fromType.getTypeFamily().isNumeric()) {
            if (toType.getTypeFamily().isNumericApproximate()) {
                Converter converter = Converters.getConverter(literal.getValue().getClass());
                Object convertedValue = toType.getConverter().convertToSelf(converter, literal.getValue());
                return getRexBuilder().makeLiteral(convertedValue, to, false);
            }
        }
    }

    // Literal cast to JSON: emit a JSON_PARSE call instead of a plain cast.
    if (literal != null && HazelcastTypeUtils.isJsonType(to)) {
        return getRexBuilder().makeCall(HazelcastJsonParseFunction.INSTANCE, convertedOperand);
    }

    // Delegate all remaining cases to Apache Calcite.
    return getRexBuilder().makeCast(to, convertedOperand);
}
Usage of com.hazelcast.sql.impl.type.QueryDataType in project hazelcast, class PlanExecutor, method execute:
/**
 * Executes a CREATE [OR REPLACE] VIEW plan: derives the view's column names and
 * types from the converted query, optionally validates compatibility with an
 * existing view being replaced, and stores the view in the catalog.
 */
SqlResult execute(CreateViewPlan plan) {
    // Parse and convert the view query to obtain its relational row type.
    OptimizerContext context = plan.context();
    SqlNode sqlNode = context.parse(plan.viewQuery()).getNode();
    RelNode relNode = context.convert(sqlNode).getRel();

    List<String> columnNames = new ArrayList<>();
    List<QueryDataType> columnTypes = new ArrayList<>();
    for (RelDataTypeField rowField : relNode.getRowType().getFieldList()) {
        columnNames.add(rowField.getName());
        columnTypes.add(toHazelcastType(rowField.getType()));
    }

    View newView = new View(plan.viewName(), plan.viewQuery(), plan.isStream(), columnNames, columnTypes);

    // When replacing, reject incompatible row-type changes against the existing view, if any.
    if (plan.isReplace()) {
        View previousView = catalog.getView(plan.viewName());
        if (previousView != null) {
            checkViewNewRowType(previousView, newView);
        }
    }

    catalog.createView(newView, plan.isReplace(), plan.ifNotExists());
    return UpdateSqlResultImpl.createUpdateCountResult(0);
}
Usage of com.hazelcast.sql.impl.type.QueryDataType in project hazelcast, class PlanExecutor, method checkViewNewRowType:
/**
 * When a view is replaced, the new view must contain all the
 * original columns with the same types. Adding new columns or
 * reordering them is allowed.
 * <p>
 * This is an interim mitigation for
 * https://github.com/hazelcast/hazelcast/issues/20032. It disallows
 * incompatible changes when doing CREATE OR REPLACE VIEW, however
 * incompatible changes are still possible with DROP VIEW followed
 * by a CREATE VIEW.
 *
 * @throws QueryException if an original column is missing from the
 *         replacement or its type family changed
 */
private static void checkViewNewRowType(View original, View replacement) {
    // Index the replacement's columns by name for the lookup below.
    Map<String, QueryDataType> replacementTypesByName = new HashMap<>();
    List<String> replacementNames = replacement.viewColumnNames();
    for (int i = 0; i < replacementNames.size(); i++) {
        replacementTypesByName.put(replacementNames.get(i), replacement.viewColumnTypes().get(i));
    }

    // Every original column must still exist with a matching type family.
    List<String> originalNames = original.viewColumnNames();
    for (int i = 0; i < originalNames.size(); i++) {
        String origName = originalNames.get(i);
        QueryDataType origType = original.viewColumnTypes().get(i);
        QueryDataType newType = replacementTypesByName.get(origName);
        if (newType == null) {
            throw QueryException.error("Can't replace view, the new view doesn't contain column '" + origName + "'");
        }
        if (newType.getTypeFamily() != origType.getTypeFamily()) {
            throw QueryException.error("Can't replace view, the type for column '" + origName + "' changed from " + origType.getTypeFamily() + " to " + newType.getTypeFamily());
        }
    }
}
Usage of com.hazelcast.sql.impl.type.QueryDataType in project hazelcast, class RowProjector, method createExtractors:
/**
 * Builds one {@link QueryExtractor} per (path, type) pair by delegating to the
 * target; the result array is positionally aligned with {@code paths}.
 */
private static QueryExtractor[] createExtractors(QueryTarget target, String[] paths, QueryDataType[] types) {
    int count = paths.length;
    QueryExtractor[] result = new QueryExtractor[count];
    int index = 0;
    while (index < count) {
        result[index] = target.createExtractor(paths[index], types[index]);
        index++;
    }
    return result;
}
Usage of com.hazelcast.sql.impl.type.QueryDataType in project hazelcast, class AvroResolver, method resolveFields:
/**
 * Maps each Avro schema field to a {@link MappingField}, keeping the first
 * occurrence of every field name and preserving schema order.
 */
static List<MappingField> resolveFields(Schema schema) {
    // LinkedHashMap keeps schema declaration order; putIfAbsent keeps the first duplicate.
    Map<String, MappingField> fieldsByName = new LinkedHashMap<>();
    for (Schema.Field recordField : schema.getFields()) {
        MappingField mappingField =
                new MappingField(recordField.name(), resolveType(recordField.schema().getType()));
        fieldsByName.putIfAbsent(mappingField.name(), mappingField);
    }
    return new ArrayList<>(fieldsByName.values());
}
Aggregations